| column | type | values |
|---|---|---|
| id_within_dataset | int64 | 1 to 55.5k |
| snippet | string | lengths 19 to 14.2k |
| tokens | list | lengths 6 to 1.63k |
| nl | string | lengths 6 to 352 |
| split_within_dataset | string | 1 value |
| is_duplicated | bool | 2 classes |
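Each record below pairs a code snippet with its natural-language description. As a minimal sketch of how rows of this schema might be consumed — assuming each row is available as a plain dict keyed by the columns above; the loading mechanism and the helper name here are illustrative, not part of the dataset:

```python
# Minimal sketch, assuming rows are dicts keyed by the columns above.
# iter_code_nl_pairs is a hypothetical helper, not a dataset API.
def iter_code_nl_pairs(rows, skip_duplicates=True):
    """Yield (snippet, nl) pairs, optionally dropping duplicated rows."""
    for row in rows:
        if skip_duplicates and row["is_duplicated"]:
            continue  # skip rows flagged as duplicated
        yield row["snippet"], row["nl"]

# Usage with one row shaped like the records below:
example = {
    "id_within_dataset": 9391,
    "snippet": "def _iszero(x):\n    return x.is_zero",
    "tokens": ["def", "_iszero", "(", "x", ")", ":", "return", "x", ".", "is_zero"],
    "nl": "returns true if x is zero.",
    "split_within_dataset": "train",
    "is_duplicated": False,
}
print(next(iter_code_nl_pairs([example])))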

id_within_dataset: 9,315

```python
def calc_wedge_bounds(levels, level_width):
    inners = (levels * level_width)
    outers = (inners + level_width)
    return (inners, outers)
```
nl: calculate inner and outer radius bounds of the donut wedge based on levels.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,316

```python
def test_to_string_radian_with_precision():
    a = Angle(3.0, unit=u.rad)
    assert (a.to_string(precision=3, sep=u'fromunit') == u'3.000rad')
```
nl: regression test for a bug that caused to_string to crash for angles in radians when specifying the precision.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,317

```python
def _bool_to_json(value):
    if isinstance(value, bool):
        value = ('true' if value else 'false')
    return value
```
nl: coerce value to a json-compatible representation.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,319

```python
def remove_instance_type_access(flavorid, projectid, ctxt=None):
    if (ctxt is None):
        ctxt = context.get_admin_context()
    return db.instance_type_access_remove(ctxt, flavorid, projectid)
```
nl: remove instance type access for project.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,320

```python
def showCritical(text, parent=None, help='', title='Anki'):
    return showInfo(text, parent, help, 'critical', title=title)
```
nl: show a small critical error with an ok button.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,321

```python
def list_clusters_by_datacenter(kwargs=None, call=None):
    if (call != 'function'):
        raise SaltCloudSystemExit('The list_clusters_by_datacenter function must be called with -f or --function.')
    ret = {}
    datacenter_name = (kwargs.get('datacenter') if (kwargs and ('datacenter' in kwargs)) else None)
    datacenter_properties = ['name']
    datacenter_list = salt.utils.vmware.get_mors_with_properties(_get_si(), vim.Datacenter, datacenter_properties)
    for datacenter in datacenter_list:
        ret[datacenter['name']] = []
        for cluster in datacenter['object'].hostFolder.childEntity:
            if isinstance(cluster, vim.ClusterComputeResource):
                ret[datacenter['name']].append(cluster.name)
        if (datacenter_name and (datacenter_name == datacenter['name'])):
            return {'Clusters by Datacenter': {datacenter_name: ret[datacenter_name]}}
    return {'Clusters by Datacenter': ret}
```
nl: list clusters for each datacenter, or clusters for a specified datacenter, in this vmware environment. cli example: .
split_within_dataset: train, is_duplicated: true

id_within_dataset: 9,326

```python
def test_nm3_fit():
    ratio = 'auto'
    nm3 = NearMiss(ratio=ratio, random_state=RND_SEED, version=VERSION_NEARMISS)
    nm3.fit(X, Y)
    assert_equal(nm3.min_c_, 0)
    assert_equal(nm3.maj_c_, 2)
    assert_equal(nm3.stats_c_[0], 3)
    assert_equal(nm3.stats_c_[1], 5)
    assert_equal(nm3.stats_c_[2], 7)
```
nl: test the fitting method.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,328

```python
@when(u'we send "\\?" command')
def step_send_help(context):
    context.cli.sendline(u'\\?')
```
nl: send ? to see help.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,332

```python
def exception_translated(exception_type):
    def _exception_translated(f):
        @functools.wraps(f)
        def wrapper(self, *args, **kwargs):
            try:
                return f(self, *args, **kwargs)
            except exception.PublicIDNotFound as e:
                if (exception_type == 'user'):
                    raise exception.UserNotFound(user_id=str(e))
                elif (exception_type == 'group'):
                    raise exception.GroupNotFound(group_id=str(e))
                elif (exception_type == 'assertion'):
                    raise AssertionError(_('Invalid user / password'))
                else:
                    raise
        return wrapper
    return _exception_translated
```
nl: wrap api calls to map to the correct exception.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,334

```python
def has_path(G, source, target):
    try:
        sp = nx.shortest_path(G, source, target)
    except nx.NetworkXNoPath:
        return False
    return True
```
nl: generic function taking a simple graph definition as a dictionary.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,335

```python
@verbose
def read_surface(fname, read_metadata=False, return_dict=False, verbose=None):
    ret = _get_read_geometry()(fname, read_metadata=read_metadata)
    if return_dict:
        ret += (dict(rr=ret[0], tris=ret[1], ntri=len(ret[1]), use_tris=ret[1], np=len(ret[0])),)
    return ret
```
nl: load a freesurfer surface mesh in triangular format.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,336

```python
@control_command(args=[(u'max', int), (u'min', int)], signature=u'[max [min]]')
def autoscale(state, max=None, min=None):
    autoscaler = state.consumer.controller.autoscaler
    if autoscaler:
        (max_, min_) = autoscaler.update(max, min)
        return ok(u'autoscale now max={0} min={1}'.format(max_, min_))
    raise ValueError(u'Autoscale not enabled')
```
nl: modify autoscale settings.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,337

```python
def pycodestylemod_remove_ignore(ignore_code):
    if (ignore_code in pycodestylemod.DEFAULT_IGNORE):
        default_ignore = pycodestylemod.DEFAULT_IGNORE.split(',')
        default_ignore.remove(ignore_code)
        pycodestylemod.DEFAULT_IGNORE = ','.join(default_ignore)
```
nl: patch pycodestylemod.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,338

```python
def convert_alpha_characters_in_number(number):
    return _normalize_helper(number, _ALPHA_PHONE_MAPPINGS, False)
```
nl: convert alpha chars in a number to their respective digits on a keypad.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,339

```python
def server_add(s_name, s_ip, s_state=None, **connection_args):
    ret = True
    if server_exists(s_name, **connection_args):
        return False
    nitro = _connect(**connection_args)
    if (nitro is None):
        return False
    server = NSServer()
    server.set_name(s_name)
    server.set_ipaddress(s_ip)
    if (s_state is not None):
        server.set_state(s_state)
    try:
        NSServer.add(nitro, server)
    except NSNitroError as error:
        log.debug('netscaler module error - NSServer.add() failed: {0}'.format(error))
        ret = False
    _disconnect(nitro)
    return ret
```
nl: add a server. note: the default server state is enabled. cli example: .
split_within_dataset: train, is_duplicated: true

id_within_dataset: 9,340

```python
def rar3_s2k(psw, salt):
    seed = (psw.encode('utf-16le') + salt)
    iv = EMPTY
    h = sha1()
    for i in range(16):
        for j in range(16384):
            cnt = S_LONG.pack(((i * 16384) + j))
            h.update((seed + cnt[:3]))
            if (j == 0):
                iv += h.digest()[19:20]
    key_be = h.digest()[:16]
    key_le = pack('<LLLL', *unpack('>LLLL', key_be))
    return (key_le, iv)
```
nl: string-to-key hash for rar3.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,341

```python
def test_calc_footprint_2():
    fits = get_pkg_data_filename(u'data/sip.fits')
    w = wcs.WCS(fits)
    axes = (1000, 1051)
    ref = np.array([[202.39265216, 47.17756518], [202.7469062, 46.91483312], [203.11487481, 47.14359319], [202.76092671, 47.40745948]])
    footprint = w.calc_footprint(axes=axes, undistort=False)
    assert_allclose(footprint, ref)
```
nl: test calc_footprint without distortion.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,342

```python
def test_column_conversion_error():
    ipac = '| col0 |\n| double |\n 1 2\n'
    with pytest.raises(ValueError) as err:
        ascii.read(ipac, guess=False, format='ipac')
    assert ('Column col0 failed to convert:' in str(err.value))
    with pytest.raises(ValueError) as err:
        ascii.read(['a b', '1 2'], guess=False, format='basic', converters={'a': []})
    assert ('no converters' in str(err.value))
```
nl: test that context information from a column conversion error is provided.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,343

```python
@register_canonicalize
@gof.local_optimizer([T.abs_])
def local_abs_lift(node):
    if ((node.op == T.abs_) and node.inputs[0].owner):
        assert (node.nin == 1)
        if (node.inputs[0].owner.op == T.mul):
            return [T.mul(*[T.abs_(i) for i in node.inputs[0].owner.inputs])]
        if (node.inputs[0].owner.op == T.true_div):
            i = node.inputs[0].owner.inputs
            return [T.true_div(T.abs_(i[0]), T.abs_(i[1]))]
```
nl: move the abs toward the input.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,344

```python
def CreateCollectionSample():
    client = CreateClient()
    col = gdata.docs.data.Resource(type='folder', title='My Sample Folder')
    col = client.CreateResource(col)
    print 'Created collection:', col.title.text, col.resource_id.text
```
nl: create an empty collection.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,345

```python
def process_destructor(pid, exitcode):
    signals.worker_process_shutdown.send(sender=None, pid=pid, exitcode=exitcode)
```
nl: pool child process destructor.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,346

```python
def runningAsAdmin():
    isAdmin = None
    if (PLATFORM in ('posix', 'mac')):
        _ = os.geteuid()
        isAdmin = (isinstance(_, (int, float, long)) and (_ == 0))
    elif IS_WIN:
        import ctypes
        _ = ctypes.windll.shell32.IsUserAnAdmin()
        isAdmin = (isinstance(_, (int, float, long)) and (_ == 1))
    else:
        errMsg = 'sqlmap is not able to check if you are running it '
        errMsg += 'as an administrator account on this platform. '
        errMsg += 'sqlmap will assume that you are an administrator '
        errMsg += 'which is mandatory for the requested takeover attack '
        errMsg += 'to work properly'
        logger.error(errMsg)
        isAdmin = True
    return isAdmin
```
nl: returns true if the current process is run under admin privileges.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,348

```python
def get_command_line(**kwds):
    if getattr(sys, 'frozen', False):
        return ([sys.executable, '--multiprocessing-fork'] + [('%s=%r' % item) for item in kwds.items()])
    else:
        prog = 'from multiprocessing.spawn import spawn_main; spawn_main(%s)'
        prog %= ', '.join((('%s=%r' % item) for item in kwds.items()))
        opts = util._args_from_interpreter_flags()
        return (([_python_exe] + opts) + ['-c', prog, '--multiprocessing-fork'])
```
nl: returns prefix of command line used for spawning a child process.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,349

```python
def get_current_users():
    current_users = {}
    for session in Session.objects.all():
        try:
            uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY)
        except SuspiciousOperation:
            uid = None
        if (uid is not None):
            try:
                userobj = User.objects.get(pk=uid)
                current_users[userobj] = last_access_map.get(userobj.username, {})
            except User.DoesNotExist:
                LOG.debug(('User with id=%d does not exist' % uid))
    return current_users
```
nl: return a dictionary of user objects and a dictionary of the user's ip address and last access time.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,351

```python
def _MakeUserStr():
    user = users.get_current_user()
    return (user.nickname() if user else 'noauth')
```
nl: make a user string to represent the user.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,355

```python
def arange(*args, **kwargs):
    if (len(args) == 1):
        start = 0
        stop = args[0]
        step = 1
    elif (len(args) == 2):
        start = args[0]
        stop = args[1]
        step = 1
    elif (len(args) == 3):
        (start, stop, step) = args
    else:
        raise TypeError('\n arange takes 3 positional arguments: arange([start], stop, [step])\n ')
    if ('chunks' not in kwargs):
        raise ValueError('Must supply a chunks= keyword argument')
    chunks = kwargs['chunks']
    dtype = kwargs.get('dtype', None)
    if (dtype is None):
        dtype = np.arange(0, 1, step).dtype
    num = max(np.ceil(((stop - start) / step)), 0)
    chunks = normalize_chunks(chunks, (num,))
    name = ('arange-' + tokenize((start, stop, step, chunks, num)))
    dsk = {}
    elem_count = 0
    for (i, bs) in enumerate(chunks[0]):
        blockstart = (start + (elem_count * step))
        blockstop = (start + ((elem_count + bs) * step))
        task = (chunk.arange, blockstart, blockstop, step, bs, dtype)
        dsk[(name, i)] = task
        elem_count += bs
    return Array(dsk, name, chunks, dtype=dtype)
```
nl: creates a 1-d tensor containing a sequence of integers.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,356

```python
def get_user_permissions(user, course_key, org=None):
    if (org is None):
        org = course_key.org
        course_key = course_key.for_branch(None)
    else:
        assert (course_key is None)
    if is_ccx_course(course_key):
        return STUDIO_NO_PERMISSIONS
    all_perms = (((STUDIO_EDIT_ROLES | STUDIO_VIEW_USERS) | STUDIO_EDIT_CONTENT) | STUDIO_VIEW_CONTENT)
    if (GlobalStaff().has_user(user) or OrgInstructorRole(org=org).has_user(user)):
        return all_perms
    if (course_key and user_has_role(user, CourseInstructorRole(course_key))):
        return all_perms
    if (OrgStaffRole(org=org).has_user(user) or (course_key and user_has_role(user, CourseStaffRole(course_key)))):
        return ((STUDIO_VIEW_USERS | STUDIO_EDIT_CONTENT) | STUDIO_VIEW_CONTENT)
    if (course_key and isinstance(course_key, LibraryLocator)):
        if (OrgLibraryUserRole(org=org).has_user(user) or user_has_role(user, LibraryUserRole(course_key))):
            return (STUDIO_VIEW_USERS | STUDIO_VIEW_CONTENT)
    return STUDIO_NO_PERMISSIONS
```
nl: get the bitmask of permissions that this user has in the given course context.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,357

```python
def get_proporsals(source, offset, base='', dot=False):
    with RopeContext() as ctx:
        try:
            proposals = codeassist.code_assist(ctx.project, source, offset, ctx.resource, maxfixes=3, later_locals=False)
        except exceptions.ModuleSyntaxError:
            proposals = []
        proposals = sorted(proposals, key=_sort_proporsals)
        out = []
        preview = ('preview' in ctx.options.get('completeopt'))
        for p in proposals:
            out.append(dict(word=p.name, menu=p.type, kind=(p.scope + ':'), info=((p.get_doc() or 'No docs.') if preview else '')))
        out = _get_autoimport_proposals(out, ctx, source, offset, dot=dot)
    return out
```
nl: code assist.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,359

```python
def _update_state(context, node, instance, state):
    values = {'task_state': state}
    if (not instance):
        values['instance_uuid'] = None
        values['instance_name'] = None
    db.bm_node_update(context, node['id'], values)
```
nl: update the node state in baremetal db; if instance is not supplied, clear the instance fields.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,360

```python
def assert_not_equal(x, y, msg=None):
    if (x != y):
        return
    std_msg = ('%s equal to %s' % (_safe_rep(x), _safe_rep(y)))
    raise AssertionError(_format_msg(msg, std_msg))
```
nl: fail if the given objects are equal, as determined by the == operator.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,362

```python
def phred_quality_from_solexa(solexa_quality):
    if (solexa_quality is None):
        return None
    if (solexa_quality < (-5)):
        warnings.warn(('Solexa quality less than -5 passed, %r' % solexa_quality), BiopythonWarning)
    return (10 * log(((10 ** (solexa_quality / 10.0)) + 1), 10))
```
nl: convert a solexa quality to a phred quality.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,363

```python
def get_running_hubs():
    hubs = {}
    lockfilename = u''
    if (u'SAMP_HUB' in os.environ):
        if os.environ[u'SAMP_HUB'].startswith(u'std-lockurl:'):
            lockfilename = os.environ[u'SAMP_HUB'][len(u'std-lockurl:'):]
    else:
        lockfilename = os.path.join(_find_home(), u'.samp')
    (hub_is_running, lockfiledict) = check_running_hub(lockfilename)
    if hub_is_running:
        hubs[lockfilename] = lockfiledict
    lockfiledir = u''
    lockfiledir = os.path.join(_find_home(), u'.samp-1')
    if os.path.isdir(lockfiledir):
        for filename in os.listdir(lockfiledir):
            if filename.startswith(u'samp-hub'):
                lockfilename = os.path.join(lockfiledir, filename)
                (hub_is_running, lockfiledict) = check_running_hub(lockfilename)
                if hub_is_running:
                    hubs[lockfilename] = lockfiledict
    return hubs
```
nl: return a dictionary containing the lock-file contents of all the currently running hubs.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,364

```python
def uninstall_app(app):
    return __salt__['file.remove'](app)
```
nl: uninstall an app file by removing it from the applications directory. args: app: the location of the .
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,365

```python
def time_format(value, format=None):
    return dateformat.time_format(value, get_format((format or 'TIME_FORMAT')))
```
nl: convenience function.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,366

```python
def range4():
    raise NotImplementedError
```
nl: this is never called if plot_directive works as expected.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,367

```python
def get_manager_from_config(admin_password=None, rdbms_type=None):
    sect = 'AUTOTEST_WEB'
    name = settings.settings.get_value(sect, 'database')
    user = settings.settings.get_value(sect, 'user')
    password = settings.settings.get_value(sect, 'password')
    host = settings.settings.get_value(sect, 'host')
    if (rdbms_type is None):
        rdbms_type = settings.settings.get_value(sect, 'db_type')
    klass = get_manager_class(rdbms_type)
    manager = klass(name, admin_password=admin_password, user=user, password=password, host=host)
    return manager
```
nl: returns a manager instance from the information in the configuration file.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,368

```python
def _coeffs_generator(n):
    for coeffs in variations([1, (-1)], n, repetition=True):
        (yield list(coeffs))
```
nl: generate coefficients for primitive_element().
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,370

```python
def sanitize_sequence(data):
    if (six.PY3 and isinstance(data, collections.abc.MappingView)):
        return list(data)
    return data
```
nl: converts a dictview object to a list.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,371

```python
def text_normalize(text):
    if isinstance(text, bytes):
        text = codecs.decode(text)
    lines = [line.strip() for line in text.splitlines() if line.strip()]
    return '\n'.join(lines)
```
nl: whitespace normalization: strip empty lines; strip leading whitespace in a line; strip trailing whitespace in a line; normalize line endings.
split_within_dataset: train, is_duplicated: true

id_within_dataset: 9,372

```python
def base64pickle(value):
    retVal = None
    try:
        retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
    except:
        warnMsg = 'problem occurred while serializing '
        warnMsg += ("instance of a type '%s'" % type(value))
        singleTimeWarnMessage(warnMsg)
        try:
            retVal = base64encode(pickle.dumps(value))
        except:
            retVal = base64encode(pickle.dumps(str(value), pickle.HIGHEST_PROTOCOL))
    return retVal
```
nl: serializes the supplied value and encodes it to base64 format.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,374

```python
def unpickleModule(name):
    if (name in oldModules):
        log.msg(('Module has moved: %s' % name))
        name = oldModules[name]
        log.msg(name)
    return __import__(name, {}, {}, 'x')
```
nl: support function for copy_reg to unpickle module refs.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,375

```python
def getOrderedSurroundingLoops(perimeterWidth, surroundingLoops):
    insides = []
    orderedSurroundingLoops = []
    for loopIndex in xrange(len(surroundingLoops)):
        surroundingLoop = surroundingLoops[loopIndex]
        otherLoops = []
        for beforeIndex in xrange(loopIndex):
            otherLoops.append(surroundingLoops[beforeIndex].boundary)
        for afterIndex in xrange((loopIndex + 1), len(surroundingLoops)):
            otherLoops.append(surroundingLoops[afterIndex].boundary)
        if isPathEntirelyInsideLoops(otherLoops, surroundingLoop.boundary):
            insides.append(surroundingLoop)
        else:
            orderedSurroundingLoops.append(surroundingLoop)
    for outside in orderedSurroundingLoops:
        outside.getFromInsideSurroundings(insides, perimeterWidth)
    return orderedSurroundingLoops
```
nl: get ordered surrounding loops from surrounding loops.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,376

```python
def resolve_dotted(name):
    names = name.split('.')
    path = names.pop(0)
    target = __import__(path)
    while names:
        segment = names.pop(0)
        path += ('.' + segment)
        try:
            target = getattr(target, segment)
        except AttributeError:
            __import__(path)
            target = getattr(target, segment)
    return target
```
nl: given the dotted name for a python object, resolve and return the object.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,380

```python
def _make_model_field(label, initial, choices, multi=True):
    if multi:
        field = forms.models.ModelMultipleChoiceField(choices, required=False)
        field.initial_objs = initial
        field.initial = [obj.pk for obj in initial]
        field.label = label
    else:
        field = forms.models.ModelChoiceField(choices, required=False)
        field.initial_obj = initial
        if initial:
            field.initial = initial.pk
    return field
```
nl: creates a multiple choice field with the given query object as choices.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,381

```python
def CDLEVENINGDOJISTAR(barDs, count, penetration=(-4e+37)):
    return call_talib_with_ohlc(barDs, count, talib.CDLEVENINGDOJISTAR, penetration)
```
nl: evening doji star.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,382

```python
@pytest.mark.parametrize('parallel', [True, False])
def test_csv_comment_default(parallel, read_csv):
    text = 'a,b,c\n#1,2,3\n4,5,6'
    table = read_csv(text, parallel=parallel)
    expected = Table([['#1', '4'], [2, 5], [3, 6]], names=('a', 'b', 'c'))
    assert_table_equal(table, expected)
```
nl: unless the comment parameter is specified .
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,385

```python
def generatePwnstaller():
    print '\n========================================================================='
    print (' Pwnstaller | [Version]: %s' % PWNSTALLER_VERSION)
    print '========================================================================='
    print ' [Web]: http://harmj0y.net/ | [Twitter]: @harmj0y'
    print '========================================================================='
    print '\n'
    print ' [*] Generating new runw source files...\n'
    pwnstallerBuildSource()
    print ' [*] Compiling a new runw.exe...\n'
    pwnstallerCompileRunw()
    print ' [*] Pwnstaller generation complete!\n'
    os.system((('mv runw.exe ' + settings.PYINSTALLER_PATH) + 'support/loader/Windows-32bit/'))
    print ((' [*] Pwnstaller runw.exe moved to ' + settings.PYINSTALLER_PATH) + '/PyInstaller/bootloader/Windows-32bit/\n')
```
nl: build the randomized source files for pwnstaller.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,386

```python
def _collapse_metadata(mapping_f, collapse_fields):
    (mapping_data, header, _) = parse_mapping_file(mapping_f)
    sample_md = pd.DataFrame(mapping_data, columns=header)
    grouped = sample_md.groupby(collapse_fields)
    collapsed_md = grouped.agg((lambda x: tuple(x)))
    return collapsed_md
```
nl: load a mapping file into a dataframe and then collapse rows. parameters: mapping_f (file handle or filepath): the sample metadata mapping file.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,387

```python
@gen.engine
def GetRegistry(logs_store, path, callback):
    contents = ''
    contents = (yield gen.Task(logs_store.Get, path, must_exist=False))
    if (contents is None):
        callback(None)
        return
    buf = cStringIO.StringIO(contents)
    buf.seek(0)
    files = []
    entries = buf.readlines()
    for f in entries:
        if f:
            files.append(f.strip())
    buf.close()
    callback(files)
```
nl: open the registry at path in s3.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,389

```python
def ffmpeg_extract_audio(inputfile, output, bitrate=3000, fps=44100):
    cmd = [get_setting('FFMPEG_BINARY'), '-y', '-i', inputfile, '-ab', ('%dk' % bitrate), '-ar', ('%d' % fps), output]
    subprocess_call(cmd)
```
nl: extract the sound from a video file and save it in output.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,390

```python
def call_doctest_bad():
    pass
```
nl: check that we can still call the decorated functions.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,391

```python
def _iszero(x):
    return x.is_zero
```
nl: returns true if x is zero.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,393

```python
def create_certificates(key_dir, name, metadata=None):
    (public_key, secret_key) = zmq.curve_keypair()
    base_filename = os.path.join(key_dir, name)
    secret_key_file = '{0}.key_secret'.format(base_filename)
    public_key_file = '{0}.key'.format(base_filename)
    now = datetime.datetime.now()
    _write_key_file(public_key_file, _cert_public_banner.format(now), public_key)
    _write_key_file(secret_key_file, _cert_secret_banner.format(now), public_key, secret_key=secret_key, metadata=metadata)
    return (public_key_file, secret_key_file)
```
nl: create zmq certificates.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,394

```python
def get_ros_home(env=None):
    if (env is None):
        env = os.environ
    if (ROS_HOME in env):
        return env[ROS_HOME]
    else:
        return os.path.join(os.path.expanduser('~'), '.ros')
```
nl: get directory location of the .ros directory.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,395

```python
@decorators.memoize
def _check_mkfile():
    return salt.utils.which('mkfile')
```
nl: looks to see if mkfile is present on the system.
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,396

```python
def rm_(name, recurse=False, profile=None):
    rtn = {'name': name, 'result': True, 'changes': {}}
    if (not __salt__['etcd.get'](name, profile=profile)):
        rtn['comment'] = 'Key does not exist'
        return rtn
    if __salt__['etcd.rm'](name, recurse=recurse, profile=profile):
        rtn['comment'] = 'Key removed'
        rtn['changes'] = {name: 'Deleted'}
    else:
        rtn['comment'] = 'Unable to remove key'
    return rtn
```
nl: removes a container. name: container name or id. force: false if true .
split_within_dataset: train, is_duplicated: false

id_within_dataset: 9,397

```python
def show_network(kwargs=None, call=None):
    if (call != 'function'):
        raise SaltCloudSystemExit('The show_network function must be called with -f or --function.')
    if ((not kwargs) or ('name' not in kwargs)):
        log.error('Must specify name of network.')
        return False
    conn = get_conn()
    return _expand_item(conn.ex_get_network(kwargs['name']))
```
nl: fetches information of a certain network. cli example: .
split_within_dataset: train, is_duplicated: true

id_within_dataset: 9,398

```python
def psql_query(query, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None):
    ret = []
    csv_query = 'COPY ({0}) TO STDOUT WITH CSV HEADER'.format(query.strip().rstrip(';'))
    cmdret = _psql_prepare_and_run(['-v', 'datestyle=ISO,MDY', '-c', csv_query], runas=runas, host=host, user=user, port=port, maintenance_db=maintenance_db, password=password)
    if (cmdret['retcode'] > 0):
        return ret
    csv_file = StringIO(cmdret['stdout'])
    header = {}
    for row in csv.reader(csv_file, delimiter=',', quotechar='"'):
        if (not row):
            continue
        if (not header):
            header = row
            continue
        ret.append(dict(zip(header, row)))
    return ret
```
run an sql query and return the results as a list of dictionaries .
|
train
| true
|
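a hedged usage sketch for the psql_query snippet above; the connection parameters below are illustrative placeholders, not values from the source:

# hypothetical call; host/user/db names are made up for illustration
rows = psql_query(
    'SELECT datname, datdba FROM pg_database',
    user='postgres',
    host='localhost',
    port='5432',
    maintenance_db='postgres',
)
# each row is a dict keyed by the CSV header produced by COPY ... WITH CSV HEADER
for row in rows:
    print(row['datname'], row['datdba'])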
9,399
|
def extractSuccessfulJSONResult(response):
result = readBody(response)
result.addCallback(loads)
def getResult(data):
if (response.code > 299):
raise AssertionError((response.code, data))
return data
result.addCallback(getResult)
return result
|
[
"def",
"extractSuccessfulJSONResult",
"(",
"response",
")",
":",
"result",
"=",
"readBody",
"(",
"response",
")",
"result",
".",
"addCallback",
"(",
"loads",
")",
"def",
"getResult",
"(",
"data",
")",
":",
"if",
"(",
"response",
".",
"code",
">",
"299",
")",
":",
"raise",
"AssertionError",
"(",
"(",
"response",
".",
"code",
",",
"data",
")",
")",
"return",
"data",
"result",
".",
"addCallback",
"(",
"getResult",
")",
"return",
"result"
] |
extract a successful api result from an http response .
|
train
| false
|
9,401
|
def to_int(val, default=0):
try:
val = int(val)
except:
val = default
return val
|
[
"def",
"to_int",
"(",
"val",
",",
"default",
"=",
"0",
")",
":",
"try",
":",
"val",
"=",
"int",
"(",
"val",
")",
"except",
":",
"val",
"=",
"default",
"return",
"val"
] |
convert a value to an int, falling back to the default on failure .
|
train
| false
|
9,403
|
def set_clip_qual_left(sff_data, technical_read_length):
(header, reads) = sff_data
clip_idx = (technical_read_length + 1)
def adjust_read(read):
read['clip_qual_left'] = clip_idx
return read
return (header, imap(adjust_read, reads))
|
[
"def",
"set_clip_qual_left",
"(",
"sff_data",
",",
"technical_read_length",
")",
":",
"(",
"header",
",",
"reads",
")",
"=",
"sff_data",
"clip_idx",
"=",
"(",
"technical_read_length",
"+",
"1",
")",
"def",
"adjust_read",
"(",
"read",
")",
":",
"read",
"[",
"'clip_qual_left'",
"]",
"=",
"clip_idx",
"return",
"read",
"return",
"(",
"header",
",",
"imap",
"(",
"adjust_read",
",",
"reads",
")",
")"
] |
resets the value of clip_qual_left for each read in the sff data .
|
train
| false
|
9,404
|
def _dmp_zz_gcd_interpolate(h, x, v, K):
f = []
while (not dmp_zero_p(h, v)):
g = dmp_ground_trunc(h, x, v, K)
f.insert(0, g)
h = dmp_sub(h, g, v, K)
h = dmp_quo_ground(h, x, v, K)
if K.is_negative(dmp_ground_LC(f, (v + 1), K)):
return dmp_neg(f, (v + 1), K)
else:
return f
|
[
"def",
"_dmp_zz_gcd_interpolate",
"(",
"h",
",",
"x",
",",
"v",
",",
"K",
")",
":",
"f",
"=",
"[",
"]",
"while",
"(",
"not",
"dmp_zero_p",
"(",
"h",
",",
"v",
")",
")",
":",
"g",
"=",
"dmp_ground_trunc",
"(",
"h",
",",
"x",
",",
"v",
",",
"K",
")",
"f",
".",
"insert",
"(",
"0",
",",
"g",
")",
"h",
"=",
"dmp_sub",
"(",
"h",
",",
"g",
",",
"v",
",",
"K",
")",
"h",
"=",
"dmp_quo_ground",
"(",
"h",
",",
"x",
",",
"v",
",",
"K",
")",
"if",
"K",
".",
"is_negative",
"(",
"dmp_ground_LC",
"(",
"f",
",",
"(",
"v",
"+",
"1",
")",
",",
"K",
")",
")",
":",
"return",
"dmp_neg",
"(",
"f",
",",
"(",
"v",
"+",
"1",
")",
",",
"K",
")",
"else",
":",
"return",
"f"
] |
interpolate polynomial gcd from integer gcd .
|
train
| false
|
9,406
|
def stopWorker(basedir, quiet, signame='TERM'):
import signal
os.chdir(basedir)
try:
f = open('twistd.pid', 'rt')
except IOError:
raise WorkerNotRunning()
pid = int(f.read().strip())
signum = getattr(signal, ('SIG' + signame))
timer = 0
try:
os.kill(pid, signum)
except OSError as e:
if (e.errno != 3):
raise
time.sleep(0.1)
while (timer < 10):
try:
os.kill(pid, 0)
except OSError:
if (not quiet):
print(('worker process %d is dead' % pid))
return 0
timer += 1
time.sleep(1)
if (not quiet):
print('never saw process go away')
return 1
|
[
"def",
"stopWorker",
"(",
"basedir",
",",
"quiet",
",",
"signame",
"=",
"'TERM'",
")",
":",
"import",
"signal",
"os",
".",
"chdir",
"(",
"basedir",
")",
"try",
":",
"f",
"=",
"open",
"(",
"'twistd.pid'",
",",
"'rt'",
")",
"except",
"IOError",
":",
"raise",
"WorkerNotRunning",
"(",
")",
"pid",
"=",
"int",
"(",
"f",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
")",
"signum",
"=",
"getattr",
"(",
"signal",
",",
"(",
"'SIG'",
"+",
"signame",
")",
")",
"timer",
"=",
"0",
"try",
":",
"os",
".",
"kill",
"(",
"pid",
",",
"signum",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"(",
"e",
".",
"errno",
"!=",
"3",
")",
":",
"raise",
"time",
".",
"sleep",
"(",
"0.1",
")",
"while",
"(",
"timer",
"<",
"10",
")",
":",
"try",
":",
"os",
".",
"kill",
"(",
"pid",
",",
"0",
")",
"except",
"OSError",
":",
"if",
"(",
"not",
"quiet",
")",
":",
"print",
"(",
"(",
"'worker process %d is dead'",
"%",
"pid",
")",
")",
"return",
"0",
"timer",
"+=",
"1",
"time",
".",
"sleep",
"(",
"1",
")",
"if",
"(",
"not",
"quiet",
")",
":",
"print",
"(",
"'never saw process go away'",
")",
"return",
"1"
] |
stop worker process by sending it a signal .
|
train
| true
|
9,407
|
def deg(r):
return ((r / pi) * 180)
|
[
"def",
"deg",
"(",
"r",
")",
":",
"return",
"(",
"(",
"r",
"/",
"pi",
")",
"*",
"180",
")"
] |
convert radians to degrees .
|
train
| false
|
9,409
|
def REGION_TO_RATINGS_BODY():
region_to_bodies = {}
for region in ALL_REGIONS_WITH_CONTENT_RATINGS():
ratings_body_label = GENERIC_RATING_REGION_SLUG
if region.ratingsbody:
ratings_body_label = slugify_iarc_name(region.ratingsbody)
region_to_bodies[region.slug] = ratings_body_label
return region_to_bodies
|
[
"def",
"REGION_TO_RATINGS_BODY",
"(",
")",
":",
"region_to_bodies",
"=",
"{",
"}",
"for",
"region",
"in",
"ALL_REGIONS_WITH_CONTENT_RATINGS",
"(",
")",
":",
"ratings_body_label",
"=",
"GENERIC_RATING_REGION_SLUG",
"if",
"region",
".",
"ratingsbody",
":",
"ratings_body_label",
"=",
"slugify_iarc_name",
"(",
"region",
".",
"ratingsbody",
")",
"region_to_bodies",
"[",
"region",
".",
"slug",
"]",
"=",
"ratings_body_label",
"return",
"region_to_bodies"
] |
return a map of region slugs to ratings body labels for use in serializers and to send to fireplace .
|
train
| false
|
9,410
|
def _remove_private_key(content):
prefix = '-----BEGIN PRIVATE KEY-----'
suffix = '-----END PRIVATE KEY-----'
start = content.find(prefix)
if (start < 0):
return content
trim_start = ((start + len(prefix)) + 5)
end = content.find(suffix, trim_start)
if (end < 0):
end = len(content)
trim_end = (end - 5)
if (trim_end <= trim_start):
return content
return ((content[:trim_start] + '...REMOVED...') + content[trim_end:])
|
[
"def",
"_remove_private_key",
"(",
"content",
")",
":",
"prefix",
"=",
"'-----BEGIN PRIVATE KEY-----'",
"suffix",
"=",
"'-----END PRIVATE KEY-----'",
"start",
"=",
"content",
".",
"find",
"(",
"prefix",
")",
"if",
"(",
"start",
"<",
"0",
")",
":",
"return",
"content",
"trim_start",
"=",
"(",
"(",
"start",
"+",
"len",
"(",
"prefix",
")",
")",
"+",
"5",
")",
"end",
"=",
"content",
".",
"find",
"(",
"suffix",
",",
"trim_start",
")",
"if",
"(",
"end",
"<",
"0",
")",
":",
"end",
"=",
"len",
"(",
"content",
")",
"trim_end",
"=",
"(",
"end",
"-",
"5",
")",
"if",
"(",
"trim_end",
"<=",
"trim_start",
")",
":",
"return",
"content",
"return",
"(",
"(",
"content",
"[",
":",
"trim_start",
"]",
"+",
"'...REMOVED...'",
")",
"+",
"content",
"[",
"trim_end",
":",
"]",
")"
] |
remove most of the contents of a private key file for logging .
|
train
| false
|
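to make the trimming behaviour of _remove_private_key concrete, here is a minimal self-check; the key body is fake:

# fake PEM block embedded in other config text
content = ('user = alice\n'
           '-----BEGIN PRIVATE KEY-----\n'
           'AAAABBBBCCCCDDDDEEEE\n'
           '-----END PRIVATE KEY-----\n'
           'debug = true\n')
redacted = _remove_private_key(content)
# only a few characters at each end of the key body survive;
# the middle is replaced by the '...REMOVED...' marker
assert '...REMOVED...' in redacted
assert 'BBBBCCCCDDDD' not in redacted
print(redacted)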
9,411
|
def CreateResourceInCollectionSample():
client = CreateClient()
col = gdata.docs.data.Resource(type='folder', title='My Sample Folder')
col = client.CreateResource(col)
print 'Created collection:', col.title.text, col.resource_id.text
doc = gdata.docs.data.Resource(type='document', title='My Sample Doc')
doc = client.CreateResource(doc, collection=col)
print 'Created:', doc.title.text, doc.resource_id.text
|
[
"def",
"CreateResourceInCollectionSample",
"(",
")",
":",
"client",
"=",
"CreateClient",
"(",
")",
"col",
"=",
"gdata",
".",
"docs",
".",
"data",
".",
"Resource",
"(",
"type",
"=",
"'folder'",
",",
"title",
"=",
"'My Sample Folder'",
")",
"col",
"=",
"client",
".",
"CreateResource",
"(",
"col",
")",
"print",
"'Created collection:'",
",",
"col",
".",
"title",
".",
"text",
",",
"col",
".",
"resource_id",
".",
"text",
"doc",
"=",
"gdata",
".",
"docs",
".",
"data",
".",
"Resource",
"(",
"type",
"=",
"'document'",
",",
"title",
"=",
"'My Sample Doc'",
")",
"doc",
"=",
"client",
".",
"CreateResource",
"(",
"doc",
",",
"collection",
"=",
"col",
")",
"print",
"'Created:'",
",",
"doc",
".",
"title",
".",
"text",
",",
"doc",
".",
"resource_id",
".",
"text"
] |
create a collection and a document inside it .
|
train
| false
|
9,412
|
def isgeneratorfunction(object):
return bool(((isfunction(object) or ismethod(object)) and (object.func_code.co_flags & CO_GENERATOR)))
|
[
"def",
"isgeneratorfunction",
"(",
"object",
")",
":",
"return",
"bool",
"(",
"(",
"(",
"isfunction",
"(",
"object",
")",
"or",
"ismethod",
"(",
"object",
")",
")",
"and",
"(",
"object",
".",
"func_code",
".",
"co_flags",
"&",
"CO_GENERATOR",
")",
")",
")"
] |
return true if the object is a user-defined generator function .
|
train
| false
|
9,413
|
def new_secure_hash(text_type=None):
if text_type:
return sha1(text_type).hexdigest()
else:
return sha1()
|
[
"def",
"new_secure_hash",
"(",
"text_type",
"=",
"None",
")",
":",
"if",
"text_type",
":",
"return",
"sha1",
"(",
"text_type",
")",
".",
"hexdigest",
"(",
")",
"else",
":",
"return",
"sha1",
"(",
")"
] |
returns either a sha1 hex digest of the given text or a fresh sha1 hash object .
|
train
| false
|
9,414
|
def host_axes(*args, **kwargs):
import matplotlib.pyplot as plt
axes_class = kwargs.pop(u'axes_class', None)
host_axes_class = host_axes_class_factory(axes_class)
fig = kwargs.get(u'figure', None)
if (fig is None):
fig = plt.gcf()
ax = host_axes_class(fig, *args, **kwargs)
fig.add_axes(ax)
plt.draw_if_interactive()
return ax
|
[
"def",
"host_axes",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"import",
"matplotlib",
".",
"pyplot",
"as",
"plt",
"axes_class",
"=",
"kwargs",
".",
"pop",
"(",
"u'axes_class'",
",",
"None",
")",
"host_axes_class",
"=",
"host_axes_class_factory",
"(",
"axes_class",
")",
"fig",
"=",
"kwargs",
".",
"get",
"(",
"u'figure'",
",",
"None",
")",
"if",
"(",
"fig",
"is",
"None",
")",
":",
"fig",
"=",
"plt",
".",
"gcf",
"(",
")",
"ax",
"=",
"host_axes_class",
"(",
"fig",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"fig",
".",
"add_axes",
"(",
"ax",
")",
"plt",
".",
"draw_if_interactive",
"(",
")",
"return",
"ax"
] |
create axes that can act as a host to parasitic axes .
|
train
| false
|
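a hedged usage sketch for host_axes; it assumes the snippet lives in mpl_toolkits.axes_grid1 (where a function of this shape is exported) and that the returned host axes supports twinx() for attaching a parasite axis:

import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import host_axes  # assumed import location

host = host_axes([0.15, 0.1, 0.7, 0.8])  # rect in figure coordinates
par = host.twinx()                       # parasite axis sharing the x-axis
host.plot([0, 1, 2], [0, 1, 2], label='host')
par.plot([0, 1, 2], [0, 3, 2], label='parasite')
host.legend()
plt.show()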
9,416
|
@frappe.whitelist()
def check_attendance_records_exist(course_schedule=None, student_batch=None, date=None):
if course_schedule:
return frappe.get_list(u'Student Attendance', filters={u'course_schedule': course_schedule})
else:
return frappe.get_list(u'Student Attendance', filters={u'student_batch': student_batch, u'date': date})
|
[
"@",
"frappe",
".",
"whitelist",
"(",
")",
"def",
"check_attendance_records_exist",
"(",
"course_schedule",
"=",
"None",
",",
"student_batch",
"=",
"None",
",",
"date",
"=",
"None",
")",
":",
"if",
"course_schedule",
":",
"return",
"frappe",
".",
"get_list",
"(",
"u'Student Attendance'",
",",
"filters",
"=",
"{",
"u'course_schedule'",
":",
"course_schedule",
"}",
")",
"else",
":",
"return",
"frappe",
".",
"get_list",
"(",
"u'Student Attendance'",
",",
"filters",
"=",
"{",
"u'student_batch'",
":",
"student_batch",
",",
"u'date'",
":",
"date",
"}",
")"
] |
check if attendance records exist against the specified course schedule or student batch for the given date .
|
train
| false
|
9,418
|
def prepare_template_vals(dtype, compute_capability, rounding=False):
template_vals = dict()
for key in ('inits', 'finish', 'stats_args', 'mul_by_scale', 'atomic_max', 'cvt_out'):
template_vals[key] = ''
template_vals['common'] = _common_divmod
if rounding:
template_vals['common'] += _common_urand_gen
template_vals['common'] += _common_round['nearest'].get(dtype, '')
template_vals['inits'] += (_init_rand_func + _init_rand_round_func)
template_vals['finish'] += _finish_rand_func
mode = 'random'
else:
mode = 'nearest'
template_vals['common'] += _common_round[mode].get(dtype, '')
template_vals['common'] += _common_max_abs
if (((compute_capability[0] == 3) and (compute_capability[1] < 5)) or (compute_capability[0] < 3)):
template_vals['common'] += _common_kepler
template_vals['type'] = _ew_types[dtype]['type']
template_vals['cvt'] = _ew_types[dtype]['cvt']
if (dtype == 'f2'):
template_vals['common'] += _common_fp16_to_fp32
template_vals['cvt_out'] = 'fp32_to_fp16'
elif (dtype == 'x2'):
template_vals['stats_args'] += ', int* maxabs, float scale0'
template_vals['cvt'] = '(float)'
template_vals['cvt_out'] = 'fp32_to_int16'
template_vals['mul_by_scale'] += '1/scale0 *'
template_vals['atomic_max'] += atomic_max
elif (dtype == 'f4'):
pass
else:
raise ValueError(('Did not understand clss dtype ' + str(dtype)))
return template_vals
|
[
"def",
"prepare_template_vals",
"(",
"dtype",
",",
"compute_capability",
",",
"rounding",
"=",
"False",
")",
":",
"template_vals",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"(",
"'inits'",
",",
"'finish'",
",",
"'stats_args'",
",",
"'mul_by_scale'",
",",
"'atomic_max'",
",",
"'cvt_out'",
")",
":",
"template_vals",
"[",
"key",
"]",
"=",
"''",
"template_vals",
"[",
"'common'",
"]",
"=",
"_common_divmod",
"if",
"rounding",
":",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_urand_gen",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_round",
"[",
"'nearest'",
"]",
".",
"get",
"(",
"dtype",
",",
"''",
")",
"template_vals",
"[",
"'inits'",
"]",
"+=",
"(",
"_init_rand_func",
"+",
"_init_rand_round_func",
")",
"template_vals",
"[",
"'finish'",
"]",
"+=",
"_finish_rand_func",
"mode",
"=",
"'random'",
"else",
":",
"mode",
"=",
"'nearest'",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_round",
"[",
"mode",
"]",
".",
"get",
"(",
"dtype",
",",
"''",
")",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_max_abs",
"if",
"(",
"(",
"(",
"compute_capability",
"[",
"0",
"]",
"==",
"3",
")",
"and",
"(",
"compute_capability",
"[",
"1",
"]",
"<",
"5",
")",
")",
"or",
"(",
"compute_capability",
"[",
"0",
"]",
"<",
"3",
")",
")",
":",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_kepler",
"template_vals",
"[",
"'type'",
"]",
"=",
"_ew_types",
"[",
"dtype",
"]",
"[",
"'type'",
"]",
"template_vals",
"[",
"'cvt'",
"]",
"=",
"_ew_types",
"[",
"dtype",
"]",
"[",
"'cvt'",
"]",
"if",
"(",
"dtype",
"==",
"'f2'",
")",
":",
"template_vals",
"[",
"'common'",
"]",
"+=",
"_common_fp16_to_fp32",
"template_vals",
"[",
"'cvt_out'",
"]",
"=",
"'fp32_to_fp16'",
"elif",
"(",
"dtype",
"==",
"'x2'",
")",
":",
"template_vals",
"[",
"'stats_args'",
"]",
"+=",
"', int* maxabs, float scale0'",
"template_vals",
"[",
"'cvt'",
"]",
"=",
"'(float)'",
"template_vals",
"[",
"'cvt_out'",
"]",
"=",
"'fp32_to_int16'",
"template_vals",
"[",
"'mul_by_scale'",
"]",
"+=",
"'1/scale0 *'",
"template_vals",
"[",
"'atomic_max'",
"]",
"+=",
"atomic_max",
"elif",
"(",
"dtype",
"==",
"'f4'",
")",
":",
"pass",
"else",
":",
"raise",
"ValueError",
"(",
"(",
"'Did not understand clss dtype '",
"+",
"str",
"(",
"dtype",
")",
")",
")",
"return",
"template_vals"
] |
set up template code snippets that are reused across multiple kernels .
|
train
| false
|
9,420
|
def S_ISREG(mode):
return (S_IFMT(mode) == S_IFREG)
|
[
"def",
"S_ISREG",
"(",
"mode",
")",
":",
"return",
"(",
"S_IFMT",
"(",
"mode",
")",
"==",
"S_IFREG",
")"
] |
return true if mode is from a regular file .
|
train
| false
|
9,421
|
def log_mean_exp(a):
max_ = a.max(1)
return (max_ + T.log(T.exp((a - max_.dimshuffle(0, 'x'))).mean(1)))
|
[
"def",
"log_mean_exp",
"(",
"a",
")",
":",
"max_",
"=",
"a",
".",
"max",
"(",
"1",
")",
"return",
"(",
"max_",
"+",
"T",
".",
"log",
"(",
"T",
".",
"exp",
"(",
"(",
"a",
"-",
"max_",
".",
"dimshuffle",
"(",
"0",
",",
"'x'",
")",
")",
")",
".",
"mean",
"(",
"1",
")",
")",
")"
] |
compute the log_mean_exp of elements in a tensor .
|
train
| false
|
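a small sketch checking log_mean_exp against the naive numpy form; it assumes theano.tensor is imported as T, as in the snippet:

import numpy
import theano
import theano.tensor as T

a = T.fmatrix('a')
f = theano.function([a], log_mean_exp(a))

x = numpy.random.rand(4, 3).astype('float32')
# the numerically stable form above should agree with the direct computation
expected = numpy.log(numpy.exp(x).mean(axis=1))
assert numpy.allclose(f(x), expected, atol=1e-5)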
9,422
|
def filter_strip_join(some_list, sep):
return cstr(sep).join((cstr(a).strip() for a in filter(None, some_list)))
|
[
"def",
"filter_strip_join",
"(",
"some_list",
",",
"sep",
")",
":",
"return",
"cstr",
"(",
"sep",
")",
".",
"join",
"(",
"(",
"cstr",
"(",
"a",
")",
".",
"strip",
"(",
")",
"for",
"a",
"in",
"filter",
"(",
"None",
",",
"some_list",
")",
")",
")"
] |
given a list, strip its non-empty elements and join them with the given separator .
|
train
| false
|
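an illustrative call, assuming cstr is frappe's string-coercion helper (roughly str for plain strings):

# falsy entries (None, '') are dropped; the rest are stripped and joined
assert filter_strip_join([' a ', None, 'b ', ''], ', ') == 'a, b'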
9,423
|
def set_default_for_default_log_levels():
extra_log_level_defaults = ['dogpile=INFO', 'routes=INFO']
log.register_options(CONF)
log.set_defaults(default_log_levels=(log.get_default_log_levels() + extra_log_level_defaults))
|
[
"def",
"set_default_for_default_log_levels",
"(",
")",
":",
"extra_log_level_defaults",
"=",
"[",
"'dogpile=INFO'",
",",
"'routes=INFO'",
"]",
"log",
".",
"register_options",
"(",
"CONF",
")",
"log",
".",
"set_defaults",
"(",
"default_log_levels",
"=",
"(",
"log",
".",
"get_default_log_levels",
"(",
")",
"+",
"extra_log_level_defaults",
")",
")"
] |
set the default for the default_log_levels option for keystone .
|
train
| false
|
9,425
|
def post_event(event, channel=None, username=None, api_url=None, hook=None):
if (not api_url):
api_url = _get_api_url()
if (not hook):
hook = _get_hook()
if (not username):
username = _get_username()
if (not channel):
channel = _get_channel()
if (not event):
log.error('message is a required option.')
log.debug('Event: {0}'.format(str(event)))
log.debug('Event data: {0}'.format(str(event['data'])))
message = 'tag: {0}\r\n'.format(event['tag'])
for (key, value) in event['data'].iteritems():
message += '{0}: {1}\r\n'.format(key, value)
result = post_message(channel, username, message, api_url, hook)
return bool(result)
|
[
"def",
"post_event",
"(",
"event",
",",
"channel",
"=",
"None",
",",
"username",
"=",
"None",
",",
"api_url",
"=",
"None",
",",
"hook",
"=",
"None",
")",
":",
"if",
"(",
"not",
"api_url",
")",
":",
"api_url",
"=",
"_get_api_url",
"(",
")",
"if",
"(",
"not",
"hook",
")",
":",
"hook",
"=",
"_get_hook",
"(",
")",
"if",
"(",
"not",
"username",
")",
":",
"username",
"=",
"_get_username",
"(",
")",
"if",
"(",
"not",
"channel",
")",
":",
"channel",
"=",
"_get_channel",
"(",
")",
"if",
"(",
"not",
"event",
")",
":",
"log",
".",
"error",
"(",
"'message is a required option.'",
")",
"log",
".",
"debug",
"(",
"'Event: {0}'",
".",
"format",
"(",
"str",
"(",
"event",
")",
")",
")",
"log",
".",
"debug",
"(",
"'Event data: {0}'",
".",
"format",
"(",
"str",
"(",
"event",
"[",
"'data'",
"]",
")",
")",
")",
"message",
"=",
"'tag: {0}\\r\\n'",
".",
"format",
"(",
"event",
"[",
"'tag'",
"]",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"event",
"[",
"'data'",
"]",
".",
"iteritems",
"(",
")",
":",
"message",
"+=",
"'{0}: {1}\\r\\n'",
".",
"format",
"(",
"key",
",",
"value",
")",
"result",
"=",
"post_message",
"(",
"channel",
",",
"username",
",",
"message",
",",
"api_url",
",",
"hook",
")",
"return",
"bool",
"(",
"result",
")"
] |
send an event to a mattermost channel .
|
train
| true
|
9,426
|
def get_notification_channel_id(notify_channel, profile='telemetry'):
auth = _auth(profile=profile)
notification_channel_id = _retrieve_channel_id(notify_channel)
if (not notification_channel_id):
log.info('{0} channel does not exist, creating.'.format(notify_channel))
post_url = (_get_telemetry_base(profile) + '/notification-channels')
data = {'_type': 'EmailNotificationChannel', 'name': (notify_channel[:notify_channel.find('@')] + 'EscalationPolicy'), 'email': notify_channel}
response = requests.post(post_url, data=json.dumps(data), headers=auth)
if (response.status_code == 200):
log.info('Successfully created EscalationPolicy {0} with EmailNotificationChannel {1}'.format(data.get('name'), notify_channel))
notification_channel_id = response.json().get('_id')
__context__['telemetry.channels'][notify_channel] = notification_channel_id
else:
raise Exception('Failed to created notification channel {0}'.format(notify_channel))
return notification_channel_id
|
[
"def",
"get_notification_channel_id",
"(",
"notify_channel",
",",
"profile",
"=",
"'telemetry'",
")",
":",
"auth",
"=",
"_auth",
"(",
"profile",
"=",
"profile",
")",
"notification_channel_id",
"=",
"_retrieve_channel_id",
"(",
"notify_channel",
")",
"if",
"(",
"not",
"notification_channel_id",
")",
":",
"log",
".",
"info",
"(",
"'{0} channel does not exist, creating.'",
".",
"format",
"(",
"notify_channel",
")",
")",
"post_url",
"=",
"(",
"_get_telemetry_base",
"(",
"profile",
")",
"+",
"'/notification-channels'",
")",
"data",
"=",
"{",
"'_type'",
":",
"'EmailNotificationChannel'",
",",
"'name'",
":",
"(",
"notify_channel",
"[",
":",
"notify_channel",
".",
"find",
"(",
"'@'",
")",
"]",
"+",
"'EscalationPolicy'",
")",
",",
"'email'",
":",
"notify_channel",
"}",
"response",
"=",
"requests",
".",
"post",
"(",
"post_url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
",",
"headers",
"=",
"auth",
")",
"if",
"(",
"response",
".",
"status_code",
"==",
"200",
")",
":",
"log",
".",
"info",
"(",
"'Successfully created EscalationPolicy {0} with EmailNotificationChannel {1}'",
".",
"format",
"(",
"data",
".",
"get",
"(",
"'name'",
")",
",",
"notify_channel",
")",
")",
"notification_channel_id",
"=",
"response",
".",
"json",
"(",
")",
".",
"get",
"(",
"'_id'",
")",
"__context__",
"[",
"'telemetry.channels'",
"]",
"[",
"notify_channel",
"]",
"=",
"notification_channel_id",
"else",
":",
"raise",
"Exception",
"(",
"'Failed to created notification channel {0}'",
".",
"format",
"(",
"notify_channel",
")",
")",
"return",
"notification_channel_id"
] |
given an email address, return its notification channel id, creating the channel if it does not yet exist .
|
train
| true
|
9,428
|
def test_print_op():
b = tensor.fmatrix()
f = theano.function([b], (theano.printing.Print()(b) * 2), mode=mode_with_gpu)
topo = f.maker.fgraph.toposort()
assert (topo[0].op == cuda.gpu_from_host)
assert isinstance(topo[1].op, theano.printing.Print)
assert isinstance(topo[2].op, cuda.GpuElemwise)
assert (topo[3].op == cuda.host_from_gpu)
f(numpy.random.random((5, 5)).astype('float32'))
|
[
"def",
"test_print_op",
"(",
")",
":",
"b",
"=",
"tensor",
".",
"fmatrix",
"(",
")",
"f",
"=",
"theano",
".",
"function",
"(",
"[",
"b",
"]",
",",
"(",
"theano",
".",
"printing",
".",
"Print",
"(",
")",
"(",
"b",
")",
"*",
"2",
")",
",",
"mode",
"=",
"mode_with_gpu",
")",
"topo",
"=",
"f",
".",
"maker",
".",
"fgraph",
".",
"toposort",
"(",
")",
"assert",
"(",
"topo",
"[",
"0",
"]",
".",
"op",
"==",
"cuda",
".",
"gpu_from_host",
")",
"assert",
"isinstance",
"(",
"topo",
"[",
"1",
"]",
".",
"op",
",",
"theano",
".",
"printing",
".",
"Print",
")",
"assert",
"isinstance",
"(",
"topo",
"[",
"2",
"]",
".",
"op",
",",
"cuda",
".",
"GpuElemwise",
")",
"assert",
"(",
"topo",
"[",
"3",
"]",
".",
"op",
"==",
"cuda",
".",
"host_from_gpu",
")",
"f",
"(",
"numpy",
".",
"random",
".",
"random",
"(",
"(",
"5",
",",
"5",
")",
")",
".",
"astype",
"(",
"'float32'",
")",
")"
] |
test that print ops don't block gpu optimization .
|
train
| false
|
9,429
|
def test_accepts_non_list_hosts():
assert (merge('badhosts', [], [], {}) == ['badhosts'])
|
[
"def",
"test_accepts_non_list_hosts",
"(",
")",
":",
"assert",
"(",
"merge",
"(",
"'badhosts'",
",",
"[",
"]",
",",
"[",
"]",
",",
"{",
"}",
")",
"==",
"[",
"'badhosts'",
"]",
")"
] |
coerces given host string to a one-item list .
|
train
| false
|
9,430
|
def get_score(submissions_scores, csm_scores, persisted_block, block):
weight = _get_weight_from_block(persisted_block, block)
(raw_earned, raw_possible, weighted_earned, weighted_possible, attempted) = (_get_score_from_submissions(submissions_scores, block) or _get_score_from_csm(csm_scores, block, weight) or _get_score_from_persisted_or_latest_block(persisted_block, block, weight))
if ((weighted_possible is None) or (weighted_earned is None)):
return None
else:
has_valid_denominator = (weighted_possible > 0.0)
graded = (_get_graded_from_block(persisted_block, block) if has_valid_denominator else False)
return ProblemScore(raw_earned, raw_possible, weighted_earned, weighted_possible, weight, graded, attempted=attempted)
|
[
"def",
"get_score",
"(",
"submissions_scores",
",",
"csm_scores",
",",
"persisted_block",
",",
"block",
")",
":",
"weight",
"=",
"_get_weight_from_block",
"(",
"persisted_block",
",",
"block",
")",
"(",
"raw_earned",
",",
"raw_possible",
",",
"weighted_earned",
",",
"weighted_possible",
",",
"attempted",
")",
"=",
"(",
"_get_score_from_submissions",
"(",
"submissions_scores",
",",
"block",
")",
"or",
"_get_score_from_csm",
"(",
"csm_scores",
",",
"block",
",",
"weight",
")",
"or",
"_get_score_from_persisted_or_latest_block",
"(",
"persisted_block",
",",
"block",
",",
"weight",
")",
")",
"if",
"(",
"(",
"weighted_possible",
"is",
"None",
")",
"or",
"(",
"weighted_earned",
"is",
"None",
")",
")",
":",
"return",
"None",
"else",
":",
"has_valid_denominator",
"=",
"(",
"weighted_possible",
">",
"0.0",
")",
"graded",
"=",
"(",
"_get_graded_from_block",
"(",
"persisted_block",
",",
"block",
")",
"if",
"has_valid_denominator",
"else",
"False",
")",
"return",
"ProblemScore",
"(",
"raw_earned",
",",
"raw_possible",
",",
"weighted_earned",
",",
"weighted_possible",
",",
"weight",
",",
"graded",
",",
"attempted",
"=",
"attempted",
")"
] |
get the score and max_score for the specified user and xblock usage .
|
train
| false
|
9,431
|
def min_sum(cachedir, form='sha1'):
mintar = gen_min(cachedir)
return salt.utils.get_hash(mintar, form)
|
[
"def",
"min_sum",
"(",
"cachedir",
",",
"form",
"=",
"'sha1'",
")",
":",
"mintar",
"=",
"gen_min",
"(",
"cachedir",
")",
"return",
"salt",
".",
"utils",
".",
"get_hash",
"(",
"mintar",
",",
"form",
")"
] |
return the checksum of the current min tarball .
|
train
| true
|
9,432
|
def non_edges(graph):
if graph.is_directed():
for u in graph:
for v in non_neighbors(graph, u):
(yield (u, v))
else:
nodes = set(graph)
while nodes:
u = nodes.pop()
for v in (nodes - set(graph[u])):
(yield (u, v))
|
[
"def",
"non_edges",
"(",
"graph",
")",
":",
"if",
"graph",
".",
"is_directed",
"(",
")",
":",
"for",
"u",
"in",
"graph",
":",
"for",
"v",
"in",
"non_neighbors",
"(",
"graph",
",",
"u",
")",
":",
"(",
"yield",
"(",
"u",
",",
"v",
")",
")",
"else",
":",
"nodes",
"=",
"set",
"(",
"graph",
")",
"while",
"nodes",
":",
"u",
"=",
"nodes",
".",
"pop",
"(",
")",
"for",
"v",
"in",
"(",
"nodes",
"-",
"set",
"(",
"graph",
"[",
"u",
"]",
")",
")",
":",
"(",
"yield",
"(",
"u",
",",
"v",
")",
")"
] |
returns the non-existent edges in the graph .
|
train
| false
|
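a quick check of non_edges on a small networkx graph; for undirected graphs the pair ordering depends on set iteration, so the comparison below normalises each pair:

import networkx as nx

G = nx.path_graph(4)  # edges: 0-1, 1-2, 2-3
missing = {frozenset(e) for e in non_edges(G)}
assert missing == {frozenset({0, 2}), frozenset({0, 3}), frozenset({1, 3})}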
9,434
|
def windowbounds(width, height):
global next_window_x, next_window_y
(r, b) = ((next_window_x + width), (next_window_y + height))
if (r > screenbounds[2]):
next_window_x = 16
if (b > screenbounds[3]):
next_window_y = 44
(l, t) = (next_window_x, next_window_y)
(r, b) = ((next_window_x + width), (next_window_y + height))
(next_window_x, next_window_y) = ((next_window_x + 8), (next_window_y + 20))
return (l, t, r, b)
|
[
"def",
"windowbounds",
"(",
"width",
",",
"height",
")",
":",
"global",
"next_window_x",
",",
"next_window_y",
"(",
"r",
",",
"b",
")",
"=",
"(",
"(",
"next_window_x",
"+",
"width",
")",
",",
"(",
"next_window_y",
"+",
"height",
")",
")",
"if",
"(",
"r",
">",
"screenbounds",
"[",
"2",
"]",
")",
":",
"next_window_x",
"=",
"16",
"if",
"(",
"b",
">",
"screenbounds",
"[",
"3",
"]",
")",
":",
"next_window_y",
"=",
"44",
"(",
"l",
",",
"t",
")",
"=",
"(",
"next_window_x",
",",
"next_window_y",
")",
"(",
"r",
",",
"b",
")",
"=",
"(",
"(",
"next_window_x",
"+",
"width",
")",
",",
"(",
"next_window_y",
"+",
"height",
")",
")",
"(",
"next_window_x",
",",
"next_window_y",
")",
"=",
"(",
"(",
"next_window_x",
"+",
"8",
")",
",",
"(",
"next_window_y",
"+",
"20",
")",
")",
"return",
"(",
"l",
",",
"t",
",",
"r",
",",
"b",
")"
] |
return sensible window bounds .
|
train
| false
|
9,435
|
def http_status_message(code):
return HTTP_STATUS_CODES.get(code, '')
|
[
"def",
"http_status_message",
"(",
"code",
")",
":",
"return",
"HTTP_STATUS_CODES",
".",
"get",
"(",
"code",
",",
"''",
")"
] |
maps an http status code to the textual status .
|
train
| false
|
9,436
|
def doctest_suite(module_names):
import doctest
suite = TestSuite()
for mod in load_modules(module_names):
suite.addTest(doctest.DocTestSuite(mod))
return suite
|
[
"def",
"doctest_suite",
"(",
"module_names",
")",
":",
"import",
"doctest",
"suite",
"=",
"TestSuite",
"(",
")",
"for",
"mod",
"in",
"load_modules",
"(",
"module_names",
")",
":",
"suite",
".",
"addTest",
"(",
"doctest",
".",
"DocTestSuite",
"(",
"mod",
")",
")",
"return",
"suite"
] |
makes a test suite from doctests .
|
train
| false
|
9,437
|
def _acquireLock():
if _lock:
_lock.acquire()
|
[
"def",
"_acquireLock",
"(",
")",
":",
"if",
"_lock",
":",
"_lock",
".",
"acquire",
"(",
")"
] |
acquire the module-level lock for serializing access to shared data .
|
train
| false
|
9,439
|
def beneficiary_data():
return s3_rest_controller()
|
[
"def",
"beneficiary_data",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] |
beneficiary data: restful crud controller .
|
train
| false
|
9,441
|
def put_hook(variable, hook_fn, *args):
return printing.Print(global_fn=(lambda _, x: hook_fn(x, *args)))(variable)
|
[
"def",
"put_hook",
"(",
"variable",
",",
"hook_fn",
",",
"*",
"args",
")",
":",
"return",
"printing",
".",
"Print",
"(",
"global_fn",
"=",
"(",
"lambda",
"_",
",",
"x",
":",
"hook_fn",
"(",
"x",
",",
"*",
"args",
")",
")",
")",
"(",
"variable",
")"
] |
put a hook on a theano variable .
|
train
| false
|
9,442
|
def with_feature(f):
wrapper = f
if has_this_feature(f):
def wrapper(*args):
call_feature(args[0])
return f(*args)
return wrapper
|
[
"def",
"with_feature",
"(",
"f",
")",
":",
"wrapper",
"=",
"f",
"if",
"has_this_feature",
"(",
"f",
")",
":",
"def",
"wrapper",
"(",
"*",
"args",
")",
":",
"call_feature",
"(",
"args",
"[",
"0",
"]",
")",
"return",
"f",
"(",
"*",
"args",
")",
"return",
"wrapper"
] |
decorator that calls a feature hook before the wrapped function when the feature is present .
|
train
| false
|
9,443
|
@conf.commands.register
def chexdump(x, dump=False):
x = str(x)
s = str(', '.join(map((lambda x: ('%#04x' % ord(x))), x)))
if dump:
return s
else:
print s
|
[
"@",
"conf",
".",
"commands",
".",
"register",
"def",
"chexdump",
"(",
"x",
",",
"dump",
"=",
"False",
")",
":",
"x",
"=",
"str",
"(",
"x",
")",
"s",
"=",
"str",
"(",
"', '",
".",
"join",
"(",
"map",
"(",
"(",
"lambda",
"x",
":",
"(",
"'%#04x'",
"%",
"ord",
"(",
"x",
")",
")",
")",
",",
"x",
")",
")",
")",
"if",
"dump",
":",
"return",
"s",
"else",
":",
"print",
"s"
] |
build a per-byte hexadecimal representation .
|
train
| false
|
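what chexdump produces for a tiny input (dump=True returns the string instead of printing it):

s = chexdump('AB', dump=True)
assert s == '0x41, 0x42'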
9,445
|
def softwareswitch(address='127.0.0.1', port=6633, max_retry_delay=16, dpid=None, extra=None, __INSTANCE__=None):
from pox.core import core
core.register('datapaths', {})
class ExpiringSwitch(ExpireMixin, SoftwareSwitch, ):
pass
do_launch(ExpiringSwitch, address, port, max_retry_delay, dpid, extra_args=extra)
|
[
"def",
"softwareswitch",
"(",
"address",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"6633",
",",
"max_retry_delay",
"=",
"16",
",",
"dpid",
"=",
"None",
",",
"extra",
"=",
"None",
",",
"__INSTANCE__",
"=",
"None",
")",
":",
"from",
"pox",
".",
"core",
"import",
"core",
"core",
".",
"register",
"(",
"'datapaths'",
",",
"{",
"}",
")",
"class",
"ExpiringSwitch",
"(",
"ExpireMixin",
",",
"SoftwareSwitch",
",",
")",
":",
"pass",
"do_launch",
"(",
"ExpiringSwitch",
",",
"address",
",",
"port",
",",
"max_retry_delay",
",",
"dpid",
",",
"extra_args",
"=",
"extra",
")"
] |
launches a softwareswitch (not particularly useful by itself) .
|
train
| false
|
9,447
|
def user_backends_data(user, backends, storage):
available = list(load_backends(backends).keys())
values = {'associated': [], 'not_associated': available, 'backends': available}
if user_is_authenticated(user):
associated = storage.user.get_social_auth_for_user(user)
not_associated = list((set(available) - set((assoc.provider for assoc in associated))))
values['associated'] = associated
values['not_associated'] = not_associated
return values
|
[
"def",
"user_backends_data",
"(",
"user",
",",
"backends",
",",
"storage",
")",
":",
"available",
"=",
"list",
"(",
"load_backends",
"(",
"backends",
")",
".",
"keys",
"(",
")",
")",
"values",
"=",
"{",
"'associated'",
":",
"[",
"]",
",",
"'not_associated'",
":",
"available",
",",
"'backends'",
":",
"available",
"}",
"if",
"user_is_authenticated",
"(",
"user",
")",
":",
"associated",
"=",
"storage",
".",
"user",
".",
"get_social_auth_for_user",
"(",
"user",
")",
"not_associated",
"=",
"list",
"(",
"(",
"set",
"(",
"available",
")",
"-",
"set",
"(",
"(",
"assoc",
".",
"provider",
"for",
"assoc",
"in",
"associated",
")",
")",
")",
")",
"values",
"[",
"'associated'",
"]",
"=",
"associated",
"values",
"[",
"'not_associated'",
"]",
"=",
"not_associated",
"return",
"values"
] |
return backends data for the given user .
|
train
| false
|
9,448
|
def call_plugin(kind, *args, **kwargs):
if (not (kind in plugin_store)):
raise ValueError(('Invalid function (%s) requested.' % kind))
plugin_funcs = plugin_store[kind]
if (len(plugin_funcs) == 0):
msg = 'No suitable plugin registered for %s.\n\nYou may load I/O plugins with the `skimage.io.use_plugin` command. A list of all available plugins are shown in the `skimage.io` docstring.'
raise RuntimeError((msg % kind))
plugin = kwargs.pop('plugin', None)
if (plugin is None):
(_, func) = plugin_funcs[0]
else:
_load(plugin)
try:
func = [f for (p, f) in plugin_funcs if (p == plugin)][0]
except IndexError:
raise RuntimeError(('Could not find the plugin "%s" for %s.' % (plugin, kind)))
return func(*args, **kwargs)
|
[
"def",
"call_plugin",
"(",
"kind",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"(",
"kind",
"in",
"plugin_store",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Invalid function (%s) requested.'",
"%",
"kind",
")",
")",
"plugin_funcs",
"=",
"plugin_store",
"[",
"kind",
"]",
"if",
"(",
"len",
"(",
"plugin_funcs",
")",
"==",
"0",
")",
":",
"msg",
"=",
"'No suitable plugin registered for %s.\\n\\nYou may load I/O plugins with the `skimage.io.use_plugin` command. A list of all available plugins are shown in the `skimage.io` docstring.'",
"raise",
"RuntimeError",
"(",
"(",
"msg",
"%",
"kind",
")",
")",
"plugin",
"=",
"kwargs",
".",
"pop",
"(",
"'plugin'",
",",
"None",
")",
"if",
"(",
"plugin",
"is",
"None",
")",
":",
"(",
"_",
",",
"func",
")",
"=",
"plugin_funcs",
"[",
"0",
"]",
"else",
":",
"_load",
"(",
"plugin",
")",
"try",
":",
"func",
"=",
"[",
"f",
"for",
"(",
"p",
",",
"f",
")",
"in",
"plugin_funcs",
"if",
"(",
"p",
"==",
"plugin",
")",
"]",
"[",
"0",
"]",
"except",
"IndexError",
":",
"raise",
"RuntimeError",
"(",
"(",
"'Could not find the plugin \"%s\" for %s.'",
"%",
"(",
"plugin",
",",
"kind",
")",
")",
")",
"return",
"func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] |
find the appropriate plugin of kind and execute it .
|
train
| false
|
9,449
|
def find_actions(namespace, action_prefix):
_deprecated()
actions = {}
for (key, value) in iteritems(namespace):
if key.startswith(action_prefix):
actions[key[len(action_prefix):]] = analyse_action(value)
return actions
|
[
"def",
"find_actions",
"(",
"namespace",
",",
"action_prefix",
")",
":",
"_deprecated",
"(",
")",
"actions",
"=",
"{",
"}",
"for",
"(",
"key",
",",
"value",
")",
"in",
"iteritems",
"(",
"namespace",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"action_prefix",
")",
":",
"actions",
"[",
"key",
"[",
"len",
"(",
"action_prefix",
")",
":",
"]",
"]",
"=",
"analyse_action",
"(",
"value",
")",
"return",
"actions"
] |
find all the actions in the namespace .
|
train
| true
|
9,451
|
def endpoint_get(service, profile=None, **connection_args):
auth(profile, **connection_args)
services = service_list(profile, **connection_args)
if (service not in services):
return {'Error': 'Could not find the specified service'}
service_id = services[service]['id']
endpoints = endpoint_list(profile, **connection_args)
for endpoint in endpoints:
if (endpoints[endpoint]['service_id'] == service_id):
return endpoints[endpoint]
return {'Error': 'Could not find endpoint for the specified service'}
|
[
"def",
"endpoint_get",
"(",
"service",
",",
"profile",
"=",
"None",
",",
"**",
"connection_args",
")",
":",
"auth",
"(",
"profile",
",",
"**",
"connection_args",
")",
"services",
"=",
"service_list",
"(",
"profile",
",",
"**",
"connection_args",
")",
"if",
"(",
"service",
"not",
"in",
"services",
")",
":",
"return",
"{",
"'Error'",
":",
"'Could not find the specified service'",
"}",
"service_id",
"=",
"services",
"[",
"service",
"]",
"[",
"'id'",
"]",
"endpoints",
"=",
"endpoint_list",
"(",
"profile",
",",
"**",
"connection_args",
")",
"for",
"endpoint",
"in",
"endpoints",
":",
"if",
"(",
"endpoints",
"[",
"endpoint",
"]",
"[",
"'service_id'",
"]",
"==",
"service_id",
")",
":",
"return",
"endpoints",
"[",
"endpoint",
"]",
"return",
"{",
"'Error'",
":",
"'Could not find endpoint for the specified service'",
"}"
] |
return a specific endpoint .
|
train
| false
|
9,453
|
def get_navigator_audit_max_file_size():
size = get_conf().get(_CONF_NAVIGATOR_AUDIT_MAX_FILE_SIZE, '100')
return (('%sMB' % size.strip()) if size else '100MB')
|
[
"def",
"get_navigator_audit_max_file_size",
"(",
")",
":",
"size",
"=",
"get_conf",
"(",
")",
".",
"get",
"(",
"_CONF_NAVIGATOR_AUDIT_MAX_FILE_SIZE",
",",
"'100'",
")",
"return",
"(",
"(",
"'%sMB'",
"%",
"size",
".",
"strip",
"(",
")",
")",
"if",
"size",
"else",
"'100MB'",
")"
] |
returns the navigator audit max file size .
|
train
| false
|
9,454
|
def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None, master_config_path=None, master_config=None, providers_config_path=None, providers_config=None, profiles_config_path=None, profiles_config=None):
if path:
config_dir = os.path.dirname(path)
else:
config_dir = salt.syspaths.CONFIG_DIR
overrides = load_config(path, env_var, os.path.join(config_dir, 'cloud'))
if (defaults is None):
defaults = DEFAULT_CLOUD_OPTS.copy()
defaults.update(overrides)
overrides = defaults
overrides.update(salt.config.include_config(overrides['default_include'], path, verbose=False))
include = overrides.get('include', [])
overrides.update(salt.config.include_config(include, path, verbose=True))
if (('master_config' in overrides) and (master_config_path is None)):
master_config_path = overrides['master_config']
elif (('master_config' not in overrides) and (not master_config) and (not master_config_path)):
master_config_path = os.path.join(config_dir, 'master')
master_config_path = _absolute_path(master_config_path, config_dir)
if (('providers_config' in overrides) and (providers_config_path is None)):
providers_config_path = overrides['providers_config']
elif (('providers_config' not in overrides) and (not providers_config) and (not providers_config_path)):
providers_config_path = os.path.join(config_dir, 'cloud.providers')
providers_config_path = _absolute_path(providers_config_path, config_dir)
if (('profiles_config' in overrides) and (profiles_config_path is None)):
profiles_config_path = overrides['profiles_config']
elif (('profiles_config' not in overrides) and (not profiles_config) and (not profiles_config_path)):
profiles_config_path = os.path.join(config_dir, 'cloud.profiles')
profiles_config_path = _absolute_path(profiles_config_path, config_dir)
deploy_scripts_search_path = overrides.get('deploy_scripts_search_path', defaults.get('deploy_scripts_search_path', 'cloud.deploy.d'))
if isinstance(deploy_scripts_search_path, string_types):
deploy_scripts_search_path = [deploy_scripts_search_path]
for (idx, entry) in enumerate(deploy_scripts_search_path[:]):
if (not os.path.isabs(entry)):
entry = os.path.join(os.path.dirname(path), entry)
if os.path.isdir(entry):
deploy_scripts_search_path[idx] = entry
continue
deploy_scripts_search_path.pop(idx)
deploy_scripts_search_path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'cloud', 'deploy')))
overrides.update(deploy_scripts_search_path=tuple(deploy_scripts_search_path))
if ((master_config_path is not None) and (master_config is not None)):
raise salt.exceptions.SaltCloudConfigError('Only pass `master_config` or `master_config_path`, not both.')
elif ((master_config_path is None) and (master_config is None)):
master_config = salt.config.master_config(overrides.get('master_config', os.path.join(salt.syspaths.CONFIG_DIR, 'master')))
elif ((master_config_path is not None) and (master_config is None)):
master_config = salt.config.master_config(master_config_path)
del master_config['cachedir']
master_config.update(overrides)
overrides = master_config
if ((providers_config_path is not None) and (providers_config is not None)):
raise salt.exceptions.SaltCloudConfigError('Only pass `providers_config` or `providers_config_path`, not both.')
elif ((providers_config_path is None) and (providers_config is None)):
providers_config_path = overrides.get('providers_config', os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers'))
if ((profiles_config_path is not None) and (profiles_config is not None)):
raise salt.exceptions.SaltCloudConfigError('Only pass `profiles_config` or `profiles_config_path`, not both.')
elif ((profiles_config_path is None) and (profiles_config is None)):
profiles_config_path = overrides.get('profiles_config', os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.profiles'))
opts = apply_cloud_config(overrides, defaults)
if ('providers' in opts):
if (providers_config is not None):
raise salt.exceptions.SaltCloudConfigError('Do not mix the old cloud providers configuration with the passing a pre-configured providers configuration dictionary.')
if (providers_config_path is not None):
providers_confd = os.path.join(os.path.dirname(providers_config_path), 'cloud.providers.d', '*')
if (os.path.isfile(providers_config_path) or glob.glob(providers_confd)):
raise salt.exceptions.SaltCloudConfigError('Do not mix the old cloud providers configuration with the new one. The providers configuration should now go in the file `{0}` or a separate `*.conf` file within `cloud.providers.d/` which is relative to `{0}`.'.format(os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers')))
providers_config = opts['providers']
elif (providers_config_path is not None):
providers_config = cloud_providers_config(providers_config_path)
opts['providers'] = providers_config
if (profiles_config is None):
profiles_config = vm_profiles_config(profiles_config_path, providers_config)
opts['profiles'] = profiles_config
apply_sdb(opts)
prepend_root_dirs = ['cachedir']
prepend_root_dir(opts, prepend_root_dirs)
return opts
|
[
"def",
"cloud_config",
"(",
"path",
",",
"env_var",
"=",
"'SALT_CLOUD_CONFIG'",
",",
"defaults",
"=",
"None",
",",
"master_config_path",
"=",
"None",
",",
"master_config",
"=",
"None",
",",
"providers_config_path",
"=",
"None",
",",
"providers_config",
"=",
"None",
",",
"profiles_config_path",
"=",
"None",
",",
"profiles_config",
"=",
"None",
")",
":",
"if",
"path",
":",
"config_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
"else",
":",
"config_dir",
"=",
"salt",
".",
"syspaths",
".",
"CONFIG_DIR",
"overrides",
"=",
"load_config",
"(",
"path",
",",
"env_var",
",",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"'cloud'",
")",
")",
"if",
"(",
"defaults",
"is",
"None",
")",
":",
"defaults",
"=",
"DEFAULT_CLOUD_OPTS",
".",
"copy",
"(",
")",
"defaults",
".",
"update",
"(",
"overrides",
")",
"overrides",
"=",
"defaults",
"overrides",
".",
"update",
"(",
"salt",
".",
"config",
".",
"include_config",
"(",
"overrides",
"[",
"'default_include'",
"]",
",",
"path",
",",
"verbose",
"=",
"False",
")",
")",
"include",
"=",
"overrides",
".",
"get",
"(",
"'include'",
",",
"[",
"]",
")",
"overrides",
".",
"update",
"(",
"salt",
".",
"config",
".",
"include_config",
"(",
"include",
",",
"path",
",",
"verbose",
"=",
"True",
")",
")",
"if",
"(",
"(",
"'master_config'",
"in",
"overrides",
")",
"and",
"(",
"master_config_path",
"is",
"None",
")",
")",
":",
"master_config_path",
"=",
"overrides",
"[",
"'master_config'",
"]",
"elif",
"(",
"(",
"'master_config'",
"not",
"in",
"overrides",
")",
"and",
"(",
"not",
"master_config",
")",
"and",
"(",
"not",
"master_config_path",
")",
")",
":",
"master_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"'master'",
")",
"master_config_path",
"=",
"_absolute_path",
"(",
"master_config_path",
",",
"config_dir",
")",
"if",
"(",
"(",
"'providers_config'",
"in",
"overrides",
")",
"and",
"(",
"providers_config_path",
"is",
"None",
")",
")",
":",
"providers_config_path",
"=",
"overrides",
"[",
"'providers_config'",
"]",
"elif",
"(",
"(",
"'providers_config'",
"not",
"in",
"overrides",
")",
"and",
"(",
"not",
"providers_config",
")",
"and",
"(",
"not",
"providers_config_path",
")",
")",
":",
"providers_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"'cloud.providers'",
")",
"providers_config_path",
"=",
"_absolute_path",
"(",
"providers_config_path",
",",
"config_dir",
")",
"if",
"(",
"(",
"'profiles_config'",
"in",
"overrides",
")",
"and",
"(",
"profiles_config_path",
"is",
"None",
")",
")",
":",
"profiles_config_path",
"=",
"overrides",
"[",
"'profiles_config'",
"]",
"elif",
"(",
"(",
"'profiles_config'",
"not",
"in",
"overrides",
")",
"and",
"(",
"not",
"profiles_config",
")",
"and",
"(",
"not",
"profiles_config_path",
")",
")",
":",
"profiles_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"'cloud.profiles'",
")",
"profiles_config_path",
"=",
"_absolute_path",
"(",
"profiles_config_path",
",",
"config_dir",
")",
"deploy_scripts_search_path",
"=",
"overrides",
".",
"get",
"(",
"'deploy_scripts_search_path'",
",",
"defaults",
".",
"get",
"(",
"'deploy_scripts_search_path'",
",",
"'cloud.deploy.d'",
")",
")",
"if",
"isinstance",
"(",
"deploy_scripts_search_path",
",",
"string_types",
")",
":",
"deploy_scripts_search_path",
"=",
"[",
"deploy_scripts_search_path",
"]",
"for",
"(",
"idx",
",",
"entry",
")",
"in",
"enumerate",
"(",
"deploy_scripts_search_path",
"[",
":",
"]",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"entry",
")",
")",
":",
"entry",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
",",
"entry",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"entry",
")",
":",
"deploy_scripts_search_path",
"[",
"idx",
"]",
"=",
"entry",
"continue",
"deploy_scripts_search_path",
".",
"pop",
"(",
"idx",
")",
"deploy_scripts_search_path",
".",
"append",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'..'",
",",
"'cloud'",
",",
"'deploy'",
")",
")",
")",
"overrides",
".",
"update",
"(",
"deploy_scripts_search_path",
"=",
"tuple",
"(",
"deploy_scripts_search_path",
")",
")",
"if",
"(",
"(",
"master_config_path",
"is",
"not",
"None",
")",
"and",
"(",
"master_config",
"is",
"not",
"None",
")",
")",
":",
"raise",
"salt",
".",
"exceptions",
".",
"SaltCloudConfigError",
"(",
"'Only pass `master_config` or `master_config_path`, not both.'",
")",
"elif",
"(",
"(",
"master_config_path",
"is",
"None",
")",
"and",
"(",
"master_config",
"is",
"None",
")",
")",
":",
"master_config",
"=",
"salt",
".",
"config",
".",
"master_config",
"(",
"overrides",
".",
"get",
"(",
"'master_config'",
",",
"os",
".",
"path",
".",
"join",
"(",
"salt",
".",
"syspaths",
".",
"CONFIG_DIR",
",",
"'master'",
")",
")",
")",
"elif",
"(",
"(",
"master_config_path",
"is",
"not",
"None",
")",
"and",
"(",
"master_config",
"is",
"None",
")",
")",
":",
"master_config",
"=",
"salt",
".",
"config",
".",
"master_config",
"(",
"master_config_path",
")",
"del",
"master_config",
"[",
"'cachedir'",
"]",
"master_config",
".",
"update",
"(",
"overrides",
")",
"overrides",
"=",
"master_config",
"if",
"(",
"(",
"providers_config_path",
"is",
"not",
"None",
")",
"and",
"(",
"providers_config",
"is",
"not",
"None",
")",
")",
":",
"raise",
"salt",
".",
"exceptions",
".",
"SaltCloudConfigError",
"(",
"'Only pass `providers_config` or `providers_config_path`, not both.'",
")",
"elif",
"(",
"(",
"providers_config_path",
"is",
"None",
")",
"and",
"(",
"providers_config",
"is",
"None",
")",
")",
":",
"providers_config_path",
"=",
"overrides",
".",
"get",
"(",
"'providers_config'",
",",
"os",
".",
"path",
".",
"join",
"(",
"salt",
".",
"syspaths",
".",
"CONFIG_DIR",
",",
"'cloud.providers'",
")",
")",
"if",
"(",
"(",
"profiles_config_path",
"is",
"not",
"None",
")",
"and",
"(",
"profiles_config",
"is",
"not",
"None",
")",
")",
":",
"raise",
"salt",
".",
"exceptions",
".",
"SaltCloudConfigError",
"(",
"'Only pass `profiles_config` or `profiles_config_path`, not both.'",
")",
"elif",
"(",
"(",
"profiles_config_path",
"is",
"None",
")",
"and",
"(",
"profiles_config",
"is",
"None",
")",
")",
":",
"profiles_config_path",
"=",
"overrides",
".",
"get",
"(",
"'profiles_config'",
",",
"os",
".",
"path",
".",
"join",
"(",
"salt",
".",
"syspaths",
".",
"CONFIG_DIR",
",",
"'cloud.profiles'",
")",
")",
"opts",
"=",
"apply_cloud_config",
"(",
"overrides",
",",
"defaults",
")",
"if",
"(",
"'providers'",
"in",
"opts",
")",
":",
"if",
"(",
"providers_config",
"is",
"not",
"None",
")",
":",
"raise",
"salt",
".",
"exceptions",
".",
"SaltCloudConfigError",
"(",
"'Do not mix the old cloud providers configuration with the passing a pre-configured providers configuration dictionary.'",
")",
"if",
"(",
"providers_config_path",
"is",
"not",
"None",
")",
":",
"providers_confd",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"providers_config_path",
")",
",",
"'cloud.providers.d'",
",",
"'*'",
")",
"if",
"(",
"os",
".",
"path",
".",
"isfile",
"(",
"providers_config_path",
")",
"or",
"glob",
".",
"glob",
"(",
"providers_confd",
")",
")",
":",
"raise",
"salt",
".",
"exceptions",
".",
"SaltCloudConfigError",
"(",
"'Do not mix the old cloud providers configuration with the new one. The providers configuration should now go in the file `{0}` or a separate `*.conf` file within `cloud.providers.d/` which is relative to `{0}`.'",
".",
"format",
"(",
"os",
".",
"path",
".",
"join",
"(",
"salt",
".",
"syspaths",
".",
"CONFIG_DIR",
",",
"'cloud.providers'",
")",
")",
")",
"providers_config",
"=",
"opts",
"[",
"'providers'",
"]",
"elif",
"(",
"providers_config_path",
"is",
"not",
"None",
")",
":",
"providers_config",
"=",
"cloud_providers_config",
"(",
"providers_config_path",
")",
"opts",
"[",
"'providers'",
"]",
"=",
"providers_config",
"if",
"(",
"profiles_config",
"is",
"None",
")",
":",
"profiles_config",
"=",
"vm_profiles_config",
"(",
"profiles_config_path",
",",
"providers_config",
")",
"opts",
"[",
"'profiles'",
"]",
"=",
"profiles_config",
"apply_sdb",
"(",
"opts",
")",
"prepend_root_dirs",
"=",
"[",
"'cachedir'",
"]",
"prepend_root_dir",
"(",
"opts",
",",
"prepend_root_dirs",
")",
"return",
"opts"
] |
read in the salt cloud config and return the options .
|
train
| true
|
9,456
|
def _server_version(engine):
conn = engine.connect()
version = getattr(engine.dialect, 'server_version_info', ())
conn.close()
return version
|
[
"def",
"_server_version",
"(",
"engine",
")",
":",
"conn",
"=",
"engine",
".",
"connect",
"(",
")",
"version",
"=",
"getattr",
"(",
"engine",
".",
"dialect",
",",
"'server_version_info'",
",",
"(",
")",
")",
"conn",
".",
"close",
"(",
")",
"return",
"version"
] |
return a server_version_info tuple .
|
train
| false
|
9,457
|
def embed_code_links(app, exception):
try:
if (exception is not None):
return
print 'Embedding documentation hyperlinks in examples..'
doc_resolvers = {}
doc_resolvers['sklearn'] = SphinxDocLinkResolver('http://scikit-learn.org/stable')
doc_resolvers['matplotlib'] = SphinxDocLinkResolver('http://matplotlib.org')
doc_resolvers['numpy'] = SphinxDocLinkResolver('http://docs.scipy.org/doc/numpy-1.6.0')
doc_resolvers['scipy'] = SphinxDocLinkResolver('http://docs.scipy.org/doc/scipy-0.11.0/reference')
doc_resolvers['pandas'] = SphinxDocLinkResolver('http://pandas.pydata.org')
doc_resolvers['statsmodels'] = SphinxDocLinkResolver('http://statsmodels.sourceforge.net')
example_dir = os.path.join(app.builder.srcdir, 'auto_examples')
html_example_dir = os.path.abspath(os.path.join(app.builder.outdir, 'auto_examples'))
link_pattern = '<a href="%s">%s</a>'
orig_pattern = '<span class="n">%s</span>'
period = '<span class="o">.</span>'
for (dirpath, _, filenames) in os.walk(html_example_dir):
for fname in filenames:
print (' DCTB processing: %s' % fname)
full_fname = os.path.join(html_example_dir, dirpath, fname)
subpath = dirpath[(len(html_example_dir) + 1):]
pickle_fname = os.path.join(example_dir, subpath, (fname[:(-5)] + '_codeobj.pickle'))
if os.path.exists(pickle_fname):
with open(pickle_fname, 'rb') as fid:
example_code_obj = cPickle.load(fid)
fid.close()
str_repl = {}
for (name, cobj) in example_code_obj.iteritems():
this_module = cobj['module'].split('.')[0]
if (this_module not in doc_resolvers):
continue
link = doc_resolvers[this_module].resolve(cobj, full_fname)
if (link is not None):
parts = name.split('.')
name_html = (orig_pattern % parts[0])
for part in parts[1:]:
name_html += (period + (orig_pattern % part))
str_repl[name_html] = (link_pattern % (link, name_html))
if (len(str_repl) > 0):
with open(full_fname, 'rb') as fid:
lines_in = fid.readlines()
with open(full_fname, 'wb') as fid:
for line in lines_in:
line = line.decode('utf-8')
for (name, link) in str_repl.iteritems():
line = line.replace(name, link)
fid.write(line.encode('utf-8'))
except urllib2.HTTPError as e:
print 'The following HTTP Error has occurred:\n'
print e.code
except urllib2.URLError as e:
print '\n...\nWarning: Embedding the documentation hyperlinks requires internet access.\nPlease check your network connection.\nUnable to continue embedding due to a URL Error: \n'
print e.args
print '[done]'
|
[
"def",
"embed_code_links",
"(",
"app",
",",
"exception",
")",
":",
"try",
":",
"if",
"(",
"exception",
"is",
"not",
"None",
")",
":",
"return",
"print",
"'Embedding documentation hyperlinks in examples..'",
"doc_resolvers",
"=",
"{",
"}",
"doc_resolvers",
"[",
"'sklearn'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://scikit-learn.org/stable'",
")",
"doc_resolvers",
"[",
"'matplotlib'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://matplotlib.org'",
")",
"doc_resolvers",
"[",
"'numpy'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://docs.scipy.org/doc/numpy-1.6.0'",
")",
"doc_resolvers",
"[",
"'scipy'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://docs.scipy.org/doc/scipy-0.11.0/reference'",
")",
"doc_resolvers",
"[",
"'pandas'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://pandas.pydata.org'",
")",
"doc_resolvers",
"[",
"'statsmodels'",
"]",
"=",
"SphinxDocLinkResolver",
"(",
"'http://statsmodels.sourceforge.net'",
")",
"example_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app",
".",
"builder",
".",
"srcdir",
",",
"'auto_examples'",
")",
"html_example_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"app",
".",
"builder",
".",
"outdir",
",",
"'auto_examples'",
")",
")",
"link_pattern",
"=",
"'<a href=\"%s\">%s</a>'",
"orig_pattern",
"=",
"'<span class=\"n\">%s</span>'",
"period",
"=",
"'<span class=\"o\">.</span>'",
"for",
"(",
"dirpath",
",",
"_",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"html_example_dir",
")",
":",
"for",
"fname",
"in",
"filenames",
":",
"print",
"(",
"' DCTB processing: %s'",
"%",
"fname",
")",
"full_fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"html_example_dir",
",",
"dirpath",
",",
"fname",
")",
"subpath",
"=",
"dirpath",
"[",
"(",
"len",
"(",
"html_example_dir",
")",
"+",
"1",
")",
":",
"]",
"pickle_fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"example_dir",
",",
"subpath",
",",
"(",
"fname",
"[",
":",
"(",
"-",
"5",
")",
"]",
"+",
"'_codeobj.pickle'",
")",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pickle_fname",
")",
":",
"with",
"open",
"(",
"pickle_fname",
",",
"'rb'",
")",
"as",
"fid",
":",
"example_code_obj",
"=",
"cPickle",
".",
"load",
"(",
"fid",
")",
"fid",
".",
"close",
"(",
")",
"str_repl",
"=",
"{",
"}",
"for",
"(",
"name",
",",
"cobj",
")",
"in",
"example_code_obj",
".",
"iteritems",
"(",
")",
":",
"this_module",
"=",
"cobj",
"[",
"'module'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"if",
"(",
"this_module",
"not",
"in",
"doc_resolvers",
")",
":",
"continue",
"link",
"=",
"doc_resolvers",
"[",
"this_module",
"]",
".",
"resolve",
"(",
"cobj",
",",
"full_fname",
")",
"if",
"(",
"link",
"is",
"not",
"None",
")",
":",
"parts",
"=",
"name",
".",
"split",
"(",
"'.'",
")",
"name_html",
"=",
"(",
"orig_pattern",
"%",
"parts",
"[",
"0",
"]",
")",
"for",
"part",
"in",
"parts",
"[",
"1",
":",
"]",
":",
"name_html",
"+=",
"(",
"period",
"+",
"(",
"orig_pattern",
"%",
"part",
")",
")",
"str_repl",
"[",
"name_html",
"]",
"=",
"(",
"link_pattern",
"%",
"(",
"link",
",",
"name_html",
")",
")",
"if",
"(",
"len",
"(",
"str_repl",
")",
">",
"0",
")",
":",
"with",
"open",
"(",
"full_fname",
",",
"'rb'",
")",
"as",
"fid",
":",
"lines_in",
"=",
"fid",
".",
"readlines",
"(",
")",
"with",
"open",
"(",
"full_fname",
",",
"'wb'",
")",
"as",
"fid",
":",
"for",
"line",
"in",
"lines_in",
":",
"line",
"=",
"line",
".",
"decode",
"(",
"'utf-8'",
")",
"for",
"(",
"name",
",",
"link",
")",
"in",
"str_repl",
".",
"iteritems",
"(",
")",
":",
"line",
"=",
"line",
".",
"replace",
"(",
"name",
",",
"link",
")",
"fid",
".",
"write",
"(",
"line",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"except",
"urllib2",
".",
"HTTPError",
"as",
"e",
":",
"print",
"'The following HTTP Error has occurred:\\n'",
"print",
"e",
".",
"code",
"except",
"urllib2",
".",
"URLError",
"as",
"e",
":",
"print",
"'\\n...\\nWarning: Embedding the documentation hyperlinks requires internet access.\\nPlease check your network connection.\\nUnable to continue embedding due to a URL Error: \\n'",
"print",
"e",
".",
"args",
"print",
"'[done]'"
] |
embed hyperlinks to documentation into example code .
|
train
| false
|