id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
50,617
|
def data_url(mimetype, data):
b64 = base64.b64encode(data).decode('ascii')
url = QUrl('data:{};base64,{}'.format(mimetype, b64))
qtutils.ensure_valid(url)
return url
|
[
"def",
"data_url",
"(",
"mimetype",
",",
"data",
")",
":",
"b64",
"=",
"base64",
".",
"b64encode",
"(",
"data",
")",
".",
"decode",
"(",
"'ascii'",
")",
"url",
"=",
"QUrl",
"(",
"'data:{};base64,{}'",
".",
"format",
"(",
"mimetype",
",",
"b64",
")",
")",
"qtutils",
".",
"ensure_valid",
"(",
"url",
")",
"return",
"url"
] |
get a data: qurl for the given data .
|
train
| false
|
50,618
|
def test_prewitt_h_horizontal():
(i, j) = np.mgrid[(-5):6, (-5):6]
image = (i >= 0).astype(float)
result = filters.prewitt_h(image)
i[(np.abs(j) == 5)] = 10000
assert np.all((result[(i == 0)] == 1))
assert_allclose(result[(np.abs(i) > 1)], 0, atol=1e-10)
|
[
"def",
"test_prewitt_h_horizontal",
"(",
")",
":",
"(",
"i",
",",
"j",
")",
"=",
"np",
".",
"mgrid",
"[",
"(",
"-",
"5",
")",
":",
"6",
",",
"(",
"-",
"5",
")",
":",
"6",
"]",
"image",
"=",
"(",
"i",
">=",
"0",
")",
".",
"astype",
"(",
"float",
")",
"result",
"=",
"filters",
".",
"prewitt_h",
"(",
"image",
")",
"i",
"[",
"(",
"np",
".",
"abs",
"(",
"j",
")",
"==",
"5",
")",
"]",
"=",
"10000",
"assert",
"np",
".",
"all",
"(",
"(",
"result",
"[",
"(",
"i",
"==",
"0",
")",
"]",
"==",
"1",
")",
")",
"assert_allclose",
"(",
"result",
"[",
"(",
"np",
".",
"abs",
"(",
"i",
")",
">",
"1",
")",
"]",
",",
"0",
",",
"atol",
"=",
"1e-10",
")"
] |
horizontal prewitt on an edge should be a horizontal line .
|
train
| false
|
50,621
|
def _ssl_dn_extract_info(dn_string):
search_string = re.search('/emailAddress=(.*)@([^/]+)', dn_string)
if search_string:
user = search_string.group(1)
email = ('%s@%s' % (user, search_string.group(2)))
else:
raise ValueError
search_string = re.search('/CN=([^/]+)/', dn_string)
if search_string:
fullname = search_string.group(1)
else:
raise ValueError
return (user, email, fullname)
|
[
"def",
"_ssl_dn_extract_info",
"(",
"dn_string",
")",
":",
"search_string",
"=",
"re",
".",
"search",
"(",
"'/emailAddress=(.*)@([^/]+)'",
",",
"dn_string",
")",
"if",
"search_string",
":",
"user",
"=",
"search_string",
".",
"group",
"(",
"1",
")",
"email",
"=",
"(",
"'%s@%s'",
"%",
"(",
"user",
",",
"search_string",
".",
"group",
"(",
"2",
")",
")",
")",
"else",
":",
"raise",
"ValueError",
"search_string",
"=",
"re",
".",
"search",
"(",
"'/CN=([^/]+)/'",
",",
"dn_string",
")",
"if",
"search_string",
":",
"fullname",
"=",
"search_string",
".",
"group",
"(",
"1",
")",
"else",
":",
"raise",
"ValueError",
"return",
"(",
"user",
",",
"email",
",",
"fullname",
")"
] |
extract username .
|
train
| false
|
50,622
|
def libvlc_log_set(cb, data, p_instance):
f = (_Cfunctions.get('libvlc_log_set', None) or _Cfunction('libvlc_log_set', ((1,), (1,), (1,)), None, None, Instance, LogCb, ctypes.c_void_p))
return f(cb, data, p_instance)
|
[
"def",
"libvlc_log_set",
"(",
"cb",
",",
"data",
",",
"p_instance",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_log_set'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_log_set'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
")",
",",
"None",
",",
"None",
",",
"Instance",
",",
"LogCb",
",",
"ctypes",
".",
"c_void_p",
")",
")",
"return",
"f",
"(",
"cb",
",",
"data",
",",
"p_instance",
")"
] |
sets the logging callback for a libvlc instance .
|
train
| true
|
50,623
|
def get_kinds(start=None, end=None):
q = Kind.query()
if ((start is not None) and (start != '')):
q = q.filter((Kind.key >= Kind.key_for_kind(start)))
if (end is not None):
if (end == ''):
return []
q = q.filter((Kind.key < Kind.key_for_kind(end)))
return [x.kind_name for x in q]
|
[
"def",
"get_kinds",
"(",
"start",
"=",
"None",
",",
"end",
"=",
"None",
")",
":",
"q",
"=",
"Kind",
".",
"query",
"(",
")",
"if",
"(",
"(",
"start",
"is",
"not",
"None",
")",
"and",
"(",
"start",
"!=",
"''",
")",
")",
":",
"q",
"=",
"q",
".",
"filter",
"(",
"(",
"Kind",
".",
"key",
">=",
"Kind",
".",
"key_for_kind",
"(",
"start",
")",
")",
")",
"if",
"(",
"end",
"is",
"not",
"None",
")",
":",
"if",
"(",
"end",
"==",
"''",
")",
":",
"return",
"[",
"]",
"q",
"=",
"q",
".",
"filter",
"(",
"(",
"Kind",
".",
"key",
"<",
"Kind",
".",
"key_for_kind",
"(",
"end",
")",
")",
")",
"return",
"[",
"x",
".",
"kind_name",
"for",
"x",
"in",
"q",
"]"
] |
return all kinds in the specified range .
|
train
| true
|
50,624
|
def _qnwtrap1(n, a, b):
if (n < 1):
raise ValueError('n must be at least one')
nodes = np.linspace(a, b, n)
dx = (nodes[1] - nodes[0])
weights = (dx * np.ones(n))
weights[0] *= 0.5
weights[(-1)] *= 0.5
return (nodes, weights)
|
[
"def",
"_qnwtrap1",
"(",
"n",
",",
"a",
",",
"b",
")",
":",
"if",
"(",
"n",
"<",
"1",
")",
":",
"raise",
"ValueError",
"(",
"'n must be at least one'",
")",
"nodes",
"=",
"np",
".",
"linspace",
"(",
"a",
",",
"b",
",",
"n",
")",
"dx",
"=",
"(",
"nodes",
"[",
"1",
"]",
"-",
"nodes",
"[",
"0",
"]",
")",
"weights",
"=",
"(",
"dx",
"*",
"np",
".",
"ones",
"(",
"n",
")",
")",
"weights",
"[",
"0",
"]",
"*=",
"0.5",
"weights",
"[",
"(",
"-",
"1",
")",
"]",
"*=",
"0.5",
"return",
"(",
"nodes",
",",
"weights",
")"
] |
compute univariate trapezoid rule quadrature nodes and weights parameters n : int the number of nodes a : int the lower endpoint b : int the upper endpoint returns nodes : np .
|
train
| true
|
50,625
|
def block_until_ssh_open(ipstring, wait_time=10, timeout=120):
reached = False
t_elapsed = 0
while ((not reached) and (t_elapsed < timeout)):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ipstring, 22))
reached = True
except socket.error as err:
time.sleep(wait_time)
t_elapsed += wait_time
sock.close()
|
[
"def",
"block_until_ssh_open",
"(",
"ipstring",
",",
"wait_time",
"=",
"10",
",",
"timeout",
"=",
"120",
")",
":",
"reached",
"=",
"False",
"t_elapsed",
"=",
"0",
"while",
"(",
"(",
"not",
"reached",
")",
"and",
"(",
"t_elapsed",
"<",
"timeout",
")",
")",
":",
"try",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"sock",
".",
"connect",
"(",
"(",
"ipstring",
",",
"22",
")",
")",
"reached",
"=",
"True",
"except",
"socket",
".",
"error",
"as",
"err",
":",
"time",
".",
"sleep",
"(",
"wait_time",
")",
"t_elapsed",
"+=",
"wait_time",
"sock",
".",
"close",
"(",
")"
] |
blocks until server at ipstring has an open port 22 .
|
train
| false
|
50,627
|
def GetEnvironFallback(var_list, default):
for var in var_list:
if (var in os.environ):
return os.environ[var]
return default
|
[
"def",
"GetEnvironFallback",
"(",
"var_list",
",",
"default",
")",
":",
"for",
"var",
"in",
"var_list",
":",
"if",
"(",
"var",
"in",
"os",
".",
"environ",
")",
":",
"return",
"os",
".",
"environ",
"[",
"var",
"]",
"return",
"default"
] |
look up a key in the environment .
|
train
| false
|
50,628
|
def align_iterators(func, *iterables):
class myiter:
def __init__(self, it):
self.it = it
self.key = self.value = None
self.iternext()
def iternext(self):
try:
self.value = next(self.it)
self.key = func(self.value)
except StopIteration:
self.value = self.key = None
def __call__(self, key):
retval = None
if (key == self.key):
retval = self.value
self.iternext()
elif (self.key and (key > self.key)):
raise ValueError(u'Iterator has been left behind')
return retval
iters = [myiter(it) for it in iterables]
minvals = minkey = True
while True:
minvals = [_f for _f in [it.key for it in iters] if _f]
if minvals:
minkey = min(minvals)
(yield (minkey, [it(minkey) for it in iters]))
else:
break
|
[
"def",
"align_iterators",
"(",
"func",
",",
"*",
"iterables",
")",
":",
"class",
"myiter",
":",
"def",
"__init__",
"(",
"self",
",",
"it",
")",
":",
"self",
".",
"it",
"=",
"it",
"self",
".",
"key",
"=",
"self",
".",
"value",
"=",
"None",
"self",
".",
"iternext",
"(",
")",
"def",
"iternext",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"value",
"=",
"next",
"(",
"self",
".",
"it",
")",
"self",
".",
"key",
"=",
"func",
"(",
"self",
".",
"value",
")",
"except",
"StopIteration",
":",
"self",
".",
"value",
"=",
"self",
".",
"key",
"=",
"None",
"def",
"__call__",
"(",
"self",
",",
"key",
")",
":",
"retval",
"=",
"None",
"if",
"(",
"key",
"==",
"self",
".",
"key",
")",
":",
"retval",
"=",
"self",
".",
"value",
"self",
".",
"iternext",
"(",
")",
"elif",
"(",
"self",
".",
"key",
"and",
"(",
"key",
">",
"self",
".",
"key",
")",
")",
":",
"raise",
"ValueError",
"(",
"u'Iterator has been left behind'",
")",
"return",
"retval",
"iters",
"=",
"[",
"myiter",
"(",
"it",
")",
"for",
"it",
"in",
"iterables",
"]",
"minvals",
"=",
"minkey",
"=",
"True",
"while",
"True",
":",
"minvals",
"=",
"[",
"_f",
"for",
"_f",
"in",
"[",
"it",
".",
"key",
"for",
"it",
"in",
"iters",
"]",
"if",
"_f",
"]",
"if",
"minvals",
":",
"minkey",
"=",
"min",
"(",
"minvals",
")",
"(",
"yield",
"(",
"minkey",
",",
"[",
"it",
"(",
"minkey",
")",
"for",
"it",
"in",
"iters",
"]",
")",
")",
"else",
":",
"break"
] |
this generator takes a bunch of iterables that are ordered by func it sends out ordered tuples:: (func .
|
train
| false
|
50,630
|
def parse_filename(filename):
filename = parse_string(filename)
result = resource_find(filename)
if (result is None):
Logger.error(('Resource: unable to find <%s>' % filename))
return (result or filename)
|
[
"def",
"parse_filename",
"(",
"filename",
")",
":",
"filename",
"=",
"parse_string",
"(",
"filename",
")",
"result",
"=",
"resource_find",
"(",
"filename",
")",
"if",
"(",
"result",
"is",
"None",
")",
":",
"Logger",
".",
"error",
"(",
"(",
"'Resource: unable to find <%s>'",
"%",
"filename",
")",
")",
"return",
"(",
"result",
"or",
"filename",
")"
] |
parse a filename and search for it using resource_find() .
|
train
| false
|
50,633
|
def list_tiles(query, zoom_adjust):
resolution = (sum(query.resolution) / 2)
diff = ((_log(resolution) / _log(2)) - zoom_adjust)
zoom = round((meter_zoom + diff))
scale = (2 ** zoom)
mincol = int((scale * ((query.bbox.minx / diameter) + 0.5)))
maxcol = int((scale * ((query.bbox.maxx / diameter) + 0.5)))
minrow = int((scale * (0.5 - (query.bbox.maxy / diameter))))
maxrow = int((scale * (0.5 - (query.bbox.miny / diameter))))
(cols, rows) = (range(mincol, (maxcol + 1)), range(minrow, (maxrow + 1)))
return [dict(z=zoom, x=col, y=row) for (col, row) in product(cols, rows)]
|
[
"def",
"list_tiles",
"(",
"query",
",",
"zoom_adjust",
")",
":",
"resolution",
"=",
"(",
"sum",
"(",
"query",
".",
"resolution",
")",
"/",
"2",
")",
"diff",
"=",
"(",
"(",
"_log",
"(",
"resolution",
")",
"/",
"_log",
"(",
"2",
")",
")",
"-",
"zoom_adjust",
")",
"zoom",
"=",
"round",
"(",
"(",
"meter_zoom",
"+",
"diff",
")",
")",
"scale",
"=",
"(",
"2",
"**",
"zoom",
")",
"mincol",
"=",
"int",
"(",
"(",
"scale",
"*",
"(",
"(",
"query",
".",
"bbox",
".",
"minx",
"/",
"diameter",
")",
"+",
"0.5",
")",
")",
")",
"maxcol",
"=",
"int",
"(",
"(",
"scale",
"*",
"(",
"(",
"query",
".",
"bbox",
".",
"maxx",
"/",
"diameter",
")",
"+",
"0.5",
")",
")",
")",
"minrow",
"=",
"int",
"(",
"(",
"scale",
"*",
"(",
"0.5",
"-",
"(",
"query",
".",
"bbox",
".",
"maxy",
"/",
"diameter",
")",
")",
")",
")",
"maxrow",
"=",
"int",
"(",
"(",
"scale",
"*",
"(",
"0.5",
"-",
"(",
"query",
".",
"bbox",
".",
"miny",
"/",
"diameter",
")",
")",
")",
")",
"(",
"cols",
",",
"rows",
")",
"=",
"(",
"range",
"(",
"mincol",
",",
"(",
"maxcol",
"+",
"1",
")",
")",
",",
"range",
"(",
"minrow",
",",
"(",
"maxrow",
"+",
"1",
")",
")",
")",
"return",
"[",
"dict",
"(",
"z",
"=",
"zoom",
",",
"x",
"=",
"col",
",",
"y",
"=",
"row",
")",
"for",
"(",
"col",
",",
"row",
")",
"in",
"product",
"(",
"cols",
",",
"rows",
")",
"]"
] |
get a list of tile coordinates .
|
train
| false
|
50,634
|
def getgoal(path, opts=None):
cmd = 'mfsgetgoal'
ret = {}
if opts:
cmd += (' -' + opts)
else:
opts = ''
cmd += (' ' + path)
out = __salt__['cmd.run_all'](cmd, python_shell=False)
output = out['stdout'].splitlines()
if ('r' not in opts):
goal = output[0].split(': ')
ret = {'goal': goal[1]}
else:
for line in output:
if (not line):
continue
if (path in line):
continue
comps = line.split()
keytext = (comps[0] + ' with goal')
if (keytext not in ret):
ret[keytext] = {}
ret[keytext][comps[3]] = comps[5]
return ret
|
[
"def",
"getgoal",
"(",
"path",
",",
"opts",
"=",
"None",
")",
":",
"cmd",
"=",
"'mfsgetgoal'",
"ret",
"=",
"{",
"}",
"if",
"opts",
":",
"cmd",
"+=",
"(",
"' -'",
"+",
"opts",
")",
"else",
":",
"opts",
"=",
"''",
"cmd",
"+=",
"(",
"' '",
"+",
"path",
")",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"output",
"=",
"out",
"[",
"'stdout'",
"]",
".",
"splitlines",
"(",
")",
"if",
"(",
"'r'",
"not",
"in",
"opts",
")",
":",
"goal",
"=",
"output",
"[",
"0",
"]",
".",
"split",
"(",
"': '",
")",
"ret",
"=",
"{",
"'goal'",
":",
"goal",
"[",
"1",
"]",
"}",
"else",
":",
"for",
"line",
"in",
"output",
":",
"if",
"(",
"not",
"line",
")",
":",
"continue",
"if",
"(",
"path",
"in",
"line",
")",
":",
"continue",
"comps",
"=",
"line",
".",
"split",
"(",
")",
"keytext",
"=",
"(",
"comps",
"[",
"0",
"]",
"+",
"' with goal'",
")",
"if",
"(",
"keytext",
"not",
"in",
"ret",
")",
":",
"ret",
"[",
"keytext",
"]",
"=",
"{",
"}",
"ret",
"[",
"keytext",
"]",
"[",
"comps",
"[",
"3",
"]",
"]",
"=",
"comps",
"[",
"5",
"]",
"return",
"ret"
] |
return goal(s) for a file or directory cli example: .
|
train
| true
|
50,635
|
def initial_seed():
return default_generator.initial_seed()
|
[
"def",
"initial_seed",
"(",
")",
":",
"return",
"default_generator",
".",
"initial_seed",
"(",
")"
] |
returns the initial seed for generating random numbers as a python long .
|
train
| false
|
50,637
|
def _convert_warp_input(image, preserve_range):
if preserve_range:
image = image.astype(np.double)
else:
image = img_as_float(image)
return image
|
[
"def",
"_convert_warp_input",
"(",
"image",
",",
"preserve_range",
")",
":",
"if",
"preserve_range",
":",
"image",
"=",
"image",
".",
"astype",
"(",
"np",
".",
"double",
")",
"else",
":",
"image",
"=",
"img_as_float",
"(",
"image",
")",
"return",
"image"
] |
convert input image to double image with the appropriate range .
|
train
| false
|
50,638
|
@ignore_warnings
def test_sensitivity_specificity_support_errors():
(y_true, y_pred, _) = make_prediction(binary=True)
assert_raises(ValueError, sensitivity_specificity_support, y_true, y_pred, pos_label=2, average='binary')
assert_raises(ValueError, sensitivity_specificity_support, [0, 1, 2], [1, 2, 0], average='mega')
|
[
"@",
"ignore_warnings",
"def",
"test_sensitivity_specificity_support_errors",
"(",
")",
":",
"(",
"y_true",
",",
"y_pred",
",",
"_",
")",
"=",
"make_prediction",
"(",
"binary",
"=",
"True",
")",
"assert_raises",
"(",
"ValueError",
",",
"sensitivity_specificity_support",
",",
"y_true",
",",
"y_pred",
",",
"pos_label",
"=",
"2",
",",
"average",
"=",
"'binary'",
")",
"assert_raises",
"(",
"ValueError",
",",
"sensitivity_specificity_support",
",",
"[",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"1",
",",
"2",
",",
"0",
"]",
",",
"average",
"=",
"'mega'",
")"
] |
test either if an error is raised depending on parameters .
|
train
| false
|
50,639
|
def _HashPassword(password, version, salt):
if (version == _PASSWORD_VERSION_MD5):
m = hashlib.md5()
m.update(password)
m.update(secrets.GetSecret('cookie_secret'))
hashed = m.digest()
elif (version == _PASSWORD_VERSION_PBKDF2):
hashed = PBKDF2(password, base64.b64decode(salt), count=10000)
return base64.b32encode(hashed)
|
[
"def",
"_HashPassword",
"(",
"password",
",",
"version",
",",
"salt",
")",
":",
"if",
"(",
"version",
"==",
"_PASSWORD_VERSION_MD5",
")",
":",
"m",
"=",
"hashlib",
".",
"md5",
"(",
")",
"m",
".",
"update",
"(",
"password",
")",
"m",
".",
"update",
"(",
"secrets",
".",
"GetSecret",
"(",
"'cookie_secret'",
")",
")",
"hashed",
"=",
"m",
".",
"digest",
"(",
")",
"elif",
"(",
"version",
"==",
"_PASSWORD_VERSION_PBKDF2",
")",
":",
"hashed",
"=",
"PBKDF2",
"(",
"password",
",",
"base64",
".",
"b64decode",
"(",
"salt",
")",
",",
"count",
"=",
"10000",
")",
"return",
"base64",
".",
"b32encode",
"(",
"hashed",
")"
] |
hashes the provided password according to the specified versions policy .
|
train
| false
|
50,640
|
def process_show_version(net_device):
show_ver = net_device.show_version
(net_device.vendor, net_device.model) = obtain_vendor_model(show_ver)
net_device.os_version = obtain_os_version(show_ver)
net_device.uptime = obtain_uptime(show_ver)
net_device.hostname = obtain_hostname(show_ver)
net_device.serial_number = obtain_serial_number(show_ver)
net_device.device_type = obtain_device_type(net_device.model)
|
[
"def",
"process_show_version",
"(",
"net_device",
")",
":",
"show_ver",
"=",
"net_device",
".",
"show_version",
"(",
"net_device",
".",
"vendor",
",",
"net_device",
".",
"model",
")",
"=",
"obtain_vendor_model",
"(",
"show_ver",
")",
"net_device",
".",
"os_version",
"=",
"obtain_os_version",
"(",
"show_ver",
")",
"net_device",
".",
"uptime",
"=",
"obtain_uptime",
"(",
"show_ver",
")",
"net_device",
".",
"hostname",
"=",
"obtain_hostname",
"(",
"show_ver",
")",
"net_device",
".",
"serial_number",
"=",
"obtain_serial_number",
"(",
"show_ver",
")",
"net_device",
".",
"device_type",
"=",
"obtain_device_type",
"(",
"net_device",
".",
"model",
")"
] |
process the show_version output for net_device assign the following attributes to the net_device object hostname device_type # router .
|
train
| false
|
50,641
|
def get_new_coords(init_loc, distance, bearing):
origin = geopy.Point(init_loc[0], init_loc[1])
destination = geopy.distance.distance(kilometers=distance).destination(origin, bearing)
return (destination.latitude, destination.longitude)
|
[
"def",
"get_new_coords",
"(",
"init_loc",
",",
"distance",
",",
"bearing",
")",
":",
"origin",
"=",
"geopy",
".",
"Point",
"(",
"init_loc",
"[",
"0",
"]",
",",
"init_loc",
"[",
"1",
"]",
")",
"destination",
"=",
"geopy",
".",
"distance",
".",
"distance",
"(",
"kilometers",
"=",
"distance",
")",
".",
"destination",
"(",
"origin",
",",
"bearing",
")",
"return",
"(",
"destination",
".",
"latitude",
",",
"destination",
".",
"longitude",
")"
] |
given an initial lat/lng .
|
train
| false
|
50,642
|
def customClass(classes, argStr):
(cname, args, kwargs) = splitArgs(argStr)
cls = classes.get(cname, None)
if (not cls):
raise Exception(('error: %s is unknown - please specify one of %s' % (cname, classes.keys())))
if ((not args) and (not kwargs)):
return cls
return specialClass(cls, append=args, defaults=kwargs)
|
[
"def",
"customClass",
"(",
"classes",
",",
"argStr",
")",
":",
"(",
"cname",
",",
"args",
",",
"kwargs",
")",
"=",
"splitArgs",
"(",
"argStr",
")",
"cls",
"=",
"classes",
".",
"get",
"(",
"cname",
",",
"None",
")",
"if",
"(",
"not",
"cls",
")",
":",
"raise",
"Exception",
"(",
"(",
"'error: %s is unknown - please specify one of %s'",
"%",
"(",
"cname",
",",
"classes",
".",
"keys",
"(",
")",
")",
")",
")",
"if",
"(",
"(",
"not",
"args",
")",
"and",
"(",
"not",
"kwargs",
")",
")",
":",
"return",
"cls",
"return",
"specialClass",
"(",
"cls",
",",
"append",
"=",
"args",
",",
"defaults",
"=",
"kwargs",
")"
] |
return customized class based on argstr the args and key/val pairs in argstr will be automatically applied when the generated class is later used .
|
train
| false
|
50,643
|
def _immutable_fields_error(result_pipeline_definition):
for e in result_pipeline_definition['error']:
for e2 in e['errors']:
if ('can not be changed' in e2):
return True
return False
|
[
"def",
"_immutable_fields_error",
"(",
"result_pipeline_definition",
")",
":",
"for",
"e",
"in",
"result_pipeline_definition",
"[",
"'error'",
"]",
":",
"for",
"e2",
"in",
"e",
"[",
"'errors'",
"]",
":",
"if",
"(",
"'can not be changed'",
"in",
"e2",
")",
":",
"return",
"True",
"return",
"False"
] |
return true if update pipeline failed due to immutable fields some fields cannot be changed after a pipeline has been activated .
|
train
| false
|
50,644
|
def read_file_contents(path):
with open(path) as file_handle:
return file_handle.read()
|
[
"def",
"read_file_contents",
"(",
"path",
")",
":",
"with",
"open",
"(",
"path",
")",
"as",
"file_handle",
":",
"return",
"file_handle",
".",
"read",
"(",
")"
] |
reads the contents of the given file .
|
train
| false
|
50,646
|
def preprocess_form():
BoundField.label_tag = add_required_label_tag(BoundField.label_tag)
|
[
"def",
"preprocess_form",
"(",
")",
":",
"BoundField",
".",
"label_tag",
"=",
"add_required_label_tag",
"(",
"BoundField",
".",
"label_tag",
")"
] |
add asterisk to field labels .
|
train
| false
|
50,647
|
def _parse_relative_url(relative_url):
if (not relative_url):
raise _RelativeUrlError('Relative URL is empty')
(scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url)
if (scheme or netloc):
raise _RelativeUrlError('Relative URL may not have a scheme or location')
if fragment:
raise _RelativeUrlError('Relative URL may not specify a fragment')
if ((not path) or (path[0] != '/')):
raise _RelativeUrlError('Relative URL path must start with "/"')
return (path, query)
|
[
"def",
"_parse_relative_url",
"(",
"relative_url",
")",
":",
"if",
"(",
"not",
"relative_url",
")",
":",
"raise",
"_RelativeUrlError",
"(",
"'Relative URL is empty'",
")",
"(",
"scheme",
",",
"netloc",
",",
"path",
",",
"query",
",",
"fragment",
")",
"=",
"urlparse",
".",
"urlsplit",
"(",
"relative_url",
")",
"if",
"(",
"scheme",
"or",
"netloc",
")",
":",
"raise",
"_RelativeUrlError",
"(",
"'Relative URL may not have a scheme or location'",
")",
"if",
"fragment",
":",
"raise",
"_RelativeUrlError",
"(",
"'Relative URL may not specify a fragment'",
")",
"if",
"(",
"(",
"not",
"path",
")",
"or",
"(",
"path",
"[",
"0",
"]",
"!=",
"'/'",
")",
")",
":",
"raise",
"_RelativeUrlError",
"(",
"'Relative URL path must start with \"/\"'",
")",
"return",
"(",
"path",
",",
"query",
")"
] |
parses a relative url and splits it into its path and query string .
|
train
| false
|
50,648
|
def _ToSearchError(error):
if (error.application_error in _ERROR_MAP):
return _ERROR_MAP[error.application_error](error.error_detail)
return error
|
[
"def",
"_ToSearchError",
"(",
"error",
")",
":",
"if",
"(",
"error",
".",
"application_error",
"in",
"_ERROR_MAP",
")",
":",
"return",
"_ERROR_MAP",
"[",
"error",
".",
"application_error",
"]",
"(",
"error",
".",
"error_detail",
")",
"return",
"error"
] |
translate an application error to a search error .
|
train
| false
|
50,651
|
@click.command(u'trigger-scheduler-event')
@click.argument(u'event')
@pass_context
def trigger_scheduler_event(context, event):
import frappe.utils.scheduler
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
frappe.utils.scheduler.trigger(site, event, now=True)
finally:
frappe.destroy()
|
[
"@",
"click",
".",
"command",
"(",
"u'trigger-scheduler-event'",
")",
"@",
"click",
".",
"argument",
"(",
"u'event'",
")",
"@",
"pass_context",
"def",
"trigger_scheduler_event",
"(",
"context",
",",
"event",
")",
":",
"import",
"frappe",
".",
"utils",
".",
"scheduler",
"for",
"site",
"in",
"context",
".",
"sites",
":",
"try",
":",
"frappe",
".",
"init",
"(",
"site",
"=",
"site",
")",
"frappe",
".",
"connect",
"(",
")",
"frappe",
".",
"utils",
".",
"scheduler",
".",
"trigger",
"(",
"site",
",",
"event",
",",
"now",
"=",
"True",
")",
"finally",
":",
"frappe",
".",
"destroy",
"(",
")"
] |
trigger a scheduler event .
|
train
| false
|
50,652
|
def write_dig_points(fid, dig, block=False, coord_frame=None):
if (dig is not None):
data_size = (5 * 4)
if block:
start_block(fid, FIFF.FIFFB_ISOTRAK)
if (coord_frame is not None):
write_int(fid, FIFF.FIFF_MNE_COORD_FRAME, coord_frame)
for d in dig:
fid.write(np.array(FIFF.FIFF_DIG_POINT, '>i4').tostring())
fid.write(np.array(FIFF.FIFFT_DIG_POINT_STRUCT, '>i4').tostring())
fid.write(np.array(data_size, dtype='>i4').tostring())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, '>i4').tostring())
fid.write(np.array(d['kind'], '>i4').tostring())
fid.write(np.array(d['ident'], '>i4').tostring())
fid.write(np.array(d['r'][:3], '>f4').tostring())
if block:
end_block(fid, FIFF.FIFFB_ISOTRAK)
|
[
"def",
"write_dig_points",
"(",
"fid",
",",
"dig",
",",
"block",
"=",
"False",
",",
"coord_frame",
"=",
"None",
")",
":",
"if",
"(",
"dig",
"is",
"not",
"None",
")",
":",
"data_size",
"=",
"(",
"5",
"*",
"4",
")",
"if",
"block",
":",
"start_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_ISOTRAK",
")",
"if",
"(",
"coord_frame",
"is",
"not",
"None",
")",
":",
"write_int",
"(",
"fid",
",",
"FIFF",
".",
"FIFF_MNE_COORD_FRAME",
",",
"coord_frame",
")",
"for",
"d",
"in",
"dig",
":",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"FIFF",
".",
"FIFF_DIG_POINT",
",",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"FIFF",
".",
"FIFFT_DIG_POINT_STRUCT",
",",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"data_size",
",",
"dtype",
"=",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"FIFF",
".",
"FIFFV_NEXT_SEQ",
",",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"d",
"[",
"'kind'",
"]",
",",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"d",
"[",
"'ident'",
"]",
",",
"'>i4'",
")",
".",
"tostring",
"(",
")",
")",
"fid",
".",
"write",
"(",
"np",
".",
"array",
"(",
"d",
"[",
"'r'",
"]",
"[",
":",
"3",
"]",
",",
"'>f4'",
")",
".",
"tostring",
"(",
")",
")",
"if",
"block",
":",
"end_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_ISOTRAK",
")"
] |
write a set of digitizer data points into a fif file .
|
train
| false
|
50,654
|
def delta_resolution(dt, delta):
delta = timedelta_seconds(delta)
resolutions = ((3, (lambda x: (x / 86400))), (4, (lambda x: (x / 3600))), (5, (lambda x: (x / 60))))
args = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
for (res, predicate) in resolutions:
if (predicate(delta) >= 1.0):
return datetime(*args[:res])
return dt
|
[
"def",
"delta_resolution",
"(",
"dt",
",",
"delta",
")",
":",
"delta",
"=",
"timedelta_seconds",
"(",
"delta",
")",
"resolutions",
"=",
"(",
"(",
"3",
",",
"(",
"lambda",
"x",
":",
"(",
"x",
"/",
"86400",
")",
")",
")",
",",
"(",
"4",
",",
"(",
"lambda",
"x",
":",
"(",
"x",
"/",
"3600",
")",
")",
")",
",",
"(",
"5",
",",
"(",
"lambda",
"x",
":",
"(",
"x",
"/",
"60",
")",
")",
")",
")",
"args",
"=",
"(",
"dt",
".",
"year",
",",
"dt",
".",
"month",
",",
"dt",
".",
"day",
",",
"dt",
".",
"hour",
",",
"dt",
".",
"minute",
",",
"dt",
".",
"second",
")",
"for",
"(",
"res",
",",
"predicate",
")",
"in",
"resolutions",
":",
"if",
"(",
"predicate",
"(",
"delta",
")",
">=",
"1.0",
")",
":",
"return",
"datetime",
"(",
"*",
"args",
"[",
":",
"res",
"]",
")",
"return",
"dt"
] |
round a datetime to the resolution of a timedelta .
|
train
| false
|
50,655
|
def _logs_exist(fs, path):
try:
return fs.exists(path)
except IOError:
return None
|
[
"def",
"_logs_exist",
"(",
"fs",
",",
"path",
")",
":",
"try",
":",
"return",
"fs",
".",
"exists",
"(",
"path",
")",
"except",
"IOError",
":",
"return",
"None"
] |
do fs .
|
train
| false
|
50,658
|
def graph_laplacian(csgraph, normed=False, return_diag=False):
if ((csgraph.ndim != 2) or (csgraph.shape[0] != csgraph.shape[1])):
raise ValueError('csgraph must be a square matrix or array')
if (normed and (np.issubdtype(csgraph.dtype, np.int) or np.issubdtype(csgraph.dtype, np.uint))):
csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True)
if sparse.isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed, return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed, return_diag=return_diag)
|
[
"def",
"graph_laplacian",
"(",
"csgraph",
",",
"normed",
"=",
"False",
",",
"return_diag",
"=",
"False",
")",
":",
"if",
"(",
"(",
"csgraph",
".",
"ndim",
"!=",
"2",
")",
"or",
"(",
"csgraph",
".",
"shape",
"[",
"0",
"]",
"!=",
"csgraph",
".",
"shape",
"[",
"1",
"]",
")",
")",
":",
"raise",
"ValueError",
"(",
"'csgraph must be a square matrix or array'",
")",
"if",
"(",
"normed",
"and",
"(",
"np",
".",
"issubdtype",
"(",
"csgraph",
".",
"dtype",
",",
"np",
".",
"int",
")",
"or",
"np",
".",
"issubdtype",
"(",
"csgraph",
".",
"dtype",
",",
"np",
".",
"uint",
")",
")",
")",
":",
"csgraph",
"=",
"check_array",
"(",
"csgraph",
",",
"dtype",
"=",
"np",
".",
"float64",
",",
"accept_sparse",
"=",
"True",
")",
"if",
"sparse",
".",
"isspmatrix",
"(",
"csgraph",
")",
":",
"return",
"_laplacian_sparse",
"(",
"csgraph",
",",
"normed",
"=",
"normed",
",",
"return_diag",
"=",
"return_diag",
")",
"else",
":",
"return",
"_laplacian_dense",
"(",
"csgraph",
",",
"normed",
"=",
"normed",
",",
"return_diag",
"=",
"return_diag",
")"
] |
return the laplacian matrix of a directed graph .
|
train
| false
|
50,659
|
def expand_otu_ids(otu_map, otus_to_expand, ignore_missing=False):
result = []
for o in otus_to_expand:
otu_id = o.split()[0]
try:
result += otu_map[otu_id]
except KeyError:
if ignore_missing:
continue
else:
raise KeyError(('OTU id not in OTU map: %s' % o.split()[0]))
return result
|
[
"def",
"expand_otu_ids",
"(",
"otu_map",
",",
"otus_to_expand",
",",
"ignore_missing",
"=",
"False",
")",
":",
"result",
"=",
"[",
"]",
"for",
"o",
"in",
"otus_to_expand",
":",
"otu_id",
"=",
"o",
".",
"split",
"(",
")",
"[",
"0",
"]",
"try",
":",
"result",
"+=",
"otu_map",
"[",
"otu_id",
"]",
"except",
"KeyError",
":",
"if",
"ignore_missing",
":",
"continue",
"else",
":",
"raise",
"KeyError",
"(",
"(",
"'OTU id not in OTU map: %s'",
"%",
"o",
".",
"split",
"(",
")",
"[",
"0",
"]",
")",
")",
"return",
"result"
] |
from otu map and otu ids .
|
train
| false
|
50,660
|
def esc(s, esc_chars):
if (not s):
return u''
for c in esc_chars:
esc_str = (u'\\' + c)
s = s.replace(c, esc_str)
return s
|
[
"def",
"esc",
"(",
"s",
",",
"esc_chars",
")",
":",
"if",
"(",
"not",
"s",
")",
":",
"return",
"u''",
"for",
"c",
"in",
"esc_chars",
":",
"esc_str",
"=",
"(",
"u'\\\\'",
"+",
"c",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"c",
",",
"esc_str",
")",
"return",
"s"
] |
escape special characters .
|
train
| false
|
50,661
|
def igcd(*args):
if (len(args) < 2):
raise TypeError(('igcd() takes at least 2 arguments (%s given)' % len(args)))
if (1 in args):
a = 1
k = 0
else:
a = abs(as_int(args[0]))
k = 1
if (a != 1):
while (k < len(args)):
b = args[k]
k += 1
try:
a = _gcdcache[(a, b)]
except KeyError:
b = as_int(b)
if (not b):
continue
if (b == 1):
a = 1
break
if (b < 0):
b = (- b)
t = (a, b)
while b:
(a, b) = (b, (a % b))
_gcdcache[t] = _gcdcache[(t[1], t[0])] = a
while (k < len(args)):
ok = as_int(args[k])
k += 1
return a
|
[
"def",
"igcd",
"(",
"*",
"args",
")",
":",
"if",
"(",
"len",
"(",
"args",
")",
"<",
"2",
")",
":",
"raise",
"TypeError",
"(",
"(",
"'igcd() takes at least 2 arguments (%s given)'",
"%",
"len",
"(",
"args",
")",
")",
")",
"if",
"(",
"1",
"in",
"args",
")",
":",
"a",
"=",
"1",
"k",
"=",
"0",
"else",
":",
"a",
"=",
"abs",
"(",
"as_int",
"(",
"args",
"[",
"0",
"]",
")",
")",
"k",
"=",
"1",
"if",
"(",
"a",
"!=",
"1",
")",
":",
"while",
"(",
"k",
"<",
"len",
"(",
"args",
")",
")",
":",
"b",
"=",
"args",
"[",
"k",
"]",
"k",
"+=",
"1",
"try",
":",
"a",
"=",
"_gcdcache",
"[",
"(",
"a",
",",
"b",
")",
"]",
"except",
"KeyError",
":",
"b",
"=",
"as_int",
"(",
"b",
")",
"if",
"(",
"not",
"b",
")",
":",
"continue",
"if",
"(",
"b",
"==",
"1",
")",
":",
"a",
"=",
"1",
"break",
"if",
"(",
"b",
"<",
"0",
")",
":",
"b",
"=",
"(",
"-",
"b",
")",
"t",
"=",
"(",
"a",
",",
"b",
")",
"while",
"b",
":",
"(",
"a",
",",
"b",
")",
"=",
"(",
"b",
",",
"(",
"a",
"%",
"b",
")",
")",
"_gcdcache",
"[",
"t",
"]",
"=",
"_gcdcache",
"[",
"(",
"t",
"[",
"1",
"]",
",",
"t",
"[",
"0",
"]",
")",
"]",
"=",
"a",
"while",
"(",
"k",
"<",
"len",
"(",
"args",
")",
")",
":",
"ok",
"=",
"as_int",
"(",
"args",
"[",
"k",
"]",
")",
"k",
"+=",
"1",
"return",
"a"
] |
computes nonnegative integer greatest common divisor .
|
train
| false
|
50,662
|
def penalty_string(distance, limit=None):
penalties = []
for key in distance.keys():
key = key.replace('album_', '')
key = key.replace('track_', '')
key = key.replace('_', ' ')
penalties.append(key)
if penalties:
if (limit and (len(penalties) > limit)):
penalties = (penalties[:limit] + ['...'])
return ui.colorize('text_warning', (u'(%s)' % ', '.join(penalties)))
|
[
"def",
"penalty_string",
"(",
"distance",
",",
"limit",
"=",
"None",
")",
":",
"penalties",
"=",
"[",
"]",
"for",
"key",
"in",
"distance",
".",
"keys",
"(",
")",
":",
"key",
"=",
"key",
".",
"replace",
"(",
"'album_'",
",",
"''",
")",
"key",
"=",
"key",
".",
"replace",
"(",
"'track_'",
",",
"''",
")",
"key",
"=",
"key",
".",
"replace",
"(",
"'_'",
",",
"' '",
")",
"penalties",
".",
"append",
"(",
"key",
")",
"if",
"penalties",
":",
"if",
"(",
"limit",
"and",
"(",
"len",
"(",
"penalties",
")",
">",
"limit",
")",
")",
":",
"penalties",
"=",
"(",
"penalties",
"[",
":",
"limit",
"]",
"+",
"[",
"'...'",
"]",
")",
"return",
"ui",
".",
"colorize",
"(",
"'text_warning'",
",",
"(",
"u'(%s)'",
"%",
"', '",
".",
"join",
"(",
"penalties",
")",
")",
")"
] |
returns a colorized string that indicates all the penalties applied to a distance object .
|
train
| false
|
50,663
|
def conv_input_length(output_length, filter_size, stride, pad=0):
if (output_length is None):
return None
if (pad == 'valid'):
pad = 0
elif (pad == 'full'):
pad = (filter_size - 1)
elif (pad == 'same'):
pad = (filter_size // 2)
if (not isinstance(pad, int)):
raise ValueError('Invalid pad: {0}'.format(pad))
return ((((output_length - 1) * stride) - (2 * pad)) + filter_size)
|
[
"def",
"conv_input_length",
"(",
"output_length",
",",
"filter_size",
",",
"stride",
",",
"pad",
"=",
"0",
")",
":",
"if",
"(",
"output_length",
"is",
"None",
")",
":",
"return",
"None",
"if",
"(",
"pad",
"==",
"'valid'",
")",
":",
"pad",
"=",
"0",
"elif",
"(",
"pad",
"==",
"'full'",
")",
":",
"pad",
"=",
"(",
"filter_size",
"-",
"1",
")",
"elif",
"(",
"pad",
"==",
"'same'",
")",
":",
"pad",
"=",
"(",
"filter_size",
"//",
"2",
")",
"if",
"(",
"not",
"isinstance",
"(",
"pad",
",",
"int",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid pad: {0}'",
".",
"format",
"(",
"pad",
")",
")",
"return",
"(",
"(",
"(",
"(",
"output_length",
"-",
"1",
")",
"*",
"stride",
")",
"-",
"(",
"2",
"*",
"pad",
")",
")",
"+",
"filter_size",
")"
] |
helper function to compute the input size of a convolution operation this function computes the length along a single axis .
|
train
| false
|
50,664
|
def libvlc_log_get_context(ctx):
f = (_Cfunctions.get('libvlc_log_get_context', None) or _Cfunction('libvlc_log_get_context', ((1,), (2,), (2,), (2,)), None, None, Log_ptr, ListPOINTER(ctypes.c_char_p), ListPOINTER(ctypes.c_char_p), ctypes.POINTER(ctypes.c_uint)))
return f(ctx)
|
[
"def",
"libvlc_log_get_context",
"(",
"ctx",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_log_get_context'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_log_get_context'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"2",
",",
")",
",",
"(",
"2",
",",
")",
",",
"(",
"2",
",",
")",
")",
",",
"None",
",",
"None",
",",
"Log_ptr",
",",
"ListPOINTER",
"(",
"ctypes",
".",
"c_char_p",
")",
",",
"ListPOINTER",
"(",
"ctypes",
".",
"c_char_p",
")",
",",
"ctypes",
".",
"POINTER",
"(",
"ctypes",
".",
"c_uint",
")",
")",
")",
"return",
"f",
"(",
"ctx",
")"
] |
gets debugging informations about a log message: the name of the vlc module emitting the message and the message location within the source code .
|
train
| true
|
50,665
|
def match_anywhere(needles, haystack, ignore_case=False):
regex_needle = (('.*' + '.*'.join(imap(re.escape, needles))) + '.*')
regex_flags = ((re.IGNORECASE | re.UNICODE) if ignore_case else re.UNICODE)
found = (lambda haystack: re.search(regex_needle, haystack.path, flags=regex_flags))
return ifilter(found, haystack)
|
[
"def",
"match_anywhere",
"(",
"needles",
",",
"haystack",
",",
"ignore_case",
"=",
"False",
")",
":",
"regex_needle",
"=",
"(",
"(",
"'.*'",
"+",
"'.*'",
".",
"join",
"(",
"imap",
"(",
"re",
".",
"escape",
",",
"needles",
")",
")",
")",
"+",
"'.*'",
")",
"regex_flags",
"=",
"(",
"(",
"re",
".",
"IGNORECASE",
"|",
"re",
".",
"UNICODE",
")",
"if",
"ignore_case",
"else",
"re",
".",
"UNICODE",
")",
"found",
"=",
"(",
"lambda",
"haystack",
":",
"re",
".",
"search",
"(",
"regex_needle",
",",
"haystack",
".",
"path",
",",
"flags",
"=",
"regex_flags",
")",
")",
"return",
"ifilter",
"(",
"found",
",",
"haystack",
")"
] |
matches needles anywhere in the path as long as theyre in the same order .
|
train
| false
|
50,666
|
def test_server_error_exception(exception_app):
(request, response) = sanic_endpoint_test(exception_app, uri='/error')
assert (response.status == 500)
|
[
"def",
"test_server_error_exception",
"(",
"exception_app",
")",
":",
"(",
"request",
",",
"response",
")",
"=",
"sanic_endpoint_test",
"(",
"exception_app",
",",
"uri",
"=",
"'/error'",
")",
"assert",
"(",
"response",
".",
"status",
"==",
"500",
")"
] |
test the built-in servererror exception works .
|
train
| false
|
50,668
|
def chloginclass(name, loginclass, root=None):
if (__grains__['kernel'] != 'OpenBSD'):
return False
if (loginclass == get_loginclass(name)):
return True
cmd = ['usermod', '-L', '{0}'.format(loginclass), '{0}'.format(name)]
if (root is not None):
cmd.extend(('-R', root))
__salt__['cmd.run'](cmd, python_shell=False)
return (get_loginclass(name) == loginclass)
|
[
"def",
"chloginclass",
"(",
"name",
",",
"loginclass",
",",
"root",
"=",
"None",
")",
":",
"if",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"!=",
"'OpenBSD'",
")",
":",
"return",
"False",
"if",
"(",
"loginclass",
"==",
"get_loginclass",
"(",
"name",
")",
")",
":",
"return",
"True",
"cmd",
"=",
"[",
"'usermod'",
",",
"'-L'",
",",
"'{0}'",
".",
"format",
"(",
"loginclass",
")",
",",
"'{0}'",
".",
"format",
"(",
"name",
")",
"]",
"if",
"(",
"root",
"is",
"not",
"None",
")",
":",
"cmd",
".",
"extend",
"(",
"(",
"'-R'",
",",
"root",
")",
")",
"__salt__",
"[",
"'cmd.run'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"return",
"(",
"get_loginclass",
"(",
"name",
")",
"==",
"loginclass",
")"
] |
change the default login class of the user .
|
train
| true
|
50,669
|
def find_staff_lock_source(xblock):
if xblock.fields['visible_to_staff_only'].is_set_on(xblock):
return xblock
if (xblock.category == 'chapter'):
return None
parent_location = modulestore().get_parent_location(xblock.location, revision=ModuleStoreEnum.RevisionOption.draft_preferred)
if (not parent_location):
return None
parent = modulestore().get_item(parent_location)
return find_staff_lock_source(parent)
|
[
"def",
"find_staff_lock_source",
"(",
"xblock",
")",
":",
"if",
"xblock",
".",
"fields",
"[",
"'visible_to_staff_only'",
"]",
".",
"is_set_on",
"(",
"xblock",
")",
":",
"return",
"xblock",
"if",
"(",
"xblock",
".",
"category",
"==",
"'chapter'",
")",
":",
"return",
"None",
"parent_location",
"=",
"modulestore",
"(",
")",
".",
"get_parent_location",
"(",
"xblock",
".",
"location",
",",
"revision",
"=",
"ModuleStoreEnum",
".",
"RevisionOption",
".",
"draft_preferred",
")",
"if",
"(",
"not",
"parent_location",
")",
":",
"return",
"None",
"parent",
"=",
"modulestore",
"(",
")",
".",
"get_item",
"(",
"parent_location",
")",
"return",
"find_staff_lock_source",
"(",
"parent",
")"
] |
returns the xblock responsible for setting this xblocks staff lock .
|
train
| false
|
50,670
|
def try_run(commands):
null = open(os.devnull, 'w')
for command in commands:
retcode = subprocess.call(command, stdout=null, shell=True, stderr=subprocess.STDOUT)
if (retcode != 0):
return False
return True
|
[
"def",
"try_run",
"(",
"commands",
")",
":",
"null",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'w'",
")",
"for",
"command",
"in",
"commands",
":",
"retcode",
"=",
"subprocess",
".",
"call",
"(",
"command",
",",
"stdout",
"=",
"null",
",",
"shell",
"=",
"True",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")",
"if",
"(",
"retcode",
"!=",
"0",
")",
":",
"return",
"False",
"return",
"True"
] |
run a series of commands and only return true if all ran fine .
|
train
| false
|
50,672
|
def dict_of_key_value_pairs(arg):
lexer = shlex.shlex(str(arg))
lexer.wordchars += '/.+-():'
tokens = list(lexer)
tokens_len = len(tokens)
D = {}
i = 0
while (i < tokens_len):
k_eq_v = tokens[i:(i + 3)]
if ((len(k_eq_v) != 3) or (k_eq_v[1] != '=')):
raise ValueError(("Unexpected end of key/value pairs in value '%s'" % arg))
D[k_eq_v[0]] = k_eq_v[2].strip('\'"')
i += 4
return D
|
[
"def",
"dict_of_key_value_pairs",
"(",
"arg",
")",
":",
"lexer",
"=",
"shlex",
".",
"shlex",
"(",
"str",
"(",
"arg",
")",
")",
"lexer",
".",
"wordchars",
"+=",
"'/.+-():'",
"tokens",
"=",
"list",
"(",
"lexer",
")",
"tokens_len",
"=",
"len",
"(",
"tokens",
")",
"D",
"=",
"{",
"}",
"i",
"=",
"0",
"while",
"(",
"i",
"<",
"tokens_len",
")",
":",
"k_eq_v",
"=",
"tokens",
"[",
"i",
":",
"(",
"i",
"+",
"3",
")",
"]",
"if",
"(",
"(",
"len",
"(",
"k_eq_v",
")",
"!=",
"3",
")",
"or",
"(",
"k_eq_v",
"[",
"1",
"]",
"!=",
"'='",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"\"Unexpected end of key/value pairs in value '%s'\"",
"%",
"arg",
")",
")",
"D",
"[",
"k_eq_v",
"[",
"0",
"]",
"]",
"=",
"k_eq_v",
"[",
"2",
"]",
".",
"strip",
"(",
"'\\'\"'",
")",
"i",
"+=",
"4",
"return",
"D"
] |
parse key=val .
|
train
| false
|
50,674
|
def get_api_client(api_config, student):
client_name = api_config.OAUTH2_CLIENT_NAME
try:
client = Client.objects.get(name=client_name)
except Client.DoesNotExist:
raise ImproperlyConfigured('OAuth2 Client with name [{}] does not exist.'.format(client_name))
scopes = ['email', 'profile']
expires_in = settings.OAUTH_ID_TOKEN_EXPIRATION
jwt = JwtBuilder(student, secret=client.client_secret).build_token(scopes, expires_in, aud=client.client_id)
return EdxRestApiClient(api_config.internal_api_url, jwt=jwt)
|
[
"def",
"get_api_client",
"(",
"api_config",
",",
"student",
")",
":",
"client_name",
"=",
"api_config",
".",
"OAUTH2_CLIENT_NAME",
"try",
":",
"client",
"=",
"Client",
".",
"objects",
".",
"get",
"(",
"name",
"=",
"client_name",
")",
"except",
"Client",
".",
"DoesNotExist",
":",
"raise",
"ImproperlyConfigured",
"(",
"'OAuth2 Client with name [{}] does not exist.'",
".",
"format",
"(",
"client_name",
")",
")",
"scopes",
"=",
"[",
"'email'",
",",
"'profile'",
"]",
"expires_in",
"=",
"settings",
".",
"OAUTH_ID_TOKEN_EXPIRATION",
"jwt",
"=",
"JwtBuilder",
"(",
"student",
",",
"secret",
"=",
"client",
".",
"client_secret",
")",
".",
"build_token",
"(",
"scopes",
",",
"expires_in",
",",
"aud",
"=",
"client",
".",
"client_id",
")",
"return",
"EdxRestApiClient",
"(",
"api_config",
".",
"internal_api_url",
",",
"jwt",
"=",
"jwt",
")"
] |
create and configure an api client for authenticated http requests .
|
train
| false
|
50,675
|
def in_string(state, text, i, formats, user_data):
q = (u'"' if (state.parse == IN_DQS) else u"'")
pos = text.find(q, i)
if (pos == (-1)):
if (text[(-1)] == u'\\'):
return [((len(text) - i), formats[u'string'])]
state.parse = (NORMAL if (state.blocks < 1) else IN_CONTENT)
return [((len(text) - i), formats[u'unterminated-string'])]
state.parse = (NORMAL if (state.blocks < 1) else IN_CONTENT)
return [(((pos - i) + len(q)), formats[u'string'])]
|
[
"def",
"in_string",
"(",
"state",
",",
"text",
",",
"i",
",",
"formats",
",",
"user_data",
")",
":",
"q",
"=",
"(",
"u'\"'",
"if",
"(",
"state",
".",
"parse",
"==",
"IN_DQS",
")",
"else",
"u\"'\"",
")",
"pos",
"=",
"text",
".",
"find",
"(",
"q",
",",
"i",
")",
"if",
"(",
"pos",
"==",
"(",
"-",
"1",
")",
")",
":",
"if",
"(",
"text",
"[",
"(",
"-",
"1",
")",
"]",
"==",
"u'\\\\'",
")",
":",
"return",
"[",
"(",
"(",
"len",
"(",
"text",
")",
"-",
"i",
")",
",",
"formats",
"[",
"u'string'",
"]",
")",
"]",
"state",
".",
"parse",
"=",
"(",
"NORMAL",
"if",
"(",
"state",
".",
"blocks",
"<",
"1",
")",
"else",
"IN_CONTENT",
")",
"return",
"[",
"(",
"(",
"len",
"(",
"text",
")",
"-",
"i",
")",
",",
"formats",
"[",
"u'unterminated-string'",
"]",
")",
"]",
"state",
".",
"parse",
"=",
"(",
"NORMAL",
"if",
"(",
"state",
".",
"blocks",
"<",
"1",
")",
"else",
"IN_CONTENT",
")",
"return",
"[",
"(",
"(",
"(",
"pos",
"-",
"i",
")",
"+",
"len",
"(",
"q",
")",
")",
",",
"formats",
"[",
"u'string'",
"]",
")",
"]"
] |
inside a string .
|
train
| false
|
50,676
|
def py_conv(img, kern, mode, subsample):
if isinstance(mode, int):
mode = (mode, mode)
if isinstance(mode, tuple):
(pad_h, pad_w) = map(int, mode)
img = py_conv_pad_img(img, pad_h, pad_w)
mode = 'valid'
if imported_scipy_convolve2d:
return py_conv_scipy(img, kern, mode, subsample)
elif (mode == 'valid'):
return py_conv_valid_numpy(img, kern)[:, :, ::subsample[0], ::subsample[1]]
elif (mode == 'full'):
return py_conv_full_numpy(img, kern)[:, :, ::subsample[0], ::subsample[1]]
else:
raise Exception("Can't execute this kernel.")
|
[
"def",
"py_conv",
"(",
"img",
",",
"kern",
",",
"mode",
",",
"subsample",
")",
":",
"if",
"isinstance",
"(",
"mode",
",",
"int",
")",
":",
"mode",
"=",
"(",
"mode",
",",
"mode",
")",
"if",
"isinstance",
"(",
"mode",
",",
"tuple",
")",
":",
"(",
"pad_h",
",",
"pad_w",
")",
"=",
"map",
"(",
"int",
",",
"mode",
")",
"img",
"=",
"py_conv_pad_img",
"(",
"img",
",",
"pad_h",
",",
"pad_w",
")",
"mode",
"=",
"'valid'",
"if",
"imported_scipy_convolve2d",
":",
"return",
"py_conv_scipy",
"(",
"img",
",",
"kern",
",",
"mode",
",",
"subsample",
")",
"elif",
"(",
"mode",
"==",
"'valid'",
")",
":",
"return",
"py_conv_valid_numpy",
"(",
"img",
",",
"kern",
")",
"[",
":",
",",
":",
",",
":",
":",
"subsample",
"[",
"0",
"]",
",",
":",
":",
"subsample",
"[",
"1",
"]",
"]",
"elif",
"(",
"mode",
"==",
"'full'",
")",
":",
"return",
"py_conv_full_numpy",
"(",
"img",
",",
"kern",
")",
"[",
":",
",",
":",
",",
":",
":",
"subsample",
"[",
"0",
"]",
",",
":",
":",
"subsample",
"[",
"1",
"]",
"]",
"else",
":",
"raise",
"Exception",
"(",
"\"Can't execute this kernel.\"",
")"
] |
use a scipy or numpy implementation depending is scipy is available .
|
train
| false
|
50,677
|
def requote_uri(uri):
safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
safe_without_percent = "!#$&'()*+,/:;=?@[]~"
try:
return quote(unquote_unreserved(uri), safe=safe_with_percent)
except InvalidURL:
return quote(uri, safe=safe_without_percent)
|
[
"def",
"requote_uri",
"(",
"uri",
")",
":",
"safe_with_percent",
"=",
"\"!#$%&'()*+,/:;=?@[]~\"",
"safe_without_percent",
"=",
"\"!#$&'()*+,/:;=?@[]~\"",
"try",
":",
"return",
"quote",
"(",
"unquote_unreserved",
"(",
"uri",
")",
",",
"safe",
"=",
"safe_with_percent",
")",
"except",
"InvalidURL",
":",
"return",
"quote",
"(",
"uri",
",",
"safe",
"=",
"safe_without_percent",
")"
] |
re-quote the given uri .
|
train
| true
|
50,678
|
def _compile_firstset(info, fs):
reverse = bool((info.flags & REVERSE))
fs = _check_firstset(info, reverse, fs)
if (not fs):
return []
return fs.compile(reverse)
|
[
"def",
"_compile_firstset",
"(",
"info",
",",
"fs",
")",
":",
"reverse",
"=",
"bool",
"(",
"(",
"info",
".",
"flags",
"&",
"REVERSE",
")",
")",
"fs",
"=",
"_check_firstset",
"(",
"info",
",",
"reverse",
",",
"fs",
")",
"if",
"(",
"not",
"fs",
")",
":",
"return",
"[",
"]",
"return",
"fs",
".",
"compile",
"(",
"reverse",
")"
] |
compiles the firstset for the pattern .
|
train
| false
|
50,680
|
def _get_index_name_by_column(table, column_name):
protected_name = metadata.protect_name(column_name)
possible_index_values = [protected_name, ('values(%s)' % protected_name)]
for index_metadata in table.indexes.values():
options = dict(index_metadata.index_options)
if (options.get('target') in possible_index_values):
return index_metadata.name
|
[
"def",
"_get_index_name_by_column",
"(",
"table",
",",
"column_name",
")",
":",
"protected_name",
"=",
"metadata",
".",
"protect_name",
"(",
"column_name",
")",
"possible_index_values",
"=",
"[",
"protected_name",
",",
"(",
"'values(%s)'",
"%",
"protected_name",
")",
"]",
"for",
"index_metadata",
"in",
"table",
".",
"indexes",
".",
"values",
"(",
")",
":",
"options",
"=",
"dict",
"(",
"index_metadata",
".",
"index_options",
")",
"if",
"(",
"options",
".",
"get",
"(",
"'target'",
")",
"in",
"possible_index_values",
")",
":",
"return",
"index_metadata",
".",
"name"
] |
find the index name for a given table and column .
|
train
| true
|
50,684
|
def disjoint_union_all(graphs):
graphs = iter(graphs)
U = next(graphs)
for H in graphs:
U = nx.disjoint_union(U, H)
return U
|
[
"def",
"disjoint_union_all",
"(",
"graphs",
")",
":",
"graphs",
"=",
"iter",
"(",
"graphs",
")",
"U",
"=",
"next",
"(",
"graphs",
")",
"for",
"H",
"in",
"graphs",
":",
"U",
"=",
"nx",
".",
"disjoint_union",
"(",
"U",
",",
"H",
")",
"return",
"U"
] |
return the disjoint union of all graphs .
|
train
| false
|
50,685
|
def _get_my_ip():
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, _port) = csock.getsockname()
csock.close()
return addr
|
[
"def",
"_get_my_ip",
"(",
")",
":",
"csock",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_DGRAM",
")",
"csock",
".",
"connect",
"(",
"(",
"'8.8.8.8'",
",",
"80",
")",
")",
"(",
"addr",
",",
"_port",
")",
"=",
"csock",
".",
"getsockname",
"(",
")",
"csock",
".",
"close",
"(",
")",
"return",
"addr"
] |
returns the actual ip of the local machine .
|
train
| false
|
50,686
|
def int_from_bool_as_string(subject):
return ((bool_from_string(subject) and 1) or 0)
|
[
"def",
"int_from_bool_as_string",
"(",
"subject",
")",
":",
"return",
"(",
"(",
"bool_from_string",
"(",
"subject",
")",
"and",
"1",
")",
"or",
"0",
")"
] |
interpret a string as a boolean and return either 1 or 0 .
|
train
| false
|
50,687
|
def bridge_exists(br):
cmd = 'ovs-vsctl br-exists {0}'.format(br)
result = __salt__['cmd.run_all'](cmd)
retcode = result['retcode']
return _retcode_to_bool(retcode)
|
[
"def",
"bridge_exists",
"(",
"br",
")",
":",
"cmd",
"=",
"'ovs-vsctl br-exists {0}'",
".",
"format",
"(",
"br",
")",
"result",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"retcode",
"=",
"result",
"[",
"'retcode'",
"]",
"return",
"_retcode_to_bool",
"(",
"retcode",
")"
] |
tests whether bridge exists as a real or fake bridge .
|
train
| true
|
50,688
|
def clean_output(s):
return re.sub('\n( *\n)+', '\n', s).replace(' ', ' ')
|
[
"def",
"clean_output",
"(",
"s",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'\\n( *\\n)+'",
",",
"'\\n'",
",",
"s",
")",
".",
"replace",
"(",
"' '",
",",
"' '",
")"
] |
remove double newlines with whitespace in between and reduce the level of indentation .
|
train
| false
|
50,689
|
@login_required
def diff_submit(request):
data = {'svn_diff_form': forms.DiffForm(request.user.username), 'svn_diff_error_message': ''}
if (request.method == 'POST'):
temp_svn_directory = tempfile.mkdtemp()
form = forms.DiffForm(request.user.username, temp_svn_directory, request.POST)
if form.is_valid():
form.commit_diff()
view_helpers.set_mission_completed(request.user.get_profile(), 'svn_diff')
return HttpResponseRedirect(reverse('svn_diff'))
data['svn_diff_form'] = form
shutil.rmtree(temp_svn_directory)
request.method = 'GET'
return Diff.as_view()(request, extra_context_data=data)
|
[
"@",
"login_required",
"def",
"diff_submit",
"(",
"request",
")",
":",
"data",
"=",
"{",
"'svn_diff_form'",
":",
"forms",
".",
"DiffForm",
"(",
"request",
".",
"user",
".",
"username",
")",
",",
"'svn_diff_error_message'",
":",
"''",
"}",
"if",
"(",
"request",
".",
"method",
"==",
"'POST'",
")",
":",
"temp_svn_directory",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"form",
"=",
"forms",
".",
"DiffForm",
"(",
"request",
".",
"user",
".",
"username",
",",
"temp_svn_directory",
",",
"request",
".",
"POST",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"form",
".",
"commit_diff",
"(",
")",
"view_helpers",
".",
"set_mission_completed",
"(",
"request",
".",
"user",
".",
"get_profile",
"(",
")",
",",
"'svn_diff'",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'svn_diff'",
")",
")",
"data",
"[",
"'svn_diff_form'",
"]",
"=",
"form",
"shutil",
".",
"rmtree",
"(",
"temp_svn_directory",
")",
"request",
".",
"method",
"=",
"'GET'",
"return",
"Diff",
".",
"as_view",
"(",
")",
"(",
"request",
",",
"extra_context_data",
"=",
"data",
")"
] |
handle submitting the results of an svn diff to the mission 1 .
|
train
| false
|
50,691
|
@frappe.whitelist()
def create_new_folder(file_name, folder):
file = frappe.new_doc(u'File')
file.file_name = file_name
file.is_folder = 1
file.folder = folder
file.insert()
|
[
"@",
"frappe",
".",
"whitelist",
"(",
")",
"def",
"create_new_folder",
"(",
"file_name",
",",
"folder",
")",
":",
"file",
"=",
"frappe",
".",
"new_doc",
"(",
"u'File'",
")",
"file",
".",
"file_name",
"=",
"file_name",
"file",
".",
"is_folder",
"=",
"1",
"file",
".",
"folder",
"=",
"folder",
"file",
".",
"insert",
"(",
")"
] |
create new folder under current parent folder .
|
train
| false
|
50,693
|
@pytest.fixture
def project_store_resources0(project0, subdir0):
from pootle_project.models import ProjectResource
from pootle_store.models import Store
store = subdir0.child_stores.live().first()
resources = Store.objects.live().filter(name=store.name, parent__name=subdir0.name, translation_project__project=project0)
return ProjectResource(resources, ('/projects/%s/%s/%s' % (project0.code, subdir0.name, store.name)))
|
[
"@",
"pytest",
".",
"fixture",
"def",
"project_store_resources0",
"(",
"project0",
",",
"subdir0",
")",
":",
"from",
"pootle_project",
".",
"models",
"import",
"ProjectResource",
"from",
"pootle_store",
".",
"models",
"import",
"Store",
"store",
"=",
"subdir0",
".",
"child_stores",
".",
"live",
"(",
")",
".",
"first",
"(",
")",
"resources",
"=",
"Store",
".",
"objects",
".",
"live",
"(",
")",
".",
"filter",
"(",
"name",
"=",
"store",
".",
"name",
",",
"parent__name",
"=",
"subdir0",
".",
"name",
",",
"translation_project__project",
"=",
"project0",
")",
"return",
"ProjectResource",
"(",
"resources",
",",
"(",
"'/projects/%s/%s/%s'",
"%",
"(",
"project0",
".",
"code",
",",
"subdir0",
".",
"name",
",",
"store",
".",
"name",
")",
")",
")"
] |
returns a projectresource object for a store .
|
train
| false
|
50,695
|
def get_language_objects(site_id=None):
return list(get_languages(site_id))
|
[
"def",
"get_language_objects",
"(",
"site_id",
"=",
"None",
")",
":",
"return",
"list",
"(",
"get_languages",
"(",
"site_id",
")",
")"
] |
returns list of all language objects filled up by default values .
|
train
| false
|
50,696
|
def angle_between_vectors(v0, v1, directed=True, axis=0):
v0 = numpy.array(v0, dtype=numpy.float64, copy=False)
v1 = numpy.array(v1, dtype=numpy.float64, copy=False)
dot = numpy.sum((v0 * v1), axis=axis)
dot /= (vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis))
return numpy.arccos((dot if directed else numpy.fabs(dot)))
|
[
"def",
"angle_between_vectors",
"(",
"v0",
",",
"v1",
",",
"directed",
"=",
"True",
",",
"axis",
"=",
"0",
")",
":",
"v0",
"=",
"numpy",
".",
"array",
"(",
"v0",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"v1",
"=",
"numpy",
".",
"array",
"(",
"v1",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"dot",
"=",
"numpy",
".",
"sum",
"(",
"(",
"v0",
"*",
"v1",
")",
",",
"axis",
"=",
"axis",
")",
"dot",
"/=",
"(",
"vector_norm",
"(",
"v0",
",",
"axis",
"=",
"axis",
")",
"*",
"vector_norm",
"(",
"v1",
",",
"axis",
"=",
"axis",
")",
")",
"return",
"numpy",
".",
"arccos",
"(",
"(",
"dot",
"if",
"directed",
"else",
"numpy",
".",
"fabs",
"(",
"dot",
")",
")",
")"
] |
return angle between vectors .
|
train
| true
|
50,697
|
def _requireSSL(decoratee):
if (SSL is None):
@wraps(decoratee)
def raiseNotImplemented(*a, **kw):
'\n pyOpenSSL is not available.\n\n @param a: The positional arguments for C{decoratee}.\n\n @param kw: The keyword arguments for C{decoratee}.\n\n @raise NotImplementedError: Always.\n '
raise NotImplementedError('SSL support unavailable')
return raiseNotImplemented
return decoratee
|
[
"def",
"_requireSSL",
"(",
"decoratee",
")",
":",
"if",
"(",
"SSL",
"is",
"None",
")",
":",
"@",
"wraps",
"(",
"decoratee",
")",
"def",
"raiseNotImplemented",
"(",
"*",
"a",
",",
"**",
"kw",
")",
":",
"raise",
"NotImplementedError",
"(",
"'SSL support unavailable'",
")",
"return",
"raiseNotImplemented",
"return",
"decoratee"
] |
the decorated method requires pyopenssl to be present .
|
train
| false
|
50,698
|
def document_collection_object(section, collection_model, include_signature=True):
if include_signature:
section.style.start_sphinx_py_attr(collection_model.name)
section.include_doc_string(('A collection of %s resources' % collection_model.resource.type))
|
[
"def",
"document_collection_object",
"(",
"section",
",",
"collection_model",
",",
"include_signature",
"=",
"True",
")",
":",
"if",
"include_signature",
":",
"section",
".",
"style",
".",
"start_sphinx_py_attr",
"(",
"collection_model",
".",
"name",
")",
"section",
".",
"include_doc_string",
"(",
"(",
"'A collection of %s resources'",
"%",
"collection_model",
".",
"resource",
".",
"type",
")",
")"
] |
documents a collection resource object .
|
train
| false
|
50,699
|
def normalize_known_hosts_key(key):
k = key.strip()
k = key.split()
d = dict()
if (k[0][0] == '@'):
d['options'] = k[0]
d['host'] = k[1]
d['type'] = k[2]
d['key'] = k[3]
else:
d['host'] = k[0]
d['type'] = k[1]
d['key'] = k[2]
return d
|
[
"def",
"normalize_known_hosts_key",
"(",
"key",
")",
":",
"k",
"=",
"key",
".",
"strip",
"(",
")",
"k",
"=",
"key",
".",
"split",
"(",
")",
"d",
"=",
"dict",
"(",
")",
"if",
"(",
"k",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"'@'",
")",
":",
"d",
"[",
"'options'",
"]",
"=",
"k",
"[",
"0",
"]",
"d",
"[",
"'host'",
"]",
"=",
"k",
"[",
"1",
"]",
"d",
"[",
"'type'",
"]",
"=",
"k",
"[",
"2",
"]",
"d",
"[",
"'key'",
"]",
"=",
"k",
"[",
"3",
"]",
"else",
":",
"d",
"[",
"'host'",
"]",
"=",
"k",
"[",
"0",
"]",
"d",
"[",
"'type'",
"]",
"=",
"k",
"[",
"1",
"]",
"d",
"[",
"'key'",
"]",
"=",
"k",
"[",
"2",
"]",
"return",
"d"
] |
transform a key .
|
train
| false
|
50,700
|
def copy_strip():
src = join(abspath(dirname(dirname(dirname(__file__)))), 'Doc', 'build', 'html', 'library', 'idle.html')
dst = join(abspath(dirname(__file__)), 'help.html')
with open(src, 'rb') as inn:
with open(dst, 'wb') as out:
for line in inn:
out.write((line.rstrip() + '\n'))
print 'idle.html copied to help.html'
|
[
"def",
"copy_strip",
"(",
")",
":",
"src",
"=",
"join",
"(",
"abspath",
"(",
"dirname",
"(",
"dirname",
"(",
"dirname",
"(",
"__file__",
")",
")",
")",
")",
",",
"'Doc'",
",",
"'build'",
",",
"'html'",
",",
"'library'",
",",
"'idle.html'",
")",
"dst",
"=",
"join",
"(",
"abspath",
"(",
"dirname",
"(",
"__file__",
")",
")",
",",
"'help.html'",
")",
"with",
"open",
"(",
"src",
",",
"'rb'",
")",
"as",
"inn",
":",
"with",
"open",
"(",
"dst",
",",
"'wb'",
")",
"as",
"out",
":",
"for",
"line",
"in",
"inn",
":",
"out",
".",
"write",
"(",
"(",
"line",
".",
"rstrip",
"(",
")",
"+",
"'\\n'",
")",
")",
"print",
"'idle.html copied to help.html'"
] |
copy idle .
|
train
| false
|
50,701
|
def get_stored_content_length(headers):
length = headers.get('x-goog-stored-content-length')
if (length is None):
length = headers.get('content-length')
return length
|
[
"def",
"get_stored_content_length",
"(",
"headers",
")",
":",
"length",
"=",
"headers",
".",
"get",
"(",
"'x-goog-stored-content-length'",
")",
"if",
"(",
"length",
"is",
"None",
")",
":",
"length",
"=",
"headers",
".",
"get",
"(",
"'content-length'",
")",
"return",
"length"
] |
return the content length of the object as stored in gcs .
|
train
| true
|
50,702
|
def flac2wav(path, keep=True):
flac_path = _getFlacPath()
flac_files = []
if path.endswith('.flac'):
flac_files = [path]
elif ((type(path) == str) and os.path.isdir(path)):
flac_files = glob.glob(os.path.join(path, '*.flac'))
if (len(flac_files) == 0):
logging.warn(('failed to find .flac file(s) from %s' % path))
return None
wav_files = []
for flacfile in flac_files:
wavname = (flacfile.strip('.flac') + '.wav')
flac_cmd = [flac_path, '-d', '--totally-silent', '-f', '-o', wavname, flacfile]
(_junk, se) = core.shellCall(flac_cmd, stderr=True)
if se:
logging.error(se)
if (not keep):
os.unlink(flacfile)
wav_files.append(wavname)
if (len(wav_files) == 1):
return wav_files[0]
else:
return wav_files
|
[
"def",
"flac2wav",
"(",
"path",
",",
"keep",
"=",
"True",
")",
":",
"flac_path",
"=",
"_getFlacPath",
"(",
")",
"flac_files",
"=",
"[",
"]",
"if",
"path",
".",
"endswith",
"(",
"'.flac'",
")",
":",
"flac_files",
"=",
"[",
"path",
"]",
"elif",
"(",
"(",
"type",
"(",
"path",
")",
"==",
"str",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
")",
":",
"flac_files",
"=",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'*.flac'",
")",
")",
"if",
"(",
"len",
"(",
"flac_files",
")",
"==",
"0",
")",
":",
"logging",
".",
"warn",
"(",
"(",
"'failed to find .flac file(s) from %s'",
"%",
"path",
")",
")",
"return",
"None",
"wav_files",
"=",
"[",
"]",
"for",
"flacfile",
"in",
"flac_files",
":",
"wavname",
"=",
"(",
"flacfile",
".",
"strip",
"(",
"'.flac'",
")",
"+",
"'.wav'",
")",
"flac_cmd",
"=",
"[",
"flac_path",
",",
"'-d'",
",",
"'--totally-silent'",
",",
"'-f'",
",",
"'-o'",
",",
"wavname",
",",
"flacfile",
"]",
"(",
"_junk",
",",
"se",
")",
"=",
"core",
".",
"shellCall",
"(",
"flac_cmd",
",",
"stderr",
"=",
"True",
")",
"if",
"se",
":",
"logging",
".",
"error",
"(",
"se",
")",
"if",
"(",
"not",
"keep",
")",
":",
"os",
".",
"unlink",
"(",
"flacfile",
")",
"wav_files",
".",
"append",
"(",
"wavname",
")",
"if",
"(",
"len",
"(",
"wav_files",
")",
"==",
"1",
")",
":",
"return",
"wav_files",
"[",
"0",
"]",
"else",
":",
"return",
"wav_files"
] |
uncompress: convert .
|
train
| false
|
50,703
|
def is_leaf(cluster):
return (len(cluster) == 1)
|
[
"def",
"is_leaf",
"(",
"cluster",
")",
":",
"return",
"(",
"len",
"(",
"cluster",
")",
"==",
"1",
")"
] |
a cluster is a leaf if it has length 1 .
|
train
| false
|
50,704
|
@profiler.trace
def remove_group_role(request, role, group, domain=None, project=None):
manager = keystoneclient(request, admin=True).roles
return manager.revoke(role=role, group=group, project=project, domain=domain)
|
[
"@",
"profiler",
".",
"trace",
"def",
"remove_group_role",
"(",
"request",
",",
"role",
",",
"group",
",",
"domain",
"=",
"None",
",",
"project",
"=",
"None",
")",
":",
"manager",
"=",
"keystoneclient",
"(",
"request",
",",
"admin",
"=",
"True",
")",
".",
"roles",
"return",
"manager",
".",
"revoke",
"(",
"role",
"=",
"role",
",",
"group",
"=",
"group",
",",
"project",
"=",
"project",
",",
"domain",
"=",
"domain",
")"
] |
removes a given single role for a group from a domain or project .
|
train
| true
|
50,707
|
def _get_tab_registry(win_id, tab_id):
if (tab_id is None):
raise ValueError('Got tab_id None (win_id {})'.format(win_id))
if ((tab_id == 'current') and (win_id is None)):
app = get('app')
window = app.activeWindow()
if ((window is None) or (not hasattr(window, 'win_id'))):
raise RegistryUnavailableError('tab')
win_id = window.win_id
elif (win_id is not None):
window = window_registry[win_id]
else:
raise TypeError('window is None with scope tab!')
if (tab_id == 'current'):
tabbed_browser = get('tabbed-browser', scope='window', window=win_id)
tab = tabbed_browser.currentWidget()
if (tab is None):
raise RegistryUnavailableError('window')
tab_id = tab.tab_id
tab_registry = get('tab-registry', scope='window', window=win_id)
try:
return tab_registry[tab_id].registry
except AttributeError:
raise RegistryUnavailableError('tab')
|
[
"def",
"_get_tab_registry",
"(",
"win_id",
",",
"tab_id",
")",
":",
"if",
"(",
"tab_id",
"is",
"None",
")",
":",
"raise",
"ValueError",
"(",
"'Got tab_id None (win_id {})'",
".",
"format",
"(",
"win_id",
")",
")",
"if",
"(",
"(",
"tab_id",
"==",
"'current'",
")",
"and",
"(",
"win_id",
"is",
"None",
")",
")",
":",
"app",
"=",
"get",
"(",
"'app'",
")",
"window",
"=",
"app",
".",
"activeWindow",
"(",
")",
"if",
"(",
"(",
"window",
"is",
"None",
")",
"or",
"(",
"not",
"hasattr",
"(",
"window",
",",
"'win_id'",
")",
")",
")",
":",
"raise",
"RegistryUnavailableError",
"(",
"'tab'",
")",
"win_id",
"=",
"window",
".",
"win_id",
"elif",
"(",
"win_id",
"is",
"not",
"None",
")",
":",
"window",
"=",
"window_registry",
"[",
"win_id",
"]",
"else",
":",
"raise",
"TypeError",
"(",
"'window is None with scope tab!'",
")",
"if",
"(",
"tab_id",
"==",
"'current'",
")",
":",
"tabbed_browser",
"=",
"get",
"(",
"'tabbed-browser'",
",",
"scope",
"=",
"'window'",
",",
"window",
"=",
"win_id",
")",
"tab",
"=",
"tabbed_browser",
".",
"currentWidget",
"(",
")",
"if",
"(",
"tab",
"is",
"None",
")",
":",
"raise",
"RegistryUnavailableError",
"(",
"'window'",
")",
"tab_id",
"=",
"tab",
".",
"tab_id",
"tab_registry",
"=",
"get",
"(",
"'tab-registry'",
",",
"scope",
"=",
"'window'",
",",
"window",
"=",
"win_id",
")",
"try",
":",
"return",
"tab_registry",
"[",
"tab_id",
"]",
".",
"registry",
"except",
"AttributeError",
":",
"raise",
"RegistryUnavailableError",
"(",
"'tab'",
")"
] |
get the registry of a tab .
|
train
| false
|
50,708
|
def numToHex(num, numBytes):
hexString = ''
if (not isinstance(num, int)):
return ((-1), 'Bad number')
try:
hexNumber = hex(num)[2:]
if ((len(hexNumber) % 2) != 0):
hexNumber = ('0' + hexNumber)
for i in range(0, (len(hexNumber) - 1), 2):
hexString += chr(int((hexNumber[i] + hexNumber[(i + 1)]), 16))
hexString = (('\x00' * (numBytes - len(hexString))) + hexString)
except:
return ((-1), 'Error in hexadecimal conversion')
return (0, hexString)
|
[
"def",
"numToHex",
"(",
"num",
",",
"numBytes",
")",
":",
"hexString",
"=",
"''",
"if",
"(",
"not",
"isinstance",
"(",
"num",
",",
"int",
")",
")",
":",
"return",
"(",
"(",
"-",
"1",
")",
",",
"'Bad number'",
")",
"try",
":",
"hexNumber",
"=",
"hex",
"(",
"num",
")",
"[",
"2",
":",
"]",
"if",
"(",
"(",
"len",
"(",
"hexNumber",
")",
"%",
"2",
")",
"!=",
"0",
")",
":",
"hexNumber",
"=",
"(",
"'0'",
"+",
"hexNumber",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"(",
"len",
"(",
"hexNumber",
")",
"-",
"1",
")",
",",
"2",
")",
":",
"hexString",
"+=",
"chr",
"(",
"int",
"(",
"(",
"hexNumber",
"[",
"i",
"]",
"+",
"hexNumber",
"[",
"(",
"i",
"+",
"1",
")",
"]",
")",
",",
"16",
")",
")",
"hexString",
"=",
"(",
"(",
"'\\x00'",
"*",
"(",
"numBytes",
"-",
"len",
"(",
"hexString",
")",
")",
")",
"+",
"hexString",
")",
"except",
":",
"return",
"(",
"(",
"-",
"1",
")",
",",
"'Error in hexadecimal conversion'",
")",
"return",
"(",
"0",
",",
"hexString",
")"
] |
given a number returns its hexadecimal format with the specified length .
|
train
| false
|
50,709
|
def detrend_none(x):
return x
|
[
"def",
"detrend_none",
"(",
"x",
")",
":",
"return",
"x"
] |
return x: no detrending .
|
train
| false
|
50,710
|
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
message = 'Authentication credentials not found'
success = False
try:
auth_jwt()
success = True
except JWTError as e:
if ((e.headers is not None) and ('WWW-Authenticate' not in e.headers)):
raise NotAuthorizedError(message=((e.error + ': ') + e.description))
if (not success):
results = auth_basic()
if (not results[0]):
if results[1]:
raise NotAuthorizedError(message=results[1])
else:
success = True
if (not success):
if login.current_user.is_authenticated:
g.user = UserModel.query.get(login.current_user.id)
success = True
else:
g.user.update_lat()
if success:
return f(*args, **kwargs)
else:
raise NotAuthorizedError(message=message)
return decorated
|
[
"def",
"requires_auth",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"decorated",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"message",
"=",
"'Authentication credentials not found'",
"success",
"=",
"False",
"try",
":",
"auth_jwt",
"(",
")",
"success",
"=",
"True",
"except",
"JWTError",
"as",
"e",
":",
"if",
"(",
"(",
"e",
".",
"headers",
"is",
"not",
"None",
")",
"and",
"(",
"'WWW-Authenticate'",
"not",
"in",
"e",
".",
"headers",
")",
")",
":",
"raise",
"NotAuthorizedError",
"(",
"message",
"=",
"(",
"(",
"e",
".",
"error",
"+",
"': '",
")",
"+",
"e",
".",
"description",
")",
")",
"if",
"(",
"not",
"success",
")",
":",
"results",
"=",
"auth_basic",
"(",
")",
"if",
"(",
"not",
"results",
"[",
"0",
"]",
")",
":",
"if",
"results",
"[",
"1",
"]",
":",
"raise",
"NotAuthorizedError",
"(",
"message",
"=",
"results",
"[",
"1",
"]",
")",
"else",
":",
"success",
"=",
"True",
"if",
"(",
"not",
"success",
")",
":",
"if",
"login",
".",
"current_user",
".",
"is_authenticated",
":",
"g",
".",
"user",
"=",
"UserModel",
".",
"query",
".",
"get",
"(",
"login",
".",
"current_user",
".",
"id",
")",
"success",
"=",
"True",
"else",
":",
"g",
".",
"user",
".",
"update_lat",
"(",
")",
"if",
"success",
":",
"return",
"f",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"else",
":",
"raise",
"NotAuthorizedError",
"(",
"message",
"=",
"message",
")",
"return",
"decorated"
] |
decorator for :class:tvdbclient methods that require authentication .
|
train
| false
|
50,711
|
def getSimState():
try:
status = '?'
mContext = autoclass('android.content.Context')
pythonActivity = autoclass('org.renpy.android.PythonService')
TelephonyManager = autoclass('android.telephony.TelephonyManager')
telephonyManager = cast('android.telephony.TelephonyManager', pythonActivity.mService.getSystemService(mContext.TELEPHONY_SERVICE))
simState = telephonyManager.getSimState()
if (simState == TelephonyManager.SIM_STATE_UNKNOWN):
status = 'unknown'
elif (simState == TelephonyManager.SIM_STATE_ABSENT):
status = 'absent'
elif (simState == TelephonyManager.SIM_STATE_PIN_REQUIRED):
status = 'pin_required'
elif (simState == TelephonyManager.SIM_STATE_PUK_REQUIRED):
status = 'puk_required'
elif (simState == TelephonyManager.SIM_STATE_NETWORK_LOCKED):
status = 'network_locked'
elif (simState == TelephonyManager.SIM_STATE_READY):
status = 'ready'
return status
except Exception as e:
return None
|
[
"def",
"getSimState",
"(",
")",
":",
"try",
":",
"status",
"=",
"'?'",
"mContext",
"=",
"autoclass",
"(",
"'android.content.Context'",
")",
"pythonActivity",
"=",
"autoclass",
"(",
"'org.renpy.android.PythonService'",
")",
"TelephonyManager",
"=",
"autoclass",
"(",
"'android.telephony.TelephonyManager'",
")",
"telephonyManager",
"=",
"cast",
"(",
"'android.telephony.TelephonyManager'",
",",
"pythonActivity",
".",
"mService",
".",
"getSystemService",
"(",
"mContext",
".",
"TELEPHONY_SERVICE",
")",
")",
"simState",
"=",
"telephonyManager",
".",
"getSimState",
"(",
")",
"if",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_UNKNOWN",
")",
":",
"status",
"=",
"'unknown'",
"elif",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_ABSENT",
")",
":",
"status",
"=",
"'absent'",
"elif",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_PIN_REQUIRED",
")",
":",
"status",
"=",
"'pin_required'",
"elif",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_PUK_REQUIRED",
")",
":",
"status",
"=",
"'puk_required'",
"elif",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_NETWORK_LOCKED",
")",
":",
"status",
"=",
"'network_locked'",
"elif",
"(",
"simState",
"==",
"TelephonyManager",
".",
"SIM_STATE_READY",
")",
":",
"status",
"=",
"'ready'",
"return",
"status",
"except",
"Exception",
"as",
"e",
":",
"return",
"None"
] |
returns a string indicating the state of the default sim card returns none if an error .
|
train
| false
|
50,713
|
def test_csv_table_read():
lines = ['# a, b', '1, 2', '3, 4']
t = ascii.read(lines)
assert (t.colnames == ['a', 'b'])
|
[
"def",
"test_csv_table_read",
"(",
")",
":",
"lines",
"=",
"[",
"'# a, b'",
",",
"'1, 2'",
",",
"'3, 4'",
"]",
"t",
"=",
"ascii",
".",
"read",
"(",
"lines",
")",
"assert",
"(",
"t",
".",
"colnames",
"==",
"[",
"'a'",
",",
"'b'",
"]",
")"
] |
check for a regression introduced by #1935 .
|
train
| false
|
50,715
|
def has_results(errors=None, failures=None, skipped=None, expected_failures=None, unexpected_successes=None, tests_run=None):
if (errors is None):
errors = Equals([])
if (failures is None):
failures = Equals([])
if (skipped is None):
skipped = Equals([])
if (expected_failures is None):
expected_failures = Equals([])
if (unexpected_successes is None):
unexpected_successes = Equals([])
if (tests_run is None):
tests_run = Equals(0)
return MatchesStructure(errors=errors, failures=failures, skipped=skipped, expectedFailures=expected_failures, unexpectedSuccesses=unexpected_successes, testsRun=tests_run)
|
[
"def",
"has_results",
"(",
"errors",
"=",
"None",
",",
"failures",
"=",
"None",
",",
"skipped",
"=",
"None",
",",
"expected_failures",
"=",
"None",
",",
"unexpected_successes",
"=",
"None",
",",
"tests_run",
"=",
"None",
")",
":",
"if",
"(",
"errors",
"is",
"None",
")",
":",
"errors",
"=",
"Equals",
"(",
"[",
"]",
")",
"if",
"(",
"failures",
"is",
"None",
")",
":",
"failures",
"=",
"Equals",
"(",
"[",
"]",
")",
"if",
"(",
"skipped",
"is",
"None",
")",
":",
"skipped",
"=",
"Equals",
"(",
"[",
"]",
")",
"if",
"(",
"expected_failures",
"is",
"None",
")",
":",
"expected_failures",
"=",
"Equals",
"(",
"[",
"]",
")",
"if",
"(",
"unexpected_successes",
"is",
"None",
")",
":",
"unexpected_successes",
"=",
"Equals",
"(",
"[",
"]",
")",
"if",
"(",
"tests_run",
"is",
"None",
")",
":",
"tests_run",
"=",
"Equals",
"(",
"0",
")",
"return",
"MatchesStructure",
"(",
"errors",
"=",
"errors",
",",
"failures",
"=",
"failures",
",",
"skipped",
"=",
"skipped",
",",
"expectedFailures",
"=",
"expected_failures",
",",
"unexpectedSuccesses",
"=",
"unexpected_successes",
",",
"testsRun",
"=",
"tests_run",
")"
] |
return a matcher on test results .
|
train
| false
|
50,716
|
def _check_reference(inst):
if _needs_eeg_average_ref_proj(inst.info):
raise ValueError('EEG average reference is mandatory for inverse modeling, use set_eeg_reference method.')
if inst.info['custom_ref_applied']:
raise ValueError('Custom EEG reference is not allowed for inverse modeling.')
|
[
"def",
"_check_reference",
"(",
"inst",
")",
":",
"if",
"_needs_eeg_average_ref_proj",
"(",
"inst",
".",
"info",
")",
":",
"raise",
"ValueError",
"(",
"'EEG average reference is mandatory for inverse modeling, use set_eeg_reference method.'",
")",
"if",
"inst",
".",
"info",
"[",
"'custom_ref_applied'",
"]",
":",
"raise",
"ValueError",
"(",
"'Custom EEG reference is not allowed for inverse modeling.'",
")"
] |
check for eeg ref .
|
train
| false
|
50,717
|
def _fix_related_fields(srv, data, service_ids):
if (srv[0] not in RELATED_FIELDS):
return data
for field in RELATED_FIELDS[srv[0]]:
if (field[0] not in data):
data[field[1]] = None
continue
old_value = data[field[0]]
if (type(old_value) == list):
ls = []
for i in old_value:
old_id = i['id']
new_id = service_ids[field[2]][old_id]
ls += [new_id]
del data[field[0]]
data[field[1]] = ls
else:
if (type(old_value) == dict):
old_id = old_value['id']
else:
old_id = old_value
del data[field[0]]
if (old_id is None):
data[field[1]] = None
else:
data[field[1]] = service_ids[field[2]][old_id]
return data
|
[
"def",
"_fix_related_fields",
"(",
"srv",
",",
"data",
",",
"service_ids",
")",
":",
"if",
"(",
"srv",
"[",
"0",
"]",
"not",
"in",
"RELATED_FIELDS",
")",
":",
"return",
"data",
"for",
"field",
"in",
"RELATED_FIELDS",
"[",
"srv",
"[",
"0",
"]",
"]",
":",
"if",
"(",
"field",
"[",
"0",
"]",
"not",
"in",
"data",
")",
":",
"data",
"[",
"field",
"[",
"1",
"]",
"]",
"=",
"None",
"continue",
"old_value",
"=",
"data",
"[",
"field",
"[",
"0",
"]",
"]",
"if",
"(",
"type",
"(",
"old_value",
")",
"==",
"list",
")",
":",
"ls",
"=",
"[",
"]",
"for",
"i",
"in",
"old_value",
":",
"old_id",
"=",
"i",
"[",
"'id'",
"]",
"new_id",
"=",
"service_ids",
"[",
"field",
"[",
"2",
"]",
"]",
"[",
"old_id",
"]",
"ls",
"+=",
"[",
"new_id",
"]",
"del",
"data",
"[",
"field",
"[",
"0",
"]",
"]",
"data",
"[",
"field",
"[",
"1",
"]",
"]",
"=",
"ls",
"else",
":",
"if",
"(",
"type",
"(",
"old_value",
")",
"==",
"dict",
")",
":",
"old_id",
"=",
"old_value",
"[",
"'id'",
"]",
"else",
":",
"old_id",
"=",
"old_value",
"del",
"data",
"[",
"field",
"[",
"0",
"]",
"]",
"if",
"(",
"old_id",
"is",
"None",
")",
":",
"data",
"[",
"field",
"[",
"1",
"]",
"]",
"=",
"None",
"else",
":",
"data",
"[",
"field",
"[",
"1",
"]",
"]",
"=",
"service_ids",
"[",
"field",
"[",
"2",
"]",
"]",
"[",
"old_id",
"]",
"return",
"data"
] |
fixes the ids services which are related to others .
|
train
| false
|
50,718
|
def select_random_ports(n):
ports = []
sockets = []
for i in range(n):
sock = socket.socket()
sock.bind(('', 0))
ports.append(sock.getsockname()[1])
sockets.append(sock)
for sock in sockets:
sock.close()
return ports
|
[
"def",
"select_random_ports",
"(",
"n",
")",
":",
"ports",
"=",
"[",
"]",
"sockets",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"n",
")",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
")",
"sock",
".",
"bind",
"(",
"(",
"''",
",",
"0",
")",
")",
"ports",
".",
"append",
"(",
"sock",
".",
"getsockname",
"(",
")",
"[",
"1",
"]",
")",
"sockets",
".",
"append",
"(",
"sock",
")",
"for",
"sock",
"in",
"sockets",
":",
"sock",
".",
"close",
"(",
")",
"return",
"ports"
] |
select and return n random ports that are available .
|
train
| false
|
50,719
|
@verbose
def compare_fiff(fname_1, fname_2, fname_out=None, show=True, indent=' ', read_limit=np.inf, max_str=30, verbose=None):
file_1 = show_fiff(fname_1, output=list, indent=indent, read_limit=read_limit, max_str=max_str)
file_2 = show_fiff(fname_2, output=list, indent=indent, read_limit=read_limit, max_str=max_str)
diff = difflib.HtmlDiff().make_file(file_1, file_2, fname_1, fname_2)
if (fname_out is not None):
f = open(fname_out, 'wb')
else:
f = tempfile.NamedTemporaryFile('wb', delete=False, suffix='.html')
fname_out = f.name
with f as fid:
fid.write(diff.encode('utf-8'))
if (show is True):
webbrowser.open_new_tab(fname_out)
return fname_out
|
[
"@",
"verbose",
"def",
"compare_fiff",
"(",
"fname_1",
",",
"fname_2",
",",
"fname_out",
"=",
"None",
",",
"show",
"=",
"True",
",",
"indent",
"=",
"' '",
",",
"read_limit",
"=",
"np",
".",
"inf",
",",
"max_str",
"=",
"30",
",",
"verbose",
"=",
"None",
")",
":",
"file_1",
"=",
"show_fiff",
"(",
"fname_1",
",",
"output",
"=",
"list",
",",
"indent",
"=",
"indent",
",",
"read_limit",
"=",
"read_limit",
",",
"max_str",
"=",
"max_str",
")",
"file_2",
"=",
"show_fiff",
"(",
"fname_2",
",",
"output",
"=",
"list",
",",
"indent",
"=",
"indent",
",",
"read_limit",
"=",
"read_limit",
",",
"max_str",
"=",
"max_str",
")",
"diff",
"=",
"difflib",
".",
"HtmlDiff",
"(",
")",
".",
"make_file",
"(",
"file_1",
",",
"file_2",
",",
"fname_1",
",",
"fname_2",
")",
"if",
"(",
"fname_out",
"is",
"not",
"None",
")",
":",
"f",
"=",
"open",
"(",
"fname_out",
",",
"'wb'",
")",
"else",
":",
"f",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"'wb'",
",",
"delete",
"=",
"False",
",",
"suffix",
"=",
"'.html'",
")",
"fname_out",
"=",
"f",
".",
"name",
"with",
"f",
"as",
"fid",
":",
"fid",
".",
"write",
"(",
"diff",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"if",
"(",
"show",
"is",
"True",
")",
":",
"webbrowser",
".",
"open_new_tab",
"(",
"fname_out",
")",
"return",
"fname_out"
] |
compare the contents of two fiff files using diff and show_fiff .
|
train
| false
|
50,721
|
def _get_size_linux():
def ioctl_GWINSZ(fd):
'\n Attempt to discover the dimensions of a terminal window, using IOCTL.\n '
try:
import fcntl, termios
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
return None
return cr
cr = (ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2))
if (not cr):
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if (not cr):
try:
cr = (env['LINES'], env['COLUMNS'])
except:
return None
return (int(cr[1]), int(cr[0]))
|
[
"def",
"_get_size_linux",
"(",
")",
":",
"def",
"ioctl_GWINSZ",
"(",
"fd",
")",
":",
"try",
":",
"import",
"fcntl",
",",
"termios",
"cr",
"=",
"struct",
".",
"unpack",
"(",
"'hh'",
",",
"fcntl",
".",
"ioctl",
"(",
"fd",
",",
"termios",
".",
"TIOCGWINSZ",
",",
"'1234'",
")",
")",
"except",
":",
"return",
"None",
"return",
"cr",
"cr",
"=",
"(",
"ioctl_GWINSZ",
"(",
"0",
")",
"or",
"ioctl_GWINSZ",
"(",
"1",
")",
"or",
"ioctl_GWINSZ",
"(",
"2",
")",
")",
"if",
"(",
"not",
"cr",
")",
":",
"try",
":",
"fd",
"=",
"os",
".",
"open",
"(",
"os",
".",
"ctermid",
"(",
")",
",",
"os",
".",
"O_RDONLY",
")",
"cr",
"=",
"ioctl_GWINSZ",
"(",
"fd",
")",
"os",
".",
"close",
"(",
"fd",
")",
"except",
":",
"pass",
"if",
"(",
"not",
"cr",
")",
":",
"try",
":",
"cr",
"=",
"(",
"env",
"[",
"'LINES'",
"]",
",",
"env",
"[",
"'COLUMNS'",
"]",
")",
"except",
":",
"return",
"None",
"return",
"(",
"int",
"(",
"cr",
"[",
"1",
"]",
")",
",",
"int",
"(",
"cr",
"[",
"0",
"]",
")",
")"
] |
attempt to discover the dimensions of a terminal window .
|
train
| false
|
50,722
|
def primary_collator():
global _primary_collator
if (_primary_collator is None):
_primary_collator = collator().clone()
_primary_collator.strength = _icu.UCOL_PRIMARY
return _primary_collator
|
[
"def",
"primary_collator",
"(",
")",
":",
"global",
"_primary_collator",
"if",
"(",
"_primary_collator",
"is",
"None",
")",
":",
"_primary_collator",
"=",
"collator",
"(",
")",
".",
"clone",
"(",
")",
"_primary_collator",
".",
"strength",
"=",
"_icu",
".",
"UCOL_PRIMARY",
"return",
"_primary_collator"
] |
ignores case differences and accented characters .
|
train
| false
|
50,723
|
def getFirstValue(gcodeText, word):
for line in archive.getTextLines(gcodeText):
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
if (gcodec.getFirstWord(splitLine) == word):
return splitLine[1]
return ''
|
[
"def",
"getFirstValue",
"(",
"gcodeText",
",",
"word",
")",
":",
"for",
"line",
"in",
"archive",
".",
"getTextLines",
"(",
"gcodeText",
")",
":",
"splitLine",
"=",
"gcodec",
".",
"getSplitLineBeforeBracketSemicolon",
"(",
"line",
")",
"if",
"(",
"gcodec",
".",
"getFirstWord",
"(",
"splitLine",
")",
"==",
"word",
")",
":",
"return",
"splitLine",
"[",
"1",
"]",
"return",
"''"
] |
get the value from the first line which starts with the given word .
|
train
| false
|
50,724
|
def mergedicts_copy(d1, d2):
ret = d1.copy()
_setmerged(ret, d2)
for k in d2:
if ((k in d1) and isinstance(d1[k], dict) and isinstance(d2[k], dict)):
ret[k] = mergedicts_copy(d1[k], d2[k])
else:
ret[k] = d2[k]
return ret
|
[
"def",
"mergedicts_copy",
"(",
"d1",
",",
"d2",
")",
":",
"ret",
"=",
"d1",
".",
"copy",
"(",
")",
"_setmerged",
"(",
"ret",
",",
"d2",
")",
"for",
"k",
"in",
"d2",
":",
"if",
"(",
"(",
"k",
"in",
"d1",
")",
"and",
"isinstance",
"(",
"d1",
"[",
"k",
"]",
",",
"dict",
")",
"and",
"isinstance",
"(",
"d2",
"[",
"k",
"]",
",",
"dict",
")",
")",
":",
"ret",
"[",
"k",
"]",
"=",
"mergedicts_copy",
"(",
"d1",
"[",
"k",
"]",
",",
"d2",
"[",
"k",
"]",
")",
"else",
":",
"ret",
"[",
"k",
"]",
"=",
"d2",
"[",
"k",
"]",
"return",
"ret"
] |
recursively merge two dictionaries .
|
train
| false
|
50,725
|
def config_option_list(context, data_dict):
return {'success': False}
|
[
"def",
"config_option_list",
"(",
"context",
",",
"data_dict",
")",
":",
"return",
"{",
"'success'",
":",
"False",
"}"
] |
list runtime-editable configuration options .
|
train
| false
|
50,727
|
def count_seqs_in_filepaths(fasta_filepaths, seq_counter=count_seqs):
total = 0
counts = []
inaccessible_filepaths = []
for fasta_filepath in fasta_filepaths:
if (fasta_filepath.endswith('.fastq') or fasta_filepath.endswith('.fq')):
parser = partial(parse_fastq, enforce_qual_range=False)
elif (fasta_filepath.endswith('.tre') or fasta_filepath.endswith('.ph') or fasta_filepath.endswith('.ntree')):
def parser(f):
t = DndParser(f, constructor=PhyloNode)
return zip(t.iterTips(), repeat(''))
else:
parser = parse_fasta
try:
current_count = seq_counter(fasta_filepath, parser=parser)
counts.append((current_count, fasta_filepath))
total += current_count[0]
except IOError:
inaccessible_filepaths.append(fasta_filepath)
return (counts, total, inaccessible_filepaths)
|
[
"def",
"count_seqs_in_filepaths",
"(",
"fasta_filepaths",
",",
"seq_counter",
"=",
"count_seqs",
")",
":",
"total",
"=",
"0",
"counts",
"=",
"[",
"]",
"inaccessible_filepaths",
"=",
"[",
"]",
"for",
"fasta_filepath",
"in",
"fasta_filepaths",
":",
"if",
"(",
"fasta_filepath",
".",
"endswith",
"(",
"'.fastq'",
")",
"or",
"fasta_filepath",
".",
"endswith",
"(",
"'.fq'",
")",
")",
":",
"parser",
"=",
"partial",
"(",
"parse_fastq",
",",
"enforce_qual_range",
"=",
"False",
")",
"elif",
"(",
"fasta_filepath",
".",
"endswith",
"(",
"'.tre'",
")",
"or",
"fasta_filepath",
".",
"endswith",
"(",
"'.ph'",
")",
"or",
"fasta_filepath",
".",
"endswith",
"(",
"'.ntree'",
")",
")",
":",
"def",
"parser",
"(",
"f",
")",
":",
"t",
"=",
"DndParser",
"(",
"f",
",",
"constructor",
"=",
"PhyloNode",
")",
"return",
"zip",
"(",
"t",
".",
"iterTips",
"(",
")",
",",
"repeat",
"(",
"''",
")",
")",
"else",
":",
"parser",
"=",
"parse_fasta",
"try",
":",
"current_count",
"=",
"seq_counter",
"(",
"fasta_filepath",
",",
"parser",
"=",
"parser",
")",
"counts",
".",
"append",
"(",
"(",
"current_count",
",",
"fasta_filepath",
")",
")",
"total",
"+=",
"current_count",
"[",
"0",
"]",
"except",
"IOError",
":",
"inaccessible_filepaths",
".",
"append",
"(",
"fasta_filepath",
")",
"return",
"(",
"counts",
",",
"total",
",",
"inaccessible_filepaths",
")"
] |
wrapper to apply seq_counter to fasta_filepaths fasta_filepaths: list of one or more fasta filepaths seq_counter: a function which takes a single filepath and returns the count of the number of sequences -- this is parameterized to facilitate unit testing .
|
train
| false
|
50,728
|
def provider_handlers(providers):
return _load_provider_feature('default_handlers', providers)
|
[
"def",
"provider_handlers",
"(",
"providers",
")",
":",
"return",
"_load_provider_feature",
"(",
"'default_handlers'",
",",
"providers",
")"
] |
load tornado url handlers from an ordered list of dotted-notation modules which contain a default_handlers function default_handlers should accept a list of handlers and returns an augmented list of handlers: this allows the addition of .
|
train
| false
|
50,730
|
def test_fit_sample_auto():
ratio = 'auto'
cc = ClusterCentroids(ratio=ratio, random_state=RND_SEED)
(X_resampled, y_resampled) = cc.fit_sample(X, Y)
X_gt = np.array([[0.92923648, 0.76103773], [0.47104475, 0.44386323], [0.13347175, 0.12167502], [0.06738818, (-0.529627)], [0.17901516, 0.69860992], [0.094035, (-2.55298982)]])
y_gt = np.array([0, 0, 0, 1, 1, 1])
assert_allclose(X_resampled, X_gt, rtol=R_TOL)
assert_array_equal(y_resampled, y_gt)
|
[
"def",
"test_fit_sample_auto",
"(",
")",
":",
"ratio",
"=",
"'auto'",
"cc",
"=",
"ClusterCentroids",
"(",
"ratio",
"=",
"ratio",
",",
"random_state",
"=",
"RND_SEED",
")",
"(",
"X_resampled",
",",
"y_resampled",
")",
"=",
"cc",
".",
"fit_sample",
"(",
"X",
",",
"Y",
")",
"X_gt",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.92923648",
",",
"0.76103773",
"]",
",",
"[",
"0.47104475",
",",
"0.44386323",
"]",
",",
"[",
"0.13347175",
",",
"0.12167502",
"]",
",",
"[",
"0.06738818",
",",
"(",
"-",
"0.529627",
")",
"]",
",",
"[",
"0.17901516",
",",
"0.69860992",
"]",
",",
"[",
"0.094035",
",",
"(",
"-",
"2.55298982",
")",
"]",
"]",
")",
"y_gt",
"=",
"np",
".",
"array",
"(",
"[",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"1",
",",
"1",
"]",
")",
"assert_allclose",
"(",
"X_resampled",
",",
"X_gt",
",",
"rtol",
"=",
"R_TOL",
")",
"assert_array_equal",
"(",
"y_resampled",
",",
"y_gt",
")"
] |
test fit and sample routines with auto ratio .
|
train
| false
|
50,732
|
def libvlc_media_player_set_time(p_mi, i_time):
f = (_Cfunctions.get('libvlc_media_player_set_time', None) or _Cfunction('libvlc_media_player_set_time', ((1,), (1,)), None, None, MediaPlayer, ctypes.c_longlong))
return f(p_mi, i_time)
|
[
"def",
"libvlc_media_player_set_time",
"(",
"p_mi",
",",
"i_time",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_player_set_time'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_player_set_time'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
")",
",",
"None",
",",
"None",
",",
"MediaPlayer",
",",
"ctypes",
".",
"c_longlong",
")",
")",
"return",
"f",
"(",
"p_mi",
",",
"i_time",
")"
] |
set the movie time .
|
train
| true
|
50,733
|
def requires_application(backend=None, has=(), capable=()):
(good, msg) = has_application(backend, has, capable)
dec_backend = np.testing.dec.skipif((not good), ('Skipping test: %s' % msg))
try:
import pytest
except Exception:
return dec_backend
dec_app = pytest.mark.vispy_app_test
return composed(dec_app, dec_backend)
|
[
"def",
"requires_application",
"(",
"backend",
"=",
"None",
",",
"has",
"=",
"(",
")",
",",
"capable",
"=",
"(",
")",
")",
":",
"(",
"good",
",",
"msg",
")",
"=",
"has_application",
"(",
"backend",
",",
"has",
",",
"capable",
")",
"dec_backend",
"=",
"np",
".",
"testing",
".",
"dec",
".",
"skipif",
"(",
"(",
"not",
"good",
")",
",",
"(",
"'Skipping test: %s'",
"%",
"msg",
")",
")",
"try",
":",
"import",
"pytest",
"except",
"Exception",
":",
"return",
"dec_backend",
"dec_app",
"=",
"pytest",
".",
"mark",
".",
"vispy_app_test",
"return",
"composed",
"(",
"dec_app",
",",
"dec_backend",
")"
] |
return a decorator for tests that require an application .
|
train
| false
|
50,734
|
def get_v1_mutation_key_and_entity(v1_mutation):
if v1_mutation.HasField('delete'):
return (v1_mutation.delete, None)
else:
v1_entity = getattr(v1_mutation, v1_mutation.WhichOneof('operation'))
return (v1_entity.key, v1_entity)
|
[
"def",
"get_v1_mutation_key_and_entity",
"(",
"v1_mutation",
")",
":",
"if",
"v1_mutation",
".",
"HasField",
"(",
"'delete'",
")",
":",
"return",
"(",
"v1_mutation",
".",
"delete",
",",
"None",
")",
"else",
":",
"v1_entity",
"=",
"getattr",
"(",
"v1_mutation",
",",
"v1_mutation",
".",
"WhichOneof",
"(",
"'operation'",
")",
")",
"return",
"(",
"v1_entity",
".",
"key",
",",
"v1_entity",
")"
] |
returns the v1 key and entity for a v1 mutation proto .
|
train
| false
|
50,736
|
def _guess_node_id(node):
if (node is None):
return __salt__['grains.get']('id')
return node
|
[
"def",
"_guess_node_id",
"(",
"node",
")",
":",
"if",
"(",
"node",
"is",
"None",
")",
":",
"return",
"__salt__",
"[",
"'grains.get'",
"]",
"(",
"'id'",
")",
"return",
"node"
] |
try to guess kube node id using salt minion id .
|
train
| false
|
50,737
|
def errfunc(*args):
raise ValueError
|
[
"def",
"errfunc",
"(",
"*",
"args",
")",
":",
"raise",
"ValueError"
] |
test function that raises an error .
|
train
| false
|
50,738
|
def dirichlet_expectation(alpha):
if (len(alpha.shape) == 1):
result = (psi(alpha) - psi(np.sum(alpha)))
else:
result = (psi(alpha) - psi(np.sum(alpha, 1))[:, np.newaxis])
return result.astype(alpha.dtype)
|
[
"def",
"dirichlet_expectation",
"(",
"alpha",
")",
":",
"if",
"(",
"len",
"(",
"alpha",
".",
"shape",
")",
"==",
"1",
")",
":",
"result",
"=",
"(",
"psi",
"(",
"alpha",
")",
"-",
"psi",
"(",
"np",
".",
"sum",
"(",
"alpha",
")",
")",
")",
"else",
":",
"result",
"=",
"(",
"psi",
"(",
"alpha",
")",
"-",
"psi",
"(",
"np",
".",
"sum",
"(",
"alpha",
",",
"1",
")",
")",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
")",
"return",
"result",
".",
"astype",
"(",
"alpha",
".",
"dtype",
")"
] |
for a vector theta~dir .
|
train
| false
|
50,740
|
def setTitle(template, title, chapterNumber):
if (numberer.getNumberSections() and chapterNumber):
titleNode = dom.Text()
titleNode.ownerDocument = template.ownerDocument
titleNode.data = ('%s. ' % (chapterNumber,))
title.insert(0, titleNode)
for nodeList in (domhelpers.findNodesNamed(template, 'title'), domhelpers.findElementsWithAttribute(template, 'class', 'title')):
if nodeList:
for titleNode in title:
nodeList[0].appendChild(titleNode.cloneNode(True))
|
[
"def",
"setTitle",
"(",
"template",
",",
"title",
",",
"chapterNumber",
")",
":",
"if",
"(",
"numberer",
".",
"getNumberSections",
"(",
")",
"and",
"chapterNumber",
")",
":",
"titleNode",
"=",
"dom",
".",
"Text",
"(",
")",
"titleNode",
".",
"ownerDocument",
"=",
"template",
".",
"ownerDocument",
"titleNode",
".",
"data",
"=",
"(",
"'%s. '",
"%",
"(",
"chapterNumber",
",",
")",
")",
"title",
".",
"insert",
"(",
"0",
",",
"titleNode",
")",
"for",
"nodeList",
"in",
"(",
"domhelpers",
".",
"findNodesNamed",
"(",
"template",
",",
"'title'",
")",
",",
"domhelpers",
".",
"findElementsWithAttribute",
"(",
"template",
",",
"'class'",
",",
"'title'",
")",
")",
":",
"if",
"nodeList",
":",
"for",
"titleNode",
"in",
"title",
":",
"nodeList",
"[",
"0",
"]",
".",
"appendChild",
"(",
"titleNode",
".",
"cloneNode",
"(",
"True",
")",
")"
] |
add title and chapter number information to the template document .
|
train
| false
|
50,741
|
def _bdecode(s):
if (not s):
return s
return base64.decodestring(s)
|
[
"def",
"_bdecode",
"(",
"s",
")",
":",
"if",
"(",
"not",
"s",
")",
":",
"return",
"s",
"return",
"base64",
".",
"decodestring",
"(",
"s",
")"
] |
decodes a base64 string .
|
train
| false
|
50,744
|
@csrf_exempt
def sql_explain(request):
form = SQLSelectForm((request.POST or None))
if form.is_valid():
sql = form.cleaned_data[u'raw_sql']
params = form.cleaned_data[u'params']
vendor = form.connection.vendor
cursor = form.cursor
if (vendor == u'sqlite'):
cursor.execute((u'EXPLAIN QUERY PLAN %s' % (sql,)), params)
elif (vendor == u'postgresql'):
cursor.execute((u'EXPLAIN ANALYZE %s' % (sql,)), params)
else:
cursor.execute((u'EXPLAIN %s' % (sql,)), params)
headers = [d[0] for d in cursor.description]
result = cursor.fetchall()
cursor.close()
context = {u'result': result, u'sql': form.reformat_sql(), u'duration': form.cleaned_data[u'duration'], u'headers': headers, u'alias': form.cleaned_data[u'alias']}
return render_to_response(u'debug_toolbar/panels/sql_explain.html', context)
return HttpResponseBadRequest(u'Form errors')
|
[
"@",
"csrf_exempt",
"def",
"sql_explain",
"(",
"request",
")",
":",
"form",
"=",
"SQLSelectForm",
"(",
"(",
"request",
".",
"POST",
"or",
"None",
")",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"sql",
"=",
"form",
".",
"cleaned_data",
"[",
"u'raw_sql'",
"]",
"params",
"=",
"form",
".",
"cleaned_data",
"[",
"u'params'",
"]",
"vendor",
"=",
"form",
".",
"connection",
".",
"vendor",
"cursor",
"=",
"form",
".",
"cursor",
"if",
"(",
"vendor",
"==",
"u'sqlite'",
")",
":",
"cursor",
".",
"execute",
"(",
"(",
"u'EXPLAIN QUERY PLAN %s'",
"%",
"(",
"sql",
",",
")",
")",
",",
"params",
")",
"elif",
"(",
"vendor",
"==",
"u'postgresql'",
")",
":",
"cursor",
".",
"execute",
"(",
"(",
"u'EXPLAIN ANALYZE %s'",
"%",
"(",
"sql",
",",
")",
")",
",",
"params",
")",
"else",
":",
"cursor",
".",
"execute",
"(",
"(",
"u'EXPLAIN %s'",
"%",
"(",
"sql",
",",
")",
")",
",",
"params",
")",
"headers",
"=",
"[",
"d",
"[",
"0",
"]",
"for",
"d",
"in",
"cursor",
".",
"description",
"]",
"result",
"=",
"cursor",
".",
"fetchall",
"(",
")",
"cursor",
".",
"close",
"(",
")",
"context",
"=",
"{",
"u'result'",
":",
"result",
",",
"u'sql'",
":",
"form",
".",
"reformat_sql",
"(",
")",
",",
"u'duration'",
":",
"form",
".",
"cleaned_data",
"[",
"u'duration'",
"]",
",",
"u'headers'",
":",
"headers",
",",
"u'alias'",
":",
"form",
".",
"cleaned_data",
"[",
"u'alias'",
"]",
"}",
"return",
"render_to_response",
"(",
"u'debug_toolbar/panels/sql_explain.html'",
",",
"context",
")",
"return",
"HttpResponseBadRequest",
"(",
"u'Form errors'",
")"
] |
returns the output of the sql explain on the given query .
|
train
| false
|
50,745
|
def health_state(consul_url=None, state=None, **kwargs):
    """Return the health checks that are in the given state.

    consul_url -- base URL of the Consul agent; resolved via _get_config()
        when not supplied.
    state -- one of 'any', 'unknown', 'passing', 'warning', 'critical'.
    Keyword args: dc -- optional datacenter to query.

    Returns the _query() result dict on success, or a dict with 'message'
    and 'res': False on configuration/validation errors.
    Raises SaltInvocationError when state is not supplied.
    """
    query_params = {}
    consul_url = consul_url or _get_config()
    if not consul_url:
        log.error('No Consul URL found.')
        return {'message': 'No Consul URL found.', 'res': False}
    if not state:
        raise SaltInvocationError('Required argument "state" is missing.')
    if 'dc' in kwargs:
        query_params['dc'] = kwargs['dc']
    valid_states = ('any', 'unknown', 'passing', 'warning', 'critical')
    if state not in valid_states:
        return {'message': 'State must be any, unknown, passing, warning, or critical.',
                'res': False}
    endpoint = 'health/state/{0}'.format(state)
    return _query(consul_url=consul_url, function=endpoint,
                  query_params=query_params)
|
[
"def",
"health_state",
"(",
"consul_url",
"=",
"None",
",",
"state",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"{",
"}",
"query_params",
"=",
"{",
"}",
"if",
"(",
"not",
"consul_url",
")",
":",
"consul_url",
"=",
"_get_config",
"(",
")",
"if",
"(",
"not",
"consul_url",
")",
":",
"log",
".",
"error",
"(",
"'No Consul URL found.'",
")",
"ret",
"[",
"'message'",
"]",
"=",
"'No Consul URL found.'",
"ret",
"[",
"'res'",
"]",
"=",
"False",
"return",
"ret",
"if",
"(",
"not",
"state",
")",
":",
"raise",
"SaltInvocationError",
"(",
"'Required argument \"state\" is missing.'",
")",
"if",
"(",
"'dc'",
"in",
"kwargs",
")",
":",
"query_params",
"[",
"'dc'",
"]",
"=",
"kwargs",
"[",
"'dc'",
"]",
"if",
"(",
"state",
"not",
"in",
"(",
"'any'",
",",
"'unknown'",
",",
"'passing'",
",",
"'warning'",
",",
"'critical'",
")",
")",
":",
"ret",
"[",
"'message'",
"]",
"=",
"'State must be any, unknown, passing, warning, or critical.'",
"ret",
"[",
"'res'",
"]",
"=",
"False",
"return",
"ret",
"function",
"=",
"'health/state/{0}'",
".",
"format",
"(",
"state",
")",
"ret",
"=",
"_query",
"(",
"consul_url",
"=",
"consul_url",
",",
"function",
"=",
"function",
",",
"query_params",
"=",
"query_params",
")",
"return",
"ret"
] |
returns the checks in the state provided on the path .
|
train
| true
|
50,746
|
def maybe_schedule(s, relative=False, app=None):
    """Coerce *s* into a schedule, passing through existing schedules.

    A number is treated as a number of seconds and, like a timedelta, is
    wrapped in a new ``schedule``. Any other non-None object is assumed to
    already be a schedule and only gets its ``app`` attribute bound.
    ``None`` is returned unchanged.
    """
    if s is None:
        return None
    if isinstance(s, numbers.Number):
        s = timedelta(seconds=s)
    if isinstance(s, timedelta):
        return schedule(s, relative, app=app)
    s.app = app
    return s
|
[
"def",
"maybe_schedule",
"(",
"s",
",",
"relative",
"=",
"False",
",",
"app",
"=",
"None",
")",
":",
"if",
"(",
"s",
"is",
"not",
"None",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"numbers",
".",
"Number",
")",
":",
"s",
"=",
"timedelta",
"(",
"seconds",
"=",
"s",
")",
"if",
"isinstance",
"(",
"s",
",",
"timedelta",
")",
":",
"return",
"schedule",
"(",
"s",
",",
"relative",
",",
"app",
"=",
"app",
")",
"else",
":",
"s",
".",
"app",
"=",
"app",
"return",
"s"
] |
return schedule from number .
|
train
| false
|
50,747
|
def _get_css_imports_regex(data):
    """Return every non-empty URL referenced by the CSS document *data*.

    Each pattern in _CSS_URL_PATTERNS is applied to the whole text and the
    'url' capture group of every match is collected, in pattern order.
    """
    return [match.group('url')
            for pattern in _CSS_URL_PATTERNS
            for match in pattern.finditer(data)
            if match.group('url')]
|
[
"def",
"_get_css_imports_regex",
"(",
"data",
")",
":",
"urls",
"=",
"[",
"]",
"for",
"pattern",
"in",
"_CSS_URL_PATTERNS",
":",
"for",
"match",
"in",
"pattern",
".",
"finditer",
"(",
"data",
")",
":",
"url",
"=",
"match",
".",
"group",
"(",
"'url'",
")",
"if",
"url",
":",
"urls",
".",
"append",
"(",
"url",
")",
"return",
"urls"
] |
return all assets that are referenced in the given css document .
|
train
| false
|
50,748
|
def to_7L5M(value):
    """Split *value* into its 7 low bits and the 5 bits above them.

    Returns a tuple ``(low7, mid5)`` where ``low7`` is bits 0-6 of *value*
    and ``mid5`` is bits 7-11. Higher bits are discarded.
    """
    low7 = value & 0x7F
    mid5 = (value >> 7) & 0x1F
    return low7, mid5
|
[
"def",
"to_7L5M",
"(",
"value",
")",
":",
"return",
"(",
"(",
"value",
"&",
"127",
")",
",",
"(",
"(",
"value",
">>",
"7",
")",
"&",
"31",
")",
")"
] |
returns a list with the 7 lower bits of the value followed by the 5 higher bits .
|
train
| false
|
50,749
|
@login_required(redirect_field_name='redirect_to')
def login_protected_view_changed_redirect(request):
    """A login-protected test view using a custom redirect field name."""
    template = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
    context = Context({'user': request.user})
    return HttpResponse(template.render(context))
|
[
"@",
"login_required",
"(",
"redirect_field_name",
"=",
"'redirect_to'",
")",
"def",
"login_protected_view_changed_redirect",
"(",
"request",
")",
":",
"t",
"=",
"Template",
"(",
"'This is a login protected test. Username is {{ user.username }}.'",
",",
"name",
"=",
"'Login Template'",
")",
"c",
"=",
"Context",
"(",
"{",
"'user'",
":",
"request",
".",
"user",
"}",
")",
"return",
"HttpResponse",
"(",
"t",
".",
"render",
"(",
"c",
")",
")"
] |
a simple view that is login protected with a custom redirect field set .
|
train
| false
|
50,750
|
def get_np_state_ptr(context, builder):
    """Return a pointer to the thread-local NumPy random state."""
    state_kind = 'np'
    return get_state_ptr(context, builder, state_kind)
|
[
"def",
"get_np_state_ptr",
"(",
"context",
",",
"builder",
")",
":",
"return",
"get_state_ptr",
"(",
"context",
",",
"builder",
",",
"'np'",
")"
] |
get a pointer to the thread-local numpy random state .
|
train
| false
|
50,751
|
def _CreateClassFromElementTree(target_class, tree, namespace=None, tag=None):
if (namespace is None):
namespace = target_class._namespace
if (tag is None):
tag = target_class._tag
if (tree.tag == ('{%s}%s' % (namespace, tag))):
target = target_class()
target._HarvestElementTree(tree)
return target
else:
return None
|
[
"def",
"_CreateClassFromElementTree",
"(",
"target_class",
",",
"tree",
",",
"namespace",
"=",
"None",
",",
"tag",
"=",
"None",
")",
":",
"if",
"(",
"namespace",
"is",
"None",
")",
":",
"namespace",
"=",
"target_class",
".",
"_namespace",
"if",
"(",
"tag",
"is",
"None",
")",
":",
"tag",
"=",
"target_class",
".",
"_tag",
"if",
"(",
"tree",
".",
"tag",
"==",
"(",
"'{%s}%s'",
"%",
"(",
"namespace",
",",
"tag",
")",
")",
")",
":",
"target",
"=",
"target_class",
"(",
")",
"target",
".",
"_HarvestElementTree",
"(",
"tree",
")",
"return",
"target",
"else",
":",
"return",
"None"
] |
instantiates the class and populates members according to the tree .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.