id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
15,592 | def charcount(s):
return len(ansirx.sub('', s))
| [
"def",
"charcount",
"(",
"s",
")",
":",
"return",
"len",
"(",
"ansirx",
".",
"sub",
"(",
"''",
",",
"s",
")",
")"
] | return number of characters in string . | train | false |
15,593 | def GetApplicationId(ios):
temp_path = ('/Users/%s/Library/Application Support/iPhone Simulator/%s/Applications' % (os.environ['USER'], ios))
try:
output = subprocess.check_output(['find', '.', '-iname', 'viewfinder.app'], cwd=temp_path)
if (len(output) is 0):
raise IOError
return output[2:(-16)]
except (OSError, IOError) as e:
print '\nYou are missing the 6.1 IOS simulator... You should update the xcode simulators (Product -> Destination -> More Simulators...).\n', e
exit()
| [
"def",
"GetApplicationId",
"(",
"ios",
")",
":",
"temp_path",
"=",
"(",
"'/Users/%s/Library/Application Support/iPhone Simulator/%s/Applications'",
"%",
"(",
"os",
".",
"environ",
"[",
"'USER'",
"]",
",",
"ios",
")",
")",
"try",
":",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'find'",
",",
"'.'",
",",
"'-iname'",
",",
"'viewfinder.app'",
"]",
",",
"cwd",
"=",
"temp_path",
")",
"if",
"(",
"len",
"(",
"output",
")",
"is",
"0",
")",
":",
"raise",
"IOError",
"return",
"output",
"[",
"2",
":",
"(",
"-",
"16",
")",
"]",
"except",
"(",
"OSError",
",",
"IOError",
")",
"as",
"e",
":",
"print",
"'\\nYou are missing the 6.1 IOS simulator... You should update the xcode simulators (Product -> Destination -> More Simulators...).\\n'",
",",
"e",
"exit",
"(",
")"
] | given the version number of the ios sdk . | train | false |
15,594 | def is_float_broken():
return (str(python.NaN) != str(struct.unpack('!d', '\xff\xf8\x00\x00\x00\x00\x00\x00')[0]))
| [
"def",
"is_float_broken",
"(",
")",
":",
"return",
"(",
"str",
"(",
"python",
".",
"NaN",
")",
"!=",
"str",
"(",
"struct",
".",
"unpack",
"(",
"'!d'",
",",
"'\\xff\\xf8\\x00\\x00\\x00\\x00\\x00\\x00'",
")",
"[",
"0",
"]",
")",
")"
] | older versions of python and the windows platform are renowned for mixing up special floats . | train | false |
15,596 | def test_multiple_rng_aliasing():
rng1 = MRG_RandomStreams(1234)
rng2 = MRG_RandomStreams(2392)
assert (rng1.state_updates is not rng2.state_updates)
| [
"def",
"test_multiple_rng_aliasing",
"(",
")",
":",
"rng1",
"=",
"MRG_RandomStreams",
"(",
"1234",
")",
"rng2",
"=",
"MRG_RandomStreams",
"(",
"2392",
")",
"assert",
"(",
"rng1",
".",
"state_updates",
"is",
"not",
"rng2",
".",
"state_updates",
")"
] | test that when we have multiple random number generators . | train | false |
15,597 | def inet_pton(address_family, ip_string):
if (address_family not in set([socket.AF_INET, socket.AF_INET6])):
raise ValueError(unwrap(u'\n address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),\n not %s\n ', repr(socket.AF_INET), repr(socket.AF_INET6), repr(address_family)))
if (not isinstance(ip_string, str_cls)):
raise TypeError(unwrap(u'\n ip_string must be a unicode string, not %s\n ', type_name(ip_string)))
if (address_family == socket.AF_INET):
octets = ip_string.split(u'.')
error = (len(octets) != 4)
if (not error):
ints = []
for o in octets:
o = int(o)
if ((o > 255) or (o < 0)):
error = True
break
ints.append(o)
if error:
raise ValueError(unwrap(u'\n ip_string must be a dotted string with four integers in the\n range of 0 to 255, got %s\n ', repr(ip_string)))
return struct.pack('!BBBB', *ints)
error = False
omitted = ip_string.count(u'::')
if (omitted > 1):
error = True
elif (omitted == 0):
octets = ip_string.split(u':')
error = (len(octets) != 8)
else:
(begin, end) = ip_string.split(u'::')
begin_octets = begin.split(u':')
end_octets = end.split(u':')
missing = ((8 - len(begin_octets)) - len(end_octets))
octets = ((begin_octets + ([u'0'] * missing)) + end_octets)
if (not error):
ints = []
for o in octets:
o = int(o, 16)
if ((o > 65535) or (o < 0)):
error = True
break
ints.append(o)
return struct.pack('!HHHHHHHH', *ints)
raise ValueError(unwrap(u'\n ip_string must be a valid ipv6 string, got %s\n ', repr(ip_string)))
| [
"def",
"inet_pton",
"(",
"address_family",
",",
"ip_string",
")",
":",
"if",
"(",
"address_family",
"not",
"in",
"set",
"(",
"[",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"AF_INET6",
"]",
")",
")",
":",
"raise",
"ValueError",
"(",
"unwrap",
"(",
"u'\\n address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),\\n not %s\\n '",
",",
"repr",
"(",
"socket",
".",
"AF_INET",
")",
",",
"repr",
"(",
"socket",
".",
"AF_INET6",
")",
",",
"repr",
"(",
"address_family",
")",
")",
")",
"if",
"(",
"not",
"isinstance",
"(",
"ip_string",
",",
"str_cls",
")",
")",
":",
"raise",
"TypeError",
"(",
"unwrap",
"(",
"u'\\n ip_string must be a unicode string, not %s\\n '",
",",
"type_name",
"(",
"ip_string",
")",
")",
")",
"if",
"(",
"address_family",
"==",
"socket",
".",
"AF_INET",
")",
":",
"octets",
"=",
"ip_string",
".",
"split",
"(",
"u'.'",
")",
"error",
"=",
"(",
"len",
"(",
"octets",
")",
"!=",
"4",
")",
"if",
"(",
"not",
"error",
")",
":",
"ints",
"=",
"[",
"]",
"for",
"o",
"in",
"octets",
":",
"o",
"=",
"int",
"(",
"o",
")",
"if",
"(",
"(",
"o",
">",
"255",
")",
"or",
"(",
"o",
"<",
"0",
")",
")",
":",
"error",
"=",
"True",
"break",
"ints",
".",
"append",
"(",
"o",
")",
"if",
"error",
":",
"raise",
"ValueError",
"(",
"unwrap",
"(",
"u'\\n ip_string must be a dotted string with four integers in the\\n range of 0 to 255, got %s\\n '",
",",
"repr",
"(",
"ip_string",
")",
")",
")",
"return",
"struct",
".",
"pack",
"(",
"'!BBBB'",
",",
"*",
"ints",
")",
"error",
"=",
"False",
"omitted",
"=",
"ip_string",
".",
"count",
"(",
"u'::'",
")",
"if",
"(",
"omitted",
">",
"1",
")",
":",
"error",
"=",
"True",
"elif",
"(",
"omitted",
"==",
"0",
")",
":",
"octets",
"=",
"ip_string",
".",
"split",
"(",
"u':'",
")",
"error",
"=",
"(",
"len",
"(",
"octets",
")",
"!=",
"8",
")",
"else",
":",
"(",
"begin",
",",
"end",
")",
"=",
"ip_string",
".",
"split",
"(",
"u'::'",
")",
"begin_octets",
"=",
"begin",
".",
"split",
"(",
"u':'",
")",
"end_octets",
"=",
"end",
".",
"split",
"(",
"u':'",
")",
"missing",
"=",
"(",
"(",
"8",
"-",
"len",
"(",
"begin_octets",
")",
")",
"-",
"len",
"(",
"end_octets",
")",
")",
"octets",
"=",
"(",
"(",
"begin_octets",
"+",
"(",
"[",
"u'0'",
"]",
"*",
"missing",
")",
")",
"+",
"end_octets",
")",
"if",
"(",
"not",
"error",
")",
":",
"ints",
"=",
"[",
"]",
"for",
"o",
"in",
"octets",
":",
"o",
"=",
"int",
"(",
"o",
",",
"16",
")",
"if",
"(",
"(",
"o",
">",
"65535",
")",
"or",
"(",
"o",
"<",
"0",
")",
")",
":",
"error",
"=",
"True",
"break",
"ints",
".",
"append",
"(",
"o",
")",
"return",
"struct",
".",
"pack",
"(",
"'!HHHHHHHH'",
",",
"*",
"ints",
")",
"raise",
"ValueError",
"(",
"unwrap",
"(",
"u'\\n ip_string must be a valid ipv6 string, got %s\\n '",
",",
"repr",
"(",
"ip_string",
")",
")",
")"
] | convert the textual form of a network address into its binary form . | train | false |
15,601 | def with_config(**options):
def decorator(func):
old = {}
def setup():
for (k, v) in options.iteritems():
old[k] = getattr(settings, k, None)
setattr(settings, k, v)
def teardown():
for (k, v) in old.iteritems():
setattr(settings, k, v)
return with_setup(setup, teardown)(func)
return decorator
| [
"def",
"with_config",
"(",
"**",
"options",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"old",
"=",
"{",
"}",
"def",
"setup",
"(",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"options",
".",
"iteritems",
"(",
")",
":",
"old",
"[",
"k",
"]",
"=",
"getattr",
"(",
"settings",
",",
"k",
",",
"None",
")",
"setattr",
"(",
"settings",
",",
"k",
",",
"v",
")",
"def",
"teardown",
"(",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"old",
".",
"iteritems",
"(",
")",
":",
"setattr",
"(",
"settings",
",",
"k",
",",
"v",
")",
"return",
"with_setup",
"(",
"setup",
",",
"teardown",
")",
"(",
"func",
")",
"return",
"decorator"
] | decorator to run a test with the given config options . | train | false |
15,604 | def GetSpecPostbuildCommands(spec, quiet=False):
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if (not quiet):
postbuilds.append(('echo POSTBUILD\\(%s\\) %s' % (spec['target_name'], postbuild['postbuild_name'])))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
| [
"def",
"GetSpecPostbuildCommands",
"(",
"spec",
",",
"quiet",
"=",
"False",
")",
":",
"postbuilds",
"=",
"[",
"]",
"for",
"postbuild",
"in",
"spec",
".",
"get",
"(",
"'postbuilds'",
",",
"[",
"]",
")",
":",
"if",
"(",
"not",
"quiet",
")",
":",
"postbuilds",
".",
"append",
"(",
"(",
"'echo POSTBUILD\\\\(%s\\\\) %s'",
"%",
"(",
"spec",
"[",
"'target_name'",
"]",
",",
"postbuild",
"[",
"'postbuild_name'",
"]",
")",
")",
")",
"postbuilds",
".",
"append",
"(",
"gyp",
".",
"common",
".",
"EncodePOSIXShellList",
"(",
"postbuild",
"[",
"'action'",
"]",
")",
")",
"return",
"postbuilds"
] | returns the list of postbuilds explicitly defined on |spec| . | train | false |
15,605 | def check_allprop_array(values, target):
for value in values:
np.testing.assert_array_equal(value, target)
| [
"def",
"check_allprop_array",
"(",
"values",
",",
"target",
")",
":",
"for",
"value",
"in",
"values",
":",
"np",
".",
"testing",
".",
"assert_array_equal",
"(",
"value",
",",
"target",
")"
] | check to make sure all values match the given target if arrays note: this is not a test . | train | false |
15,606 | def get_cert_data(path, content):
if ((not path) and (not content)):
return None
rval = None
if (path and os.path.exists(path) and os.access(path, os.R_OK)):
rval = open(path).read()
elif content:
rval = content
return rval
| [
"def",
"get_cert_data",
"(",
"path",
",",
"content",
")",
":",
"if",
"(",
"(",
"not",
"path",
")",
"and",
"(",
"not",
"content",
")",
")",
":",
"return",
"None",
"rval",
"=",
"None",
"if",
"(",
"path",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
"and",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"R_OK",
")",
")",
":",
"rval",
"=",
"open",
"(",
"path",
")",
".",
"read",
"(",
")",
"elif",
"content",
":",
"rval",
"=",
"content",
"return",
"rval"
] | get the data for a particular value . | train | false |
15,608 | def cmplx_sort(p):
p = asarray(p)
if iscomplexobj(p):
indx = argsort(abs(p))
else:
indx = argsort(p)
return (take(p, indx, 0), indx)
| [
"def",
"cmplx_sort",
"(",
"p",
")",
":",
"p",
"=",
"asarray",
"(",
"p",
")",
"if",
"iscomplexobj",
"(",
"p",
")",
":",
"indx",
"=",
"argsort",
"(",
"abs",
"(",
"p",
")",
")",
"else",
":",
"indx",
"=",
"argsort",
"(",
"p",
")",
"return",
"(",
"take",
"(",
"p",
",",
"indx",
",",
"0",
")",
",",
"indx",
")"
] | sort roots based on magnitude . | train | false |
15,610 | def create_condition(condition_class, **kwargs):
return Condition.objects.create(proxy_class=_class_path(condition_class), **kwargs)
| [
"def",
"create_condition",
"(",
"condition_class",
",",
"**",
"kwargs",
")",
":",
"return",
"Condition",
".",
"objects",
".",
"create",
"(",
"proxy_class",
"=",
"_class_path",
"(",
"condition_class",
")",
",",
"**",
"kwargs",
")"
] | create a custom condition instance . | train | false |
15,611 | def not_authenticated(func):
def decorated(request, *args, **kwargs):
if request.user.is_authenticated():
next = request.GET.get('next', '/')
return HttpResponseRedirect(next)
return func(request, *args, **kwargs)
return decorated
| [
"def",
"not_authenticated",
"(",
"func",
")",
":",
"def",
"decorated",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
":",
"next",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'next'",
",",
"'/'",
")",
"return",
"HttpResponseRedirect",
"(",
"next",
")",
"return",
"func",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"decorated"
] | decorator that redirect user to next page if he is already logged . | train | false |
15,612 | def aton(sr):
try:
return int(sr)
except ValueError:
try:
return float(sr)
except ValueError:
return False
| [
"def",
"aton",
"(",
"sr",
")",
":",
"try",
":",
"return",
"int",
"(",
"sr",
")",
"except",
"ValueError",
":",
"try",
":",
"return",
"float",
"(",
"sr",
")",
"except",
"ValueError",
":",
"return",
"False"
] | transform a string to a number . | train | false |
15,613 | def record_purchase(params, order):
ccnum_str = params.get('card_accountNumber', '')
first_digit = re.search('\\d', ccnum_str)
if first_digit:
ccnum = ccnum_str[first_digit.start():]
else:
ccnum = '####'
order.purchase(first=params.get('billTo_firstName', ''), last=params.get('billTo_lastName', ''), street1=params.get('billTo_street1', ''), street2=params.get('billTo_street2', ''), city=params.get('billTo_city', ''), state=params.get('billTo_state', ''), country=params.get('billTo_country', ''), postalcode=params.get('billTo_postalCode', ''), ccnum=ccnum, cardtype=CARDTYPE_MAP[params.get('card_cardType', 'UNKNOWN')], processor_reply_dump=json.dumps(params))
| [
"def",
"record_purchase",
"(",
"params",
",",
"order",
")",
":",
"ccnum_str",
"=",
"params",
".",
"get",
"(",
"'card_accountNumber'",
",",
"''",
")",
"first_digit",
"=",
"re",
".",
"search",
"(",
"'\\\\d'",
",",
"ccnum_str",
")",
"if",
"first_digit",
":",
"ccnum",
"=",
"ccnum_str",
"[",
"first_digit",
".",
"start",
"(",
")",
":",
"]",
"else",
":",
"ccnum",
"=",
"'####'",
"order",
".",
"purchase",
"(",
"first",
"=",
"params",
".",
"get",
"(",
"'billTo_firstName'",
",",
"''",
")",
",",
"last",
"=",
"params",
".",
"get",
"(",
"'billTo_lastName'",
",",
"''",
")",
",",
"street1",
"=",
"params",
".",
"get",
"(",
"'billTo_street1'",
",",
"''",
")",
",",
"street2",
"=",
"params",
".",
"get",
"(",
"'billTo_street2'",
",",
"''",
")",
",",
"city",
"=",
"params",
".",
"get",
"(",
"'billTo_city'",
",",
"''",
")",
",",
"state",
"=",
"params",
".",
"get",
"(",
"'billTo_state'",
",",
"''",
")",
",",
"country",
"=",
"params",
".",
"get",
"(",
"'billTo_country'",
",",
"''",
")",
",",
"postalcode",
"=",
"params",
".",
"get",
"(",
"'billTo_postalCode'",
",",
"''",
")",
",",
"ccnum",
"=",
"ccnum",
",",
"cardtype",
"=",
"CARDTYPE_MAP",
"[",
"params",
".",
"get",
"(",
"'card_cardType'",
",",
"'UNKNOWN'",
")",
"]",
",",
"processor_reply_dump",
"=",
"json",
".",
"dumps",
"(",
"params",
")",
")"
] | record the purchase and run purchased_callbacks . | train | false |
15,614 | def get_test_subprocess(cmd=None, **kwds):
kwds.setdefault('stdin', DEVNULL)
kwds.setdefault('stdout', DEVNULL)
if (cmd is None):
assert (not os.path.exists(_TESTFN))
pyline = 'from time import sleep;'
pyline += ("open(r'%s', 'w').close();" % _TESTFN)
pyline += 'sleep(60)'
cmd = [PYTHON, '-c', pyline]
sproc = subprocess.Popen(cmd, **kwds)
wait_for_file(_TESTFN, delete_file=True, empty=True)
else:
sproc = subprocess.Popen(cmd, **kwds)
wait_for_pid(sproc.pid)
_subprocesses_started.add(sproc)
return sproc
| [
"def",
"get_test_subprocess",
"(",
"cmd",
"=",
"None",
",",
"**",
"kwds",
")",
":",
"kwds",
".",
"setdefault",
"(",
"'stdin'",
",",
"DEVNULL",
")",
"kwds",
".",
"setdefault",
"(",
"'stdout'",
",",
"DEVNULL",
")",
"if",
"(",
"cmd",
"is",
"None",
")",
":",
"assert",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"_TESTFN",
")",
")",
"pyline",
"=",
"'from time import sleep;'",
"pyline",
"+=",
"(",
"\"open(r'%s', 'w').close();\"",
"%",
"_TESTFN",
")",
"pyline",
"+=",
"'sleep(60)'",
"cmd",
"=",
"[",
"PYTHON",
",",
"'-c'",
",",
"pyline",
"]",
"sproc",
"=",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"**",
"kwds",
")",
"wait_for_file",
"(",
"_TESTFN",
",",
"delete_file",
"=",
"True",
",",
"empty",
"=",
"True",
")",
"else",
":",
"sproc",
"=",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"**",
"kwds",
")",
"wait_for_pid",
"(",
"sproc",
".",
"pid",
")",
"_subprocesses_started",
".",
"add",
"(",
"sproc",
")",
"return",
"sproc"
] | return a subprocess . | train | false |
15,615 | def gzip_encode(data):
if (not gzip):
raise NotImplementedError
f = BytesIO()
gzf = gzip.GzipFile(mode='wb', fileobj=f, compresslevel=1)
gzf.write(data)
gzf.close()
encoded = f.getvalue()
f.close()
return encoded
| [
"def",
"gzip_encode",
"(",
"data",
")",
":",
"if",
"(",
"not",
"gzip",
")",
":",
"raise",
"NotImplementedError",
"f",
"=",
"BytesIO",
"(",
")",
"gzf",
"=",
"gzip",
".",
"GzipFile",
"(",
"mode",
"=",
"'wb'",
",",
"fileobj",
"=",
"f",
",",
"compresslevel",
"=",
"1",
")",
"gzf",
".",
"write",
"(",
"data",
")",
"gzf",
".",
"close",
"(",
")",
"encoded",
"=",
"f",
".",
"getvalue",
"(",
")",
"f",
".",
"close",
"(",
")",
"return",
"encoded"
] | data -> gzip encoded data encode data using the gzip content encoding as described in rfc 1952 . | train | true |
15,616 | def test_check_update_with_no_data():
prev_points = 100
mean = 0.0
var = 1.0
x_empty = np.empty((0, X.shape[1]))
(tmean, tvar) = GaussianNB._update_mean_variance(prev_points, mean, var, x_empty)
assert_equal(tmean, mean)
assert_equal(tvar, var)
| [
"def",
"test_check_update_with_no_data",
"(",
")",
":",
"prev_points",
"=",
"100",
"mean",
"=",
"0.0",
"var",
"=",
"1.0",
"x_empty",
"=",
"np",
".",
"empty",
"(",
"(",
"0",
",",
"X",
".",
"shape",
"[",
"1",
"]",
")",
")",
"(",
"tmean",
",",
"tvar",
")",
"=",
"GaussianNB",
".",
"_update_mean_variance",
"(",
"prev_points",
",",
"mean",
",",
"var",
",",
"x_empty",
")",
"assert_equal",
"(",
"tmean",
",",
"mean",
")",
"assert_equal",
"(",
"tvar",
",",
"var",
")"
] | test when the partial fit is called without any data . | train | false |
15,617 | def is_request_in_themed_site():
return (configuration_helpers.is_site_configuration_enabled() or microsite.is_request_in_microsite())
| [
"def",
"is_request_in_themed_site",
"(",
")",
":",
"return",
"(",
"configuration_helpers",
".",
"is_site_configuration_enabled",
"(",
")",
"or",
"microsite",
".",
"is_request_in_microsite",
"(",
")",
")"
] | this is a proxy function to hide microsite_configuration behind comprehensive theming . | train | false |
15,622 | def test_write_table_html_fill_values_masked():
buffer_output = StringIO()
t = Table([[1], [1]], names=('a', 'b'), masked=True, dtype=('i4', 'i8'))
t['a'] = np.ma.masked
ascii.write(t, buffer_output, fill_values=(ascii.masked, 'TEST'), format='html')
t_expected = Table([['TEST'], [1]], names=('a', 'b'))
buffer_expected = StringIO()
ascii.write(t_expected, buffer_expected, format='html')
assert (buffer_output.getvalue() == buffer_expected.getvalue())
| [
"def",
"test_write_table_html_fill_values_masked",
"(",
")",
":",
"buffer_output",
"=",
"StringIO",
"(",
")",
"t",
"=",
"Table",
"(",
"[",
"[",
"1",
"]",
",",
"[",
"1",
"]",
"]",
",",
"names",
"=",
"(",
"'a'",
",",
"'b'",
")",
",",
"masked",
"=",
"True",
",",
"dtype",
"=",
"(",
"'i4'",
",",
"'i8'",
")",
")",
"t",
"[",
"'a'",
"]",
"=",
"np",
".",
"ma",
".",
"masked",
"ascii",
".",
"write",
"(",
"t",
",",
"buffer_output",
",",
"fill_values",
"=",
"(",
"ascii",
".",
"masked",
",",
"'TEST'",
")",
",",
"format",
"=",
"'html'",
")",
"t_expected",
"=",
"Table",
"(",
"[",
"[",
"'TEST'",
"]",
",",
"[",
"1",
"]",
"]",
",",
"names",
"=",
"(",
"'a'",
",",
"'b'",
")",
")",
"buffer_expected",
"=",
"StringIO",
"(",
")",
"ascii",
".",
"write",
"(",
"t_expected",
",",
"buffer_expected",
",",
"format",
"=",
"'html'",
")",
"assert",
"(",
"buffer_output",
".",
"getvalue",
"(",
")",
"==",
"buffer_expected",
".",
"getvalue",
"(",
")",
")"
] | test that passing masked values in fill_values should only replace masked columns or values . | train | false |
15,623 | def _hasclass(context, *cls):
node_classes = set(context.context_node.attrib.get('class', '').split())
return node_classes.issuperset(cls)
| [
"def",
"_hasclass",
"(",
"context",
",",
"*",
"cls",
")",
":",
"node_classes",
"=",
"set",
"(",
"context",
".",
"context_node",
".",
"attrib",
".",
"get",
"(",
"'class'",
",",
"''",
")",
".",
"split",
"(",
")",
")",
"return",
"node_classes",
".",
"issuperset",
"(",
"cls",
")"
] | checks if the context node has all the classes passed as arguments . | train | false |
15,624 | def test_hash_vs_typeinfo():
x1 = Symbol('x', even=True)
x2 = Symbol('x', integer=True, odd=False)
assert (hash(x1) == hash(x2))
assert (x1 == x2)
| [
"def",
"test_hash_vs_typeinfo",
"(",
")",
":",
"x1",
"=",
"Symbol",
"(",
"'x'",
",",
"even",
"=",
"True",
")",
"x2",
"=",
"Symbol",
"(",
"'x'",
",",
"integer",
"=",
"True",
",",
"odd",
"=",
"False",
")",
"assert",
"(",
"hash",
"(",
"x1",
")",
"==",
"hash",
"(",
"x2",
")",
")",
"assert",
"(",
"x1",
"==",
"x2",
")"
] | seemingly different typeinfo . | train | false |
15,626 | def find_tasks(source_code):
results = []
for (line, text) in enumerate(source_code.splitlines()):
for todo in re.findall(TASKS_PATTERN, text):
results.append((todo[(-1)].strip().capitalize(), (line + 1)))
return results
| [
"def",
"find_tasks",
"(",
"source_code",
")",
":",
"results",
"=",
"[",
"]",
"for",
"(",
"line",
",",
"text",
")",
"in",
"enumerate",
"(",
"source_code",
".",
"splitlines",
"(",
")",
")",
":",
"for",
"todo",
"in",
"re",
".",
"findall",
"(",
"TASKS_PATTERN",
",",
"text",
")",
":",
"results",
".",
"append",
"(",
"(",
"todo",
"[",
"(",
"-",
"1",
")",
"]",
".",
"strip",
"(",
")",
".",
"capitalize",
"(",
")",
",",
"(",
"line",
"+",
"1",
")",
")",
")",
"return",
"results"
] | find tasks in source code . | train | true |
15,628 | def setup_browser(browser_type='Firefox'):
browser = getattr(webdriver, browser_type)()
hacks_for_phantomjs(browser)
return browser
| [
"def",
"setup_browser",
"(",
"browser_type",
"=",
"'Firefox'",
")",
":",
"browser",
"=",
"getattr",
"(",
"webdriver",
",",
"browser_type",
")",
"(",
")",
"hacks_for_phantomjs",
"(",
"browser",
")",
"return",
"browser"
] | setup the browser . | train | false |
15,629 | def patch_subprocess():
patch_module('subprocess')
| [
"def",
"patch_subprocess",
"(",
")",
":",
"patch_module",
"(",
"'subprocess'",
")"
] | replace :func:subprocess . | train | false |
15,630 | def _create_connection(address, options):
(host, port) = address
if host.endswith('.sock'):
if (not hasattr(socket, 'AF_UNIX')):
raise ConnectionFailure('UNIX-sockets are not supported on this system')
sock = socket.socket(socket.AF_UNIX)
_set_non_inheritable_non_atomic(sock.fileno())
try:
sock.connect(host)
return sock
except socket.error:
sock.close()
raise
family = socket.AF_INET
if (socket.has_ipv6 and (host != 'localhost')):
family = socket.AF_UNSPEC
err = None
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
(af, socktype, proto, dummy, sa) = res
try:
sock = socket.socket(af, (socktype | getattr(socket, 'SOCK_CLOEXEC', 0)), proto)
except socket.error:
sock = socket.socket(af, socktype, proto)
_set_non_inheritable_non_atomic(sock.fileno())
try:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(options.connect_timeout)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, options.socket_keepalive)
sock.connect(sa)
return sock
except socket.error as e:
err = e
sock.close()
if (err is not None):
raise err
else:
raise socket.error('getaddrinfo failed')
| [
"def",
"_create_connection",
"(",
"address",
",",
"options",
")",
":",
"(",
"host",
",",
"port",
")",
"=",
"address",
"if",
"host",
".",
"endswith",
"(",
"'.sock'",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"socket",
",",
"'AF_UNIX'",
")",
")",
":",
"raise",
"ConnectionFailure",
"(",
"'UNIX-sockets are not supported on this system'",
")",
"sock",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_UNIX",
")",
"_set_non_inheritable_non_atomic",
"(",
"sock",
".",
"fileno",
"(",
")",
")",
"try",
":",
"sock",
".",
"connect",
"(",
"host",
")",
"return",
"sock",
"except",
"socket",
".",
"error",
":",
"sock",
".",
"close",
"(",
")",
"raise",
"family",
"=",
"socket",
".",
"AF_INET",
"if",
"(",
"socket",
".",
"has_ipv6",
"and",
"(",
"host",
"!=",
"'localhost'",
")",
")",
":",
"family",
"=",
"socket",
".",
"AF_UNSPEC",
"err",
"=",
"None",
"for",
"res",
"in",
"socket",
".",
"getaddrinfo",
"(",
"host",
",",
"port",
",",
"family",
",",
"socket",
".",
"SOCK_STREAM",
")",
":",
"(",
"af",
",",
"socktype",
",",
"proto",
",",
"dummy",
",",
"sa",
")",
"=",
"res",
"try",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
"af",
",",
"(",
"socktype",
"|",
"getattr",
"(",
"socket",
",",
"'SOCK_CLOEXEC'",
",",
"0",
")",
")",
",",
"proto",
")",
"except",
"socket",
".",
"error",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
"af",
",",
"socktype",
",",
"proto",
")",
"_set_non_inheritable_non_atomic",
"(",
"sock",
".",
"fileno",
"(",
")",
")",
"try",
":",
"sock",
".",
"setsockopt",
"(",
"socket",
".",
"IPPROTO_TCP",
",",
"socket",
".",
"TCP_NODELAY",
",",
"1",
")",
"sock",
".",
"settimeout",
"(",
"options",
".",
"connect_timeout",
")",
"sock",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_KEEPALIVE",
",",
"options",
".",
"socket_keepalive",
")",
"sock",
".",
"connect",
"(",
"sa",
")",
"return",
"sock",
"except",
"socket",
".",
"error",
"as",
"e",
":",
"err",
"=",
"e",
"sock",
".",
"close",
"(",
")",
"if",
"(",
"err",
"is",
"not",
"None",
")",
":",
"raise",
"err",
"else",
":",
"raise",
"socket",
".",
"error",
"(",
"'getaddrinfo failed'",
")"
] | connect to *address* and return the socket object . | train | true |
15,631 | def scratchpad(pl, icons=SCRATCHPAD_ICONS):
return [{u'contents': icons.get(w.scratchpad_state, icons[u'changed']), u'highlight_groups': scratchpad_groups(w)} for w in get_i3_connection().get_tree().descendents() if (w.scratchpad_state != u'none')]
| [
"def",
"scratchpad",
"(",
"pl",
",",
"icons",
"=",
"SCRATCHPAD_ICONS",
")",
":",
"return",
"[",
"{",
"u'contents'",
":",
"icons",
".",
"get",
"(",
"w",
".",
"scratchpad_state",
",",
"icons",
"[",
"u'changed'",
"]",
")",
",",
"u'highlight_groups'",
":",
"scratchpad_groups",
"(",
"w",
")",
"}",
"for",
"w",
"in",
"get_i3_connection",
"(",
")",
".",
"get_tree",
"(",
")",
".",
"descendents",
"(",
")",
"if",
"(",
"w",
".",
"scratchpad_state",
"!=",
"u'none'",
")",
"]"
] | returns the windows currently on the scratchpad . | train | false |
15,632 | @not_implemented_for('directed')
@not_implemented_for('multigraph')
def subgraph_centrality(G):
import numpy
import numpy.linalg
nodelist = list(G)
A = nx.to_numpy_matrix(G, nodelist)
A[(A != 0.0)] = 1
(w, v) = numpy.linalg.eigh(A.A)
vsquare = (numpy.array(v) ** 2)
expw = numpy.exp(w)
xg = numpy.dot(vsquare, expw)
sc = dict(zip(nodelist, map(float, xg)))
return sc
| [
"@",
"not_implemented_for",
"(",
"'directed'",
")",
"@",
"not_implemented_for",
"(",
"'multigraph'",
")",
"def",
"subgraph_centrality",
"(",
"G",
")",
":",
"import",
"numpy",
"import",
"numpy",
".",
"linalg",
"nodelist",
"=",
"list",
"(",
"G",
")",
"A",
"=",
"nx",
".",
"to_numpy_matrix",
"(",
"G",
",",
"nodelist",
")",
"A",
"[",
"(",
"A",
"!=",
"0.0",
")",
"]",
"=",
"1",
"(",
"w",
",",
"v",
")",
"=",
"numpy",
".",
"linalg",
".",
"eigh",
"(",
"A",
".",
"A",
")",
"vsquare",
"=",
"(",
"numpy",
".",
"array",
"(",
"v",
")",
"**",
"2",
")",
"expw",
"=",
"numpy",
".",
"exp",
"(",
"w",
")",
"xg",
"=",
"numpy",
".",
"dot",
"(",
"vsquare",
",",
"expw",
")",
"sc",
"=",
"dict",
"(",
"zip",
"(",
"nodelist",
",",
"map",
"(",
"float",
",",
"xg",
")",
")",
")",
"return",
"sc"
] | return subgraph centrality for each node in g . | train | false |
15,634 | def _tcp_listener(address, backlog=50, reuse_addr=None, family=_socket.AF_INET):
sock = socket(family=family)
if (reuse_addr is not None):
sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, reuse_addr)
try:
sock.bind(address)
except _socket.error:
ex = sys.exc_info()[1]
strerror = getattr(ex, 'strerror', None)
if (strerror is not None):
ex.strerror = ((strerror + ': ') + repr(address))
raise
sock.listen(backlog)
sock.setblocking(0)
return sock
| [
"def",
"_tcp_listener",
"(",
"address",
",",
"backlog",
"=",
"50",
",",
"reuse_addr",
"=",
"None",
",",
"family",
"=",
"_socket",
".",
"AF_INET",
")",
":",
"sock",
"=",
"socket",
"(",
"family",
"=",
"family",
")",
"if",
"(",
"reuse_addr",
"is",
"not",
"None",
")",
":",
"sock",
".",
"setsockopt",
"(",
"_socket",
".",
"SOL_SOCKET",
",",
"_socket",
".",
"SO_REUSEADDR",
",",
"reuse_addr",
")",
"try",
":",
"sock",
".",
"bind",
"(",
"address",
")",
"except",
"_socket",
".",
"error",
":",
"ex",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"strerror",
"=",
"getattr",
"(",
"ex",
",",
"'strerror'",
",",
"None",
")",
"if",
"(",
"strerror",
"is",
"not",
"None",
")",
":",
"ex",
".",
"strerror",
"=",
"(",
"(",
"strerror",
"+",
"': '",
")",
"+",
"repr",
"(",
"address",
")",
")",
"raise",
"sock",
".",
"listen",
"(",
"backlog",
")",
"sock",
".",
"setblocking",
"(",
"0",
")",
"return",
"sock"
] | a shortcut to create a tcp socket . | train | false |
15,635 | def convert_version(text):
version = 0
test = True
m = RE_VERSION.search(text)
if m:
version = (((int(m.group(1)) * 1000000) + (int(m.group(2)) * 10000)) + (int(m.group(3)) * 100))
try:
if (m.group(4).lower() == 'rc'):
version = (version + 80)
elif (m.group(4).lower() == 'beta'):
version = (version + 40)
version = (version + int(m.group(5)))
except:
version = (version + 99)
test = False
return (version, test)
| [
"def",
"convert_version",
"(",
"text",
")",
":",
"version",
"=",
"0",
"test",
"=",
"True",
"m",
"=",
"RE_VERSION",
".",
"search",
"(",
"text",
")",
"if",
"m",
":",
"version",
"=",
"(",
"(",
"(",
"int",
"(",
"m",
".",
"group",
"(",
"1",
")",
")",
"*",
"1000000",
")",
"+",
"(",
"int",
"(",
"m",
".",
"group",
"(",
"2",
")",
")",
"*",
"10000",
")",
")",
"+",
"(",
"int",
"(",
"m",
".",
"group",
"(",
"3",
")",
")",
"*",
"100",
")",
")",
"try",
":",
"if",
"(",
"m",
".",
"group",
"(",
"4",
")",
".",
"lower",
"(",
")",
"==",
"'rc'",
")",
":",
"version",
"=",
"(",
"version",
"+",
"80",
")",
"elif",
"(",
"m",
".",
"group",
"(",
"4",
")",
".",
"lower",
"(",
")",
"==",
"'beta'",
")",
":",
"version",
"=",
"(",
"version",
"+",
"40",
")",
"version",
"=",
"(",
"version",
"+",
"int",
"(",
"m",
".",
"group",
"(",
"5",
")",
")",
")",
"except",
":",
"version",
"=",
"(",
"version",
"+",
"99",
")",
"test",
"=",
"False",
"return",
"(",
"version",
",",
"test",
")"
] | convert version string to numerical value and a testversion indicator . | train | false |
15,636 | @set_database
def get_or_create(item, **kwargs):
if item:
return Item.create_or_get(**parse_model_data(item))
| [
"@",
"set_database",
"def",
"get_or_create",
"(",
"item",
",",
"**",
"kwargs",
")",
":",
"if",
"item",
":",
"return",
"Item",
".",
"create_or_get",
"(",
"**",
"parse_model_data",
"(",
"item",
")",
")"
] | populate or update the database entry . | train | false |
15,637 | def end():
if core.is_standalone():
standalone.end()
logger.info('Stop Glances (with CTRL-C)')
elif core.is_client():
client.end()
logger.info('Stop Glances client (with CTRL-C)')
elif core.is_server():
server.end()
logger.info('Stop Glances server (with CTRL-C)')
elif core.is_webserver():
webserver.end()
logger.info('Stop Glances web server(with CTRL-C)')
sys.exit(0)
| [
"def",
"end",
"(",
")",
":",
"if",
"core",
".",
"is_standalone",
"(",
")",
":",
"standalone",
".",
"end",
"(",
")",
"logger",
".",
"info",
"(",
"'Stop Glances (with CTRL-C)'",
")",
"elif",
"core",
".",
"is_client",
"(",
")",
":",
"client",
".",
"end",
"(",
")",
"logger",
".",
"info",
"(",
"'Stop Glances client (with CTRL-C)'",
")",
"elif",
"core",
".",
"is_server",
"(",
")",
":",
"server",
".",
"end",
"(",
")",
"logger",
".",
"info",
"(",
"'Stop Glances server (with CTRL-C)'",
")",
"elif",
"core",
".",
"is_webserver",
"(",
")",
":",
"webserver",
".",
"end",
"(",
")",
"logger",
".",
"info",
"(",
"'Stop Glances web server(with CTRL-C)'",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] | event: ends the coroutine and returns a value to its delegator . | train | false |
15,640 | @task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([('settings=', 's', 'Django settings'), ('fake-initial', None, 'Fake the initial migrations')])
@timed
def update_db(options):
settings = getattr(options, 'settings', DEFAULT_SETTINGS)
fake = ('--fake-initial' if getattr(options, 'fake_initial', False) else '')
for system in ('lms', 'cms'):
sh('NO_EDXAPP_SUDO=1 EDX_PLATFORM_SETTINGS_OVERRIDE={settings} /edx/bin/edxapp-migrate-{system} --traceback --pythonpath=. {fake}'.format(settings=settings, system=system, fake=fake))
| [
"@",
"task",
"@",
"needs",
"(",
"'pavelib.prereqs.install_prereqs'",
")",
"@",
"cmdopts",
"(",
"[",
"(",
"'settings='",
",",
"'s'",
",",
"'Django settings'",
")",
",",
"(",
"'fake-initial'",
",",
"None",
",",
"'Fake the initial migrations'",
")",
"]",
")",
"@",
"timed",
"def",
"update_db",
"(",
"options",
")",
":",
"settings",
"=",
"getattr",
"(",
"options",
",",
"'settings'",
",",
"DEFAULT_SETTINGS",
")",
"fake",
"=",
"(",
"'--fake-initial'",
"if",
"getattr",
"(",
"options",
",",
"'fake_initial'",
",",
"False",
")",
"else",
"''",
")",
"for",
"system",
"in",
"(",
"'lms'",
",",
"'cms'",
")",
":",
"sh",
"(",
"'NO_EDXAPP_SUDO=1 EDX_PLATFORM_SETTINGS_OVERRIDE={settings} /edx/bin/edxapp-migrate-{system} --traceback --pythonpath=. {fake}'",
".",
"format",
"(",
"settings",
"=",
"settings",
",",
"system",
"=",
"system",
",",
"fake",
"=",
"fake",
")",
")"
] | migrates the lms and cms across all databases . | train | false |
15,642 | @click.command('procfile')
def setup_procfile():
from bench.config.procfile import setup_procfile
setup_procfile('.')
| [
"@",
"click",
".",
"command",
"(",
"'procfile'",
")",
"def",
"setup_procfile",
"(",
")",
":",
"from",
"bench",
".",
"config",
".",
"procfile",
"import",
"setup_procfile",
"setup_procfile",
"(",
"'.'",
")"
] | setup procfile for bench start . | train | false |
15,643 | def _is_ssl_error(e):
if _TLS:
return isinstance(e, SSL.Error)
return False
| [
"def",
"_is_ssl_error",
"(",
"e",
")",
":",
"if",
"_TLS",
":",
"return",
"isinstance",
"(",
"e",
",",
"SSL",
".",
"Error",
")",
"return",
"False"
] | internal helper . | train | false |
15,644 | def rmsprop(opfunc, x, config, state=None):
if ((config is None) and (state is None)):
raise ValueError('rmsprop requires a dictionary to retain state between iterations')
state = (state if (state is not None) else config)
lr = config.get('learningRate', 0.01)
alpha = config.get('alpha', 0.99)
epsilon = config.get('epsilon', 1e-08)
wd = config.get('weightDecay', 0)
(fx, dfdx) = opfunc(x)
if (wd != 0):
dfdx.add_(wd, x)
if (not ('m' in state)):
state['m'] = x.new().resize_as_(dfdx).zero_()
state['tmp'] = x.new().resize_as_(dfdx)
state['m'].mul_(alpha)
state['m'].addcmul_((1.0 - alpha), dfdx, dfdx)
torch.sqrt(state['m'], out=state['tmp']).add_(epsilon)
x.addcdiv_((- lr), dfdx, state['tmp'])
return (x, fx)
| [
"def",
"rmsprop",
"(",
"opfunc",
",",
"x",
",",
"config",
",",
"state",
"=",
"None",
")",
":",
"if",
"(",
"(",
"config",
"is",
"None",
")",
"and",
"(",
"state",
"is",
"None",
")",
")",
":",
"raise",
"ValueError",
"(",
"'rmsprop requires a dictionary to retain state between iterations'",
")",
"state",
"=",
"(",
"state",
"if",
"(",
"state",
"is",
"not",
"None",
")",
"else",
"config",
")",
"lr",
"=",
"config",
".",
"get",
"(",
"'learningRate'",
",",
"0.01",
")",
"alpha",
"=",
"config",
".",
"get",
"(",
"'alpha'",
",",
"0.99",
")",
"epsilon",
"=",
"config",
".",
"get",
"(",
"'epsilon'",
",",
"1e-08",
")",
"wd",
"=",
"config",
".",
"get",
"(",
"'weightDecay'",
",",
"0",
")",
"(",
"fx",
",",
"dfdx",
")",
"=",
"opfunc",
"(",
"x",
")",
"if",
"(",
"wd",
"!=",
"0",
")",
":",
"dfdx",
".",
"add_",
"(",
"wd",
",",
"x",
")",
"if",
"(",
"not",
"(",
"'m'",
"in",
"state",
")",
")",
":",
"state",
"[",
"'m'",
"]",
"=",
"x",
".",
"new",
"(",
")",
".",
"resize_as_",
"(",
"dfdx",
")",
".",
"zero_",
"(",
")",
"state",
"[",
"'tmp'",
"]",
"=",
"x",
".",
"new",
"(",
")",
".",
"resize_as_",
"(",
"dfdx",
")",
"state",
"[",
"'m'",
"]",
".",
"mul_",
"(",
"alpha",
")",
"state",
"[",
"'m'",
"]",
".",
"addcmul_",
"(",
"(",
"1.0",
"-",
"alpha",
")",
",",
"dfdx",
",",
"dfdx",
")",
"torch",
".",
"sqrt",
"(",
"state",
"[",
"'m'",
"]",
",",
"out",
"=",
"state",
"[",
"'tmp'",
"]",
")",
".",
"add_",
"(",
"epsilon",
")",
"x",
".",
"addcdiv_",
"(",
"(",
"-",
"lr",
")",
",",
"dfdx",
",",
"state",
"[",
"'tmp'",
"]",
")",
"return",
"(",
"x",
",",
"fx",
")"
] | rmsprop updates scale learning rates by dividing with the moving average of the root mean squared gradients . | train | false |
15,645 | def get_org_branches():
orgs = ['Org-A', 'Org-B', 'Org-C']
if current.deployment_settings.get_org_branches():
branches = [None, 'Branch-A']
else:
branches = [None]
return (orgs, branches)
| [
"def",
"get_org_branches",
"(",
")",
":",
"orgs",
"=",
"[",
"'Org-A'",
",",
"'Org-B'",
",",
"'Org-C'",
"]",
"if",
"current",
".",
"deployment_settings",
".",
"get_org_branches",
"(",
")",
":",
"branches",
"=",
"[",
"None",
",",
"'Branch-A'",
"]",
"else",
":",
"branches",
"=",
"[",
"None",
"]",
"return",
"(",
"orgs",
",",
"branches",
")"
] | defines the organisations and branches for which the role test data is to be created . | train | false |
15,648 | def row_match(row, conditions):
return all((condition_match(row, cond) for cond in conditions))
| [
"def",
"row_match",
"(",
"row",
",",
"conditions",
")",
":",
"return",
"all",
"(",
"(",
"condition_match",
"(",
"row",
",",
"cond",
")",
"for",
"cond",
"in",
"conditions",
")",
")"
] | return whether the row matches the list of conditions . | train | false |
15,649 | def display_datetime(dt, custom_tz=None):
timeformat = '%Y-%m-%d %H:%M:%S %Z%z'
if ((dt.tzinfo is not None) and (dt.tzinfo.utcoffset(dt) is not None)):
if (custom_tz is not None):
dt = dt.astimezone(custom_tz)
elif (config.TZ is not None):
if isinstance(config.TZ, str):
secs = calendar.timegm(dt.timetuple())
os.environ['TZ'] = config.TZ
time.tzset()
timeformat = timeformat[:(-2)]
return time.strftime(timeformat, time.localtime(secs))
else:
dt = dt.astimezone(config.tz)
return (('{0:' + timeformat) + '}').format(dt)
| [
"def",
"display_datetime",
"(",
"dt",
",",
"custom_tz",
"=",
"None",
")",
":",
"timeformat",
"=",
"'%Y-%m-%d %H:%M:%S %Z%z'",
"if",
"(",
"(",
"dt",
".",
"tzinfo",
"is",
"not",
"None",
")",
"and",
"(",
"dt",
".",
"tzinfo",
".",
"utcoffset",
"(",
"dt",
")",
"is",
"not",
"None",
")",
")",
":",
"if",
"(",
"custom_tz",
"is",
"not",
"None",
")",
":",
"dt",
"=",
"dt",
".",
"astimezone",
"(",
"custom_tz",
")",
"elif",
"(",
"config",
".",
"TZ",
"is",
"not",
"None",
")",
":",
"if",
"isinstance",
"(",
"config",
".",
"TZ",
",",
"str",
")",
":",
"secs",
"=",
"calendar",
".",
"timegm",
"(",
"dt",
".",
"timetuple",
"(",
")",
")",
"os",
".",
"environ",
"[",
"'TZ'",
"]",
"=",
"config",
".",
"TZ",
"time",
".",
"tzset",
"(",
")",
"timeformat",
"=",
"timeformat",
"[",
":",
"(",
"-",
"2",
")",
"]",
"return",
"time",
".",
"strftime",
"(",
"timeformat",
",",
"time",
".",
"localtime",
"(",
"secs",
")",
")",
"else",
":",
"dt",
"=",
"dt",
".",
"astimezone",
"(",
"config",
".",
"tz",
")",
"return",
"(",
"(",
"'{0:'",
"+",
"timeformat",
")",
"+",
"'}'",
")",
".",
"format",
"(",
"dt",
")"
] | returns a string from a datetime according to the display tz (or a custom one . | train | false |
15,651 | def revoke_certs_by_user_and_project(user_id, project_id):
admin = context.get_admin_context()
for cert in db.certificate_get_all_by_user_and_project(admin, user_id, project_id):
revoke_cert(cert['project_id'], cert['file_name'])
| [
"def",
"revoke_certs_by_user_and_project",
"(",
"user_id",
",",
"project_id",
")",
":",
"admin",
"=",
"context",
".",
"get_admin_context",
"(",
")",
"for",
"cert",
"in",
"db",
".",
"certificate_get_all_by_user_and_project",
"(",
"admin",
",",
"user_id",
",",
"project_id",
")",
":",
"revoke_cert",
"(",
"cert",
"[",
"'project_id'",
"]",
",",
"cert",
"[",
"'file_name'",
"]",
")"
] | revoke certs for user in project . | train | false |
15,652 | @handle_response_format
@treeio_login_required
def task_delete(request, task_id, response_format='html'):
task = get_object_or_404(Task, pk=task_id)
if (not request.user.profile.has_permission(task, mode='w')):
return user_denied(request, message="You don't have access to this Task")
if request.POST:
if ('delete' in request.POST):
if ('trash' in request.POST):
task.trash = True
task.save()
else:
task.delete()
return HttpResponseRedirect(reverse('projects_index'))
elif ('cancel' in request.POST):
return HttpResponseRedirect(reverse('projects_task_view', args=[task.id]))
subtasks = Object.filter_by_request(request, Task.objects.filter(parent=task))
time_slots = Object.filter_by_request(request, TaskTimeSlot.objects.filter(task=task))
context = _get_default_context(request)
context.update({'task': task, 'subtasks': subtasks, 'time_slots': time_slots})
return render_to_response('projects/task_delete', context, context_instance=RequestContext(request), response_format=response_format)
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"task_delete",
"(",
"request",
",",
"task_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"task",
"=",
"get_object_or_404",
"(",
"Task",
",",
"pk",
"=",
"task_id",
")",
"if",
"(",
"not",
"request",
".",
"user",
".",
"profile",
".",
"has_permission",
"(",
"task",
",",
"mode",
"=",
"'w'",
")",
")",
":",
"return",
"user_denied",
"(",
"request",
",",
"message",
"=",
"\"You don't have access to this Task\"",
")",
"if",
"request",
".",
"POST",
":",
"if",
"(",
"'delete'",
"in",
"request",
".",
"POST",
")",
":",
"if",
"(",
"'trash'",
"in",
"request",
".",
"POST",
")",
":",
"task",
".",
"trash",
"=",
"True",
"task",
".",
"save",
"(",
")",
"else",
":",
"task",
".",
"delete",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'projects_index'",
")",
")",
"elif",
"(",
"'cancel'",
"in",
"request",
".",
"POST",
")",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'projects_task_view'",
",",
"args",
"=",
"[",
"task",
".",
"id",
"]",
")",
")",
"subtasks",
"=",
"Object",
".",
"filter_by_request",
"(",
"request",
",",
"Task",
".",
"objects",
".",
"filter",
"(",
"parent",
"=",
"task",
")",
")",
"time_slots",
"=",
"Object",
".",
"filter_by_request",
"(",
"request",
",",
"TaskTimeSlot",
".",
"objects",
".",
"filter",
"(",
"task",
"=",
"task",
")",
")",
"context",
"=",
"_get_default_context",
"(",
"request",
")",
"context",
".",
"update",
"(",
"{",
"'task'",
":",
"task",
",",
"'subtasks'",
":",
"subtasks",
",",
"'time_slots'",
":",
"time_slots",
"}",
")",
"return",
"render_to_response",
"(",
"'projects/task_delete'",
",",
"context",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] | delete a task object . | train | false |
15,653 | def boto3_tag_list_to_ansible_dict(tags_list):
tags_dict = {}
for tag in tags_list:
if ('key' in tag):
tags_dict[tag['key']] = tag['value']
elif ('Key' in tag):
tags_dict[tag['Key']] = tag['Value']
return tags_dict
| [
"def",
"boto3_tag_list_to_ansible_dict",
"(",
"tags_list",
")",
":",
"tags_dict",
"=",
"{",
"}",
"for",
"tag",
"in",
"tags_list",
":",
"if",
"(",
"'key'",
"in",
"tag",
")",
":",
"tags_dict",
"[",
"tag",
"[",
"'key'",
"]",
"]",
"=",
"tag",
"[",
"'value'",
"]",
"elif",
"(",
"'Key'",
"in",
"tag",
")",
":",
"tags_dict",
"[",
"tag",
"[",
"'Key'",
"]",
"]",
"=",
"tag",
"[",
"'Value'",
"]",
"return",
"tags_dict"
] | convert a boto3 list of resource tags to a flat dict of key:value pairs args: tags_list : list of dicts representing aws tags . | train | false |
15,654 | def RegisterLateBindingCallback(target_name, callback, **kwargs):
_LATE_BINDING_STORE.setdefault(target_name, []).append((callback, kwargs))
| [
"def",
"RegisterLateBindingCallback",
"(",
"target_name",
",",
"callback",
",",
"**",
"kwargs",
")",
":",
"_LATE_BINDING_STORE",
".",
"setdefault",
"(",
"target_name",
",",
"[",
"]",
")",
".",
"append",
"(",
"(",
"callback",
",",
"kwargs",
")",
")"
] | registers a callback to be invoked when the rdfvalue named is declared . | train | true |
15,657 | def javascript_confirm(url, js_msg, abort_on):
log.js.debug('confirm: {}'.format(js_msg))
if config.get('ui', 'modal-js-dialog'):
raise CallSuper
msg = 'From <b>{}</b>:<br/>{}'.format(html.escape(url.toDisplayString()), html.escape(js_msg))
ans = message.ask('Javascript confirm', msg, mode=usertypes.PromptMode.yesno, abort_on=abort_on)
return bool(ans)
| [
"def",
"javascript_confirm",
"(",
"url",
",",
"js_msg",
",",
"abort_on",
")",
":",
"log",
".",
"js",
".",
"debug",
"(",
"'confirm: {}'",
".",
"format",
"(",
"js_msg",
")",
")",
"if",
"config",
".",
"get",
"(",
"'ui'",
",",
"'modal-js-dialog'",
")",
":",
"raise",
"CallSuper",
"msg",
"=",
"'From <b>{}</b>:<br/>{}'",
".",
"format",
"(",
"html",
".",
"escape",
"(",
"url",
".",
"toDisplayString",
"(",
")",
")",
",",
"html",
".",
"escape",
"(",
"js_msg",
")",
")",
"ans",
"=",
"message",
".",
"ask",
"(",
"'Javascript confirm'",
",",
"msg",
",",
"mode",
"=",
"usertypes",
".",
"PromptMode",
".",
"yesno",
",",
"abort_on",
"=",
"abort_on",
")",
"return",
"bool",
"(",
"ans",
")"
] | display a javascript confirm prompt . | train | false |
15,660 | def filter_wiki(raw):
text = utils.to_unicode(raw, 'utf8', errors='ignore')
text = utils.decode_htmlentities(text)
return remove_markup(text)
| [
"def",
"filter_wiki",
"(",
"raw",
")",
":",
"text",
"=",
"utils",
".",
"to_unicode",
"(",
"raw",
",",
"'utf8'",
",",
"errors",
"=",
"'ignore'",
")",
"text",
"=",
"utils",
".",
"decode_htmlentities",
"(",
"text",
")",
"return",
"remove_markup",
"(",
"text",
")"
] | filter out wiki mark-up from raw . | train | false |
15,661 | def instance_get_by_uuid(context, uuid, columns_to_join=None):
return IMPL.instance_get_by_uuid(context, uuid, columns_to_join)
| [
"def",
"instance_get_by_uuid",
"(",
"context",
",",
"uuid",
",",
"columns_to_join",
"=",
"None",
")",
":",
"return",
"IMPL",
".",
"instance_get_by_uuid",
"(",
"context",
",",
"uuid",
",",
"columns_to_join",
")"
] | get an instance or raise if it does not exist . | train | false |
15,662 | @task
def finalize_index(index_pk):
from kuma.wiki.search import WikiDocumentType
from kuma.search.models import Index
cls = WikiDocumentType
es = cls.get_connection('indexing')
index = Index.objects.get(pk=index_pk)
es.indices.optimize(index=index.prefixed_name)
index_settings = {'index': {'refresh_interval': settings.ES_DEFAULT_REFRESH_INTERVAL, 'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS}}
es.indices.put_settings(index=index.prefixed_name, body=index_settings)
index.populated = True
index.save()
subject = ('Index %s completely populated' % index.prefixed_name)
message = 'You may want to promote it now via the admin interface.'
mail_admins(subject=subject, message=message)
| [
"@",
"task",
"def",
"finalize_index",
"(",
"index_pk",
")",
":",
"from",
"kuma",
".",
"wiki",
".",
"search",
"import",
"WikiDocumentType",
"from",
"kuma",
".",
"search",
".",
"models",
"import",
"Index",
"cls",
"=",
"WikiDocumentType",
"es",
"=",
"cls",
".",
"get_connection",
"(",
"'indexing'",
")",
"index",
"=",
"Index",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"index_pk",
")",
"es",
".",
"indices",
".",
"optimize",
"(",
"index",
"=",
"index",
".",
"prefixed_name",
")",
"index_settings",
"=",
"{",
"'index'",
":",
"{",
"'refresh_interval'",
":",
"settings",
".",
"ES_DEFAULT_REFRESH_INTERVAL",
",",
"'number_of_replicas'",
":",
"settings",
".",
"ES_DEFAULT_NUM_REPLICAS",
"}",
"}",
"es",
".",
"indices",
".",
"put_settings",
"(",
"index",
"=",
"index",
".",
"prefixed_name",
",",
"body",
"=",
"index_settings",
")",
"index",
".",
"populated",
"=",
"True",
"index",
".",
"save",
"(",
")",
"subject",
"=",
"(",
"'Index %s completely populated'",
"%",
"index",
".",
"prefixed_name",
")",
"message",
"=",
"'You may want to promote it now via the admin interface.'",
"mail_admins",
"(",
"subject",
"=",
"subject",
",",
"message",
"=",
"message",
")"
] | finalizes the elasticsearch index . | train | false |
15,663 | def ProcessCommandFlags(args):
flags = {}
rkeyval = '--(?P<key>\\S*)[=](?P<value>\\S*)'
roption = '--(?P<option>\\S*)'
r = (((('(' + rkeyval) + ')|(') + roption) + ')')
rc = re.compile(r)
for a in args:
try:
rcg = rc.search(a).groupdict()
if rcg.has_key('key'):
flags[rcg['key']] = rcg['value']
if rcg.has_key('option'):
flags[rcg['option']] = rcg['option']
except AttributeError:
return None
return flags
| [
"def",
"ProcessCommandFlags",
"(",
"args",
")",
":",
"flags",
"=",
"{",
"}",
"rkeyval",
"=",
"'--(?P<key>\\\\S*)[=](?P<value>\\\\S*)'",
"roption",
"=",
"'--(?P<option>\\\\S*)'",
"r",
"=",
"(",
"(",
"(",
"(",
"'('",
"+",
"rkeyval",
")",
"+",
"')|('",
")",
"+",
"roption",
")",
"+",
"')'",
")",
"rc",
"=",
"re",
".",
"compile",
"(",
"r",
")",
"for",
"a",
"in",
"args",
":",
"try",
":",
"rcg",
"=",
"rc",
".",
"search",
"(",
"a",
")",
".",
"groupdict",
"(",
")",
"if",
"rcg",
".",
"has_key",
"(",
"'key'",
")",
":",
"flags",
"[",
"rcg",
"[",
"'key'",
"]",
"]",
"=",
"rcg",
"[",
"'value'",
"]",
"if",
"rcg",
".",
"has_key",
"(",
"'option'",
")",
":",
"flags",
"[",
"rcg",
"[",
"'option'",
"]",
"]",
"=",
"rcg",
"[",
"'option'",
"]",
"except",
"AttributeError",
":",
"return",
"None",
"return",
"flags"
] | parse command line flags per specified usage . | train | false |
15,664 | @receiver(post_save, sender=Action, dispatch_uid='action_create_notifications')
def add_notification_for_action(sender, instance, created, **kwargs):
if (not created):
return
from kitsune.notifications import tasks
tasks.add_notification_for_action.delay(instance.id)
| [
"@",
"receiver",
"(",
"post_save",
",",
"sender",
"=",
"Action",
",",
"dispatch_uid",
"=",
"'action_create_notifications'",
")",
"def",
"add_notification_for_action",
"(",
"sender",
",",
"instance",
",",
"created",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"created",
")",
":",
"return",
"from",
"kitsune",
".",
"notifications",
"import",
"tasks",
"tasks",
".",
"add_notification_for_action",
".",
"delay",
"(",
"instance",
".",
"id",
")"
] | when an action is created . | train | false |
15,665 | def find_matching_files(directory, regex):
files = glob.glob(os.path.join(directory, '*'))
results = []
for f in files:
if regex.match(os.path.basename(f)):
results.append(f)
return results
| [
"def",
"find_matching_files",
"(",
"directory",
",",
"regex",
")",
":",
"files",
"=",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"'*'",
")",
")",
"results",
"=",
"[",
"]",
"for",
"f",
"in",
"files",
":",
"if",
"regex",
".",
"match",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
")",
":",
"results",
".",
"append",
"(",
"f",
")",
"return",
"results"
] | find all files in a given directory that match a given regex . | train | false |
15,666 | def test_indent():
multiline_string = 'test\ntest1\ntest2\ntest3'
indented_multiline_string = ' test\n test1\n test2\n test3'
assert (indented_multiline_string == _indent(multiline_string, 4))
| [
"def",
"test_indent",
"(",
")",
":",
"multiline_string",
"=",
"'test\\ntest1\\ntest2\\ntest3'",
"indented_multiline_string",
"=",
"' test\\n test1\\n test2\\n test3'",
"assert",
"(",
"indented_multiline_string",
"==",
"_indent",
"(",
"multiline_string",
",",
"4",
")",
")"
] | assert that indenting a multiline string by 4 spaces prepends 4 spaces before each new line . | train | false |
15,667 | def _get_surf_neighbors(surf, k):
verts = surf['tris'][surf['neighbor_tri'][k]]
verts = np.setdiff1d(verts, [k], assume_unique=False)
assert np.all((verts < surf['np']))
nneighbors = len(verts)
nneigh_max = len(surf['neighbor_tri'][k])
if (nneighbors > nneigh_max):
raise RuntimeError(('Too many neighbors for vertex %d' % k))
elif (nneighbors != nneigh_max):
logger.info((' Incorrect number of distinct neighbors for vertex %d (%d instead of %d) [fixed].' % (k, nneighbors, nneigh_max)))
return verts
| [
"def",
"_get_surf_neighbors",
"(",
"surf",
",",
"k",
")",
":",
"verts",
"=",
"surf",
"[",
"'tris'",
"]",
"[",
"surf",
"[",
"'neighbor_tri'",
"]",
"[",
"k",
"]",
"]",
"verts",
"=",
"np",
".",
"setdiff1d",
"(",
"verts",
",",
"[",
"k",
"]",
",",
"assume_unique",
"=",
"False",
")",
"assert",
"np",
".",
"all",
"(",
"(",
"verts",
"<",
"surf",
"[",
"'np'",
"]",
")",
")",
"nneighbors",
"=",
"len",
"(",
"verts",
")",
"nneigh_max",
"=",
"len",
"(",
"surf",
"[",
"'neighbor_tri'",
"]",
"[",
"k",
"]",
")",
"if",
"(",
"nneighbors",
">",
"nneigh_max",
")",
":",
"raise",
"RuntimeError",
"(",
"(",
"'Too many neighbors for vertex %d'",
"%",
"k",
")",
")",
"elif",
"(",
"nneighbors",
"!=",
"nneigh_max",
")",
":",
"logger",
".",
"info",
"(",
"(",
"' Incorrect number of distinct neighbors for vertex %d (%d instead of %d) [fixed].'",
"%",
"(",
"k",
",",
"nneighbors",
",",
"nneigh_max",
")",
")",
")",
"return",
"verts"
] | calculate the surface neighbors based on triangulation . | train | false |
15,668 | def picker(field_name):
return (lambda row: row[field_name])
| [
"def",
"picker",
"(",
"field_name",
")",
":",
"return",
"(",
"lambda",
"row",
":",
"row",
"[",
"field_name",
"]",
")"
] | returns a function that picks a field out of a dict . | train | false |
15,669 | def installed_packages(image=None):
return _get_components('Package Identity', 'Packages', 'Installed')
| [
"def",
"installed_packages",
"(",
"image",
"=",
"None",
")",
":",
"return",
"_get_components",
"(",
"'Package Identity'",
",",
"'Packages'",
",",
"'Installed'",
")"
] | list the packages installed on the system args: image : the path to the root directory of an offline windows image . | train | false |
15,670 | def port_field_data(request):
def add_more_info_port_name(port):
return '{} ({})'.format(port.name_or_id, ','.join([ip['ip_address'] for ip in port['fixed_ips']]))
ports = []
if api.base.is_service_enabled(request, 'network'):
network_list = api.neutron.network_list_for_tenant(request, request.user.tenant_id)
for network in network_list:
ports.extend([(port.id, add_more_info_port_name(port)) for port in api.neutron.port_list(request, network_id=network.id) if (port.device_owner == '')])
ports.sort(key=(lambda obj: obj[1]))
return ports
| [
"def",
"port_field_data",
"(",
"request",
")",
":",
"def",
"add_more_info_port_name",
"(",
"port",
")",
":",
"return",
"'{} ({})'",
".",
"format",
"(",
"port",
".",
"name_or_id",
",",
"','",
".",
"join",
"(",
"[",
"ip",
"[",
"'ip_address'",
"]",
"for",
"ip",
"in",
"port",
"[",
"'fixed_ips'",
"]",
"]",
")",
")",
"ports",
"=",
"[",
"]",
"if",
"api",
".",
"base",
".",
"is_service_enabled",
"(",
"request",
",",
"'network'",
")",
":",
"network_list",
"=",
"api",
".",
"neutron",
".",
"network_list_for_tenant",
"(",
"request",
",",
"request",
".",
"user",
".",
"tenant_id",
")",
"for",
"network",
"in",
"network_list",
":",
"ports",
".",
"extend",
"(",
"[",
"(",
"port",
".",
"id",
",",
"add_more_info_port_name",
"(",
"port",
")",
")",
"for",
"port",
"in",
"api",
".",
"neutron",
".",
"port_list",
"(",
"request",
",",
"network_id",
"=",
"network",
".",
"id",
")",
"if",
"(",
"port",
".",
"device_owner",
"==",
"''",
")",
"]",
")",
"ports",
".",
"sort",
"(",
"key",
"=",
"(",
"lambda",
"obj",
":",
"obj",
"[",
"1",
"]",
")",
")",
"return",
"ports"
] | returns a list of tuples of all ports available for the tenant . | train | false |
15,671 | def koans_suite(names):
suite = unittest.TestSuite()
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
for name in names:
tests = loader.loadTestsFromName(name)
suite.addTests(tests)
return suite
| [
"def",
"koans_suite",
"(",
"names",
")",
":",
"suite",
"=",
"unittest",
".",
"TestSuite",
"(",
")",
"loader",
"=",
"unittest",
".",
"TestLoader",
"(",
")",
"loader",
".",
"sortTestMethodsUsing",
"=",
"None",
"for",
"name",
"in",
"names",
":",
"tests",
"=",
"loader",
".",
"loadTestsFromName",
"(",
"name",
")",
"suite",
".",
"addTests",
"(",
"tests",
")",
"return",
"suite"
] | returns a testsuite loaded with all tests found in the given names . | train | false |
15,672 | @task
def addon_requirements(ctx):
for directory in os.listdir(settings.ADDON_PATH):
path = os.path.join(settings.ADDON_PATH, directory)
requirements_file = os.path.join(path, 'requirements.txt')
if (os.path.isdir(path) and os.path.isfile(requirements_file)):
print 'Installing requirements for {0}'.format(directory)
ctx.run(pip_install(requirements_file, constraints_file=CONSTRAINTS_PATH), echo=True)
print 'Finished installing addon requirements'
| [
"@",
"task",
"def",
"addon_requirements",
"(",
"ctx",
")",
":",
"for",
"directory",
"in",
"os",
".",
"listdir",
"(",
"settings",
".",
"ADDON_PATH",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"settings",
".",
"ADDON_PATH",
",",
"directory",
")",
"requirements_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'requirements.txt'",
")",
"if",
"(",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"requirements_file",
")",
")",
":",
"print",
"'Installing requirements for {0}'",
".",
"format",
"(",
"directory",
")",
"ctx",
".",
"run",
"(",
"pip_install",
"(",
"requirements_file",
",",
"constraints_file",
"=",
"CONSTRAINTS_PATH",
")",
",",
"echo",
"=",
"True",
")",
"print",
"'Finished installing addon requirements'"
] | install all addon requirements . | train | false |
15,673 | def _to_hass_temperature(temperature):
return (int((temperature * 346)) + 154)
| [
"def",
"_to_hass_temperature",
"(",
"temperature",
")",
":",
"return",
"(",
"int",
"(",
"(",
"temperature",
"*",
"346",
")",
")",
"+",
"154",
")"
] | convert percentage to home assistant color temperature units . | train | false |
15,674 | def update_datasource(jboss_config, name, new_properties, profile=None):
log.debug('======================== MODULE FUNCTION: jboss7.update_datasource, name=%s, profile=%s', name, profile)
ds_result = __read_datasource(jboss_config, name, profile)
current_properties = ds_result['result']
diff = dictdiffer.DictDiffer(new_properties, current_properties)
changed_properties = diff.changed()
ret = {'success': True, 'comment': ''}
if (len(changed_properties) > 0):
ds_resource_description = __get_datasource_resource_description(jboss_config, name, profile)
ds_attributes = ds_resource_description['attributes']
for key in changed_properties:
update_result = __update_datasource_property(jboss_config, name, key, new_properties[key], ds_attributes, profile)
if (not update_result['success']):
ret['result'] = False
ret['comment'] = (ret['comment'] + 'Could not update datasource property {0} with value {1},\n stdout: {2}\n'.format(key, new_properties[key], update_result['stdout']))
return ret
| [
"def",
"update_datasource",
"(",
"jboss_config",
",",
"name",
",",
"new_properties",
",",
"profile",
"=",
"None",
")",
":",
"log",
".",
"debug",
"(",
"'======================== MODULE FUNCTION: jboss7.update_datasource, name=%s, profile=%s'",
",",
"name",
",",
"profile",
")",
"ds_result",
"=",
"__read_datasource",
"(",
"jboss_config",
",",
"name",
",",
"profile",
")",
"current_properties",
"=",
"ds_result",
"[",
"'result'",
"]",
"diff",
"=",
"dictdiffer",
".",
"DictDiffer",
"(",
"new_properties",
",",
"current_properties",
")",
"changed_properties",
"=",
"diff",
".",
"changed",
"(",
")",
"ret",
"=",
"{",
"'success'",
":",
"True",
",",
"'comment'",
":",
"''",
"}",
"if",
"(",
"len",
"(",
"changed_properties",
")",
">",
"0",
")",
":",
"ds_resource_description",
"=",
"__get_datasource_resource_description",
"(",
"jboss_config",
",",
"name",
",",
"profile",
")",
"ds_attributes",
"=",
"ds_resource_description",
"[",
"'attributes'",
"]",
"for",
"key",
"in",
"changed_properties",
":",
"update_result",
"=",
"__update_datasource_property",
"(",
"jboss_config",
",",
"name",
",",
"key",
",",
"new_properties",
"[",
"key",
"]",
",",
"ds_attributes",
",",
"profile",
")",
"if",
"(",
"not",
"update_result",
"[",
"'success'",
"]",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"(",
"ret",
"[",
"'comment'",
"]",
"+",
"'Could not update datasource property {0} with value {1},\\n stdout: {2}\\n'",
".",
"format",
"(",
"key",
",",
"new_properties",
"[",
"key",
"]",
",",
"update_result",
"[",
"'stdout'",
"]",
")",
")",
"return",
"ret"
] | update a datasource . | train | true |
15,676 | def output():
return s3_rest_controller()
| [
"def",
"output",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] | the highstate outputter is only meant to be used with the state . | train | false |
15,677 | def assert_array_almost_equal(x, y, decimal=6, err_msg='', verbose=True):
numpy.testing.assert_array_almost_equal(cupy.asnumpy(x), cupy.asnumpy(y), decimal=decimal, err_msg=err_msg, verbose=verbose)
| [
"def",
"assert_array_almost_equal",
"(",
"x",
",",
"y",
",",
"decimal",
"=",
"6",
",",
"err_msg",
"=",
"''",
",",
"verbose",
"=",
"True",
")",
":",
"numpy",
".",
"testing",
".",
"assert_array_almost_equal",
"(",
"cupy",
".",
"asnumpy",
"(",
"x",
")",
",",
"cupy",
".",
"asnumpy",
"(",
"y",
")",
",",
"decimal",
"=",
"decimal",
",",
"err_msg",
"=",
"err_msg",
",",
"verbose",
"=",
"verbose",
")"
] | raises an assertionerror if objects are not equal up to desired precision . | train | false |
15,679 | def equal_weight(x, y):
return 1
| [
"def",
"equal_weight",
"(",
"x",
",",
"y",
")",
":",
"return",
"1"
] | equal_weight -> 1 . | train | false |
15,685 | def _default_chunk_size():
with io.open(__file__, u'r', encoding=u'latin1') as f:
return f._CHUNK_SIZE
| [
"def",
"_default_chunk_size",
"(",
")",
":",
"with",
"io",
".",
"open",
"(",
"__file__",
",",
"u'r'",
",",
"encoding",
"=",
"u'latin1'",
")",
"as",
"f",
":",
"return",
"f",
".",
"_CHUNK_SIZE"
] | get the default textiowrapper chunk size . | train | false |
15,686 | @Profiler.profile
def test_core_reuse_stmt(n):
stmt = select([Customer.__table__]).where((Customer.id == bindparam('id')))
with engine.connect() as conn:
for id_ in random.sample(ids, n):
row = conn.execute(stmt, id=id_).first()
tuple(row)
| [
"@",
"Profiler",
".",
"profile",
"def",
"test_core_reuse_stmt",
"(",
"n",
")",
":",
"stmt",
"=",
"select",
"(",
"[",
"Customer",
".",
"__table__",
"]",
")",
".",
"where",
"(",
"(",
"Customer",
".",
"id",
"==",
"bindparam",
"(",
"'id'",
")",
")",
")",
"with",
"engine",
".",
"connect",
"(",
")",
"as",
"conn",
":",
"for",
"id_",
"in",
"random",
".",
"sample",
"(",
"ids",
",",
"n",
")",
":",
"row",
"=",
"conn",
".",
"execute",
"(",
"stmt",
",",
"id",
"=",
"id_",
")",
".",
"first",
"(",
")",
"tuple",
"(",
"row",
")"
] | test core . | train | false |
15,687 | @pytest.fixture
def temporary_table():
bigquery_client = bigquery.Client()
dataset = bigquery_client.dataset(DATASET_ID)
tables = []
def factory(table_name):
new_table = dataset.table(table_name)
if new_table.exists():
new_table.delete()
tables.append(new_table)
return new_table
(yield factory)
for table in tables:
if table.exists():
table.delete()
| [
"@",
"pytest",
".",
"fixture",
"def",
"temporary_table",
"(",
")",
":",
"bigquery_client",
"=",
"bigquery",
".",
"Client",
"(",
")",
"dataset",
"=",
"bigquery_client",
".",
"dataset",
"(",
"DATASET_ID",
")",
"tables",
"=",
"[",
"]",
"def",
"factory",
"(",
"table_name",
")",
":",
"new_table",
"=",
"dataset",
".",
"table",
"(",
"table_name",
")",
"if",
"new_table",
".",
"exists",
"(",
")",
":",
"new_table",
".",
"delete",
"(",
")",
"tables",
".",
"append",
"(",
"new_table",
")",
"return",
"new_table",
"(",
"yield",
"factory",
")",
"for",
"table",
"in",
"tables",
":",
"if",
"table",
".",
"exists",
"(",
")",
":",
"table",
".",
"delete",
"(",
")"
] | fixture that returns a factory for tables that do not yet exist and will be automatically deleted after the test . | train | false |
15,689 | def get_tex_path_variable_texlive(variable, env=None):
print('Reading path for {0}...'.format(variable))
if (env is None):
env = os.environ
try:
t = SubprocessTimeoutThread(30, ['kpsewhich', ('--expand-path=$' + variable)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=False, env=env)
t.start()
stdout = t.stdout
if (stdout is None):
return None
return u'\n'.join(re.split('\\r?\\n', stdout.decode('utf-8').strip()))
except:
return None
| [
"def",
"get_tex_path_variable_texlive",
"(",
"variable",
",",
"env",
"=",
"None",
")",
":",
"print",
"(",
"'Reading path for {0}...'",
".",
"format",
"(",
"variable",
")",
")",
"if",
"(",
"env",
"is",
"None",
")",
":",
"env",
"=",
"os",
".",
"environ",
"try",
":",
"t",
"=",
"SubprocessTimeoutThread",
"(",
"30",
",",
"[",
"'kpsewhich'",
",",
"(",
"'--expand-path=$'",
"+",
"variable",
")",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
",",
"shell",
"=",
"False",
",",
"env",
"=",
"env",
")",
"t",
".",
"start",
"(",
")",
"stdout",
"=",
"t",
".",
"stdout",
"if",
"(",
"stdout",
"is",
"None",
")",
":",
"return",
"None",
"return",
"u'\\n'",
".",
"join",
"(",
"re",
".",
"split",
"(",
"'\\\\r?\\\\n'",
",",
"stdout",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"strip",
"(",
")",
")",
")",
"except",
":",
"return",
"None"
] | uses kpsewhich to read the value of a given tex path variable . | train | false |
15,691 | def url_path_parts(url):
return url_path(url).split('/')
| [
"def",
"url_path_parts",
"(",
"url",
")",
":",
"return",
"url_path",
"(",
"url",
")",
".",
"split",
"(",
"'/'",
")"
] | return the path segments of a url . | train | false |
15,692 | def set_boot_options(module, image_name, kickstart=None):
commands = ['terminal dont-ask']
if (kickstart is None):
commands.append(('install all nxos %s' % image_name))
else:
commands.append(('install all system %s kickstart %s' % (image_name, kickstart)))
execute_config_command(commands, module)
| [
"def",
"set_boot_options",
"(",
"module",
",",
"image_name",
",",
"kickstart",
"=",
"None",
")",
":",
"commands",
"=",
"[",
"'terminal dont-ask'",
"]",
"if",
"(",
"kickstart",
"is",
"None",
")",
":",
"commands",
".",
"append",
"(",
"(",
"'install all nxos %s'",
"%",
"image_name",
")",
")",
"else",
":",
"commands",
".",
"append",
"(",
"(",
"'install all system %s kickstart %s'",
"%",
"(",
"image_name",
",",
"kickstart",
")",
")",
")",
"execute_config_command",
"(",
"commands",
",",
"module",
")"
] | set boot variables like system image and kickstart image . | train | false |
15,694 | def test_suite():
return DocTestSuite()
| [
"def",
"test_suite",
"(",
")",
":",
"return",
"DocTestSuite",
"(",
")"
] | for the z3 test runner . | train | false |
15,696 | def teardown_module():
reload_module(db_replicator)
| [
"def",
"teardown_module",
"(",
")",
":",
"reload_module",
"(",
"db_replicator",
")"
] | tear down test fixtures . | train | false |
15,697 | def set_ev_cls(ev_cls, dispatchers=None):
def _set_ev_cls_dec(handler):
if ('callers' not in dir(handler)):
handler.callers = {}
for e in _listify(ev_cls):
handler.callers[e] = _Caller(_listify(dispatchers), e.__module__)
return handler
return _set_ev_cls_dec
| [
"def",
"set_ev_cls",
"(",
"ev_cls",
",",
"dispatchers",
"=",
"None",
")",
":",
"def",
"_set_ev_cls_dec",
"(",
"handler",
")",
":",
"if",
"(",
"'callers'",
"not",
"in",
"dir",
"(",
"handler",
")",
")",
":",
"handler",
".",
"callers",
"=",
"{",
"}",
"for",
"e",
"in",
"_listify",
"(",
"ev_cls",
")",
":",
"handler",
".",
"callers",
"[",
"e",
"]",
"=",
"_Caller",
"(",
"_listify",
"(",
"dispatchers",
")",
",",
"e",
".",
"__module__",
")",
"return",
"handler",
"return",
"_set_ev_cls_dec"
] | a decorator for ryu application to declare an event handler . | train | true |
15,698 | def test_icon():
schema = vol.Schema(cv.icon)
for value in (False, 'work', 'icon:work'):
with pytest.raises(vol.MultipleInvalid):
schema(value)
schema('mdi:work')
| [
"def",
"test_icon",
"(",
")",
":",
"schema",
"=",
"vol",
".",
"Schema",
"(",
"cv",
".",
"icon",
")",
"for",
"value",
"in",
"(",
"False",
",",
"'work'",
",",
"'icon:work'",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"MultipleInvalid",
")",
":",
"schema",
"(",
"value",
")",
"schema",
"(",
"'mdi:work'",
")"
] | test icon validation . | train | false |
15,700 | def next_rising(hass, entity_id=None):
utc_next = next_rising_utc(hass, entity_id)
return (dt_util.as_local(utc_next) if utc_next else None)
| [
"def",
"next_rising",
"(",
"hass",
",",
"entity_id",
"=",
"None",
")",
":",
"utc_next",
"=",
"next_rising_utc",
"(",
"hass",
",",
"entity_id",
")",
"return",
"(",
"dt_util",
".",
"as_local",
"(",
"utc_next",
")",
"if",
"utc_next",
"else",
"None",
")"
] | local datetime object of the next sun rising . | train | false |
15,701 | def get_vlanid_and_vswitch_for_portgroup(session, pg_name, cluster=None):
host_mor = vm_util.get_host_ref(session, cluster)
port_grps_on_host_ret = session._call_method(vutil, 'get_object_property', host_mor, 'config.network.portgroup')
if (not port_grps_on_host_ret):
msg = _('ESX SOAP server returned an empty port group for the host system in its response')
LOG.error(msg)
raise exception.NovaException(msg)
port_grps_on_host = port_grps_on_host_ret.HostPortGroup
for p_gp in port_grps_on_host:
if (p_gp.spec.name == pg_name):
p_grp_vswitch_name = p_gp.spec.vswitchName
return (p_gp.spec.vlanId, p_grp_vswitch_name)
return (None, None)
| [
"def",
"get_vlanid_and_vswitch_for_portgroup",
"(",
"session",
",",
"pg_name",
",",
"cluster",
"=",
"None",
")",
":",
"host_mor",
"=",
"vm_util",
".",
"get_host_ref",
"(",
"session",
",",
"cluster",
")",
"port_grps_on_host_ret",
"=",
"session",
".",
"_call_method",
"(",
"vutil",
",",
"'get_object_property'",
",",
"host_mor",
",",
"'config.network.portgroup'",
")",
"if",
"(",
"not",
"port_grps_on_host_ret",
")",
":",
"msg",
"=",
"_",
"(",
"'ESX SOAP server returned an empty port group for the host system in its response'",
")",
"LOG",
".",
"error",
"(",
"msg",
")",
"raise",
"exception",
".",
"NovaException",
"(",
"msg",
")",
"port_grps_on_host",
"=",
"port_grps_on_host_ret",
".",
"HostPortGroup",
"for",
"p_gp",
"in",
"port_grps_on_host",
":",
"if",
"(",
"p_gp",
".",
"spec",
".",
"name",
"==",
"pg_name",
")",
":",
"p_grp_vswitch_name",
"=",
"p_gp",
".",
"spec",
".",
"vswitchName",
"return",
"(",
"p_gp",
".",
"spec",
".",
"vlanId",
",",
"p_grp_vswitch_name",
")",
"return",
"(",
"None",
",",
"None",
")"
] | get the vlan id and vswicth associated with the port group . | train | false |
15,702 | @pytest.mark.parametrize('text, deleted, rest', [fixme(('test foobar| delete', ' delete', 'test foobar|')), ('test foobar| delete', ' ', 'test foobar|delete'), fixme(('test foo|delete bar', 'delete', 'test foo| bar')), ('test foo|delete bar', 'delete ', 'test foo|bar'), fixme(('test foo<bar> delete', ' delete', 'test foobar|')), ('test foo<bar>delete', 'bardelete', 'test foo|')])
def test_rl_kill_word(lineedit, bridge, text, deleted, rest):
_validate_deletion(lineedit, bridge, bridge.rl_kill_word, text, deleted, rest)
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'text, deleted, rest'",
",",
"[",
"fixme",
"(",
"(",
"'test foobar| delete'",
",",
"' delete'",
",",
"'test foobar|'",
")",
")",
",",
"(",
"'test foobar| delete'",
",",
"' '",
",",
"'test foobar|delete'",
")",
",",
"fixme",
"(",
"(",
"'test foo|delete bar'",
",",
"'delete'",
",",
"'test foo| bar'",
")",
")",
",",
"(",
"'test foo|delete bar'",
",",
"'delete '",
",",
"'test foo|bar'",
")",
",",
"fixme",
"(",
"(",
"'test foo<bar> delete'",
",",
"' delete'",
",",
"'test foobar|'",
")",
")",
",",
"(",
"'test foo<bar>delete'",
",",
"'bardelete'",
",",
"'test foo|'",
")",
"]",
")",
"def",
"test_rl_kill_word",
"(",
"lineedit",
",",
"bridge",
",",
"text",
",",
"deleted",
",",
"rest",
")",
":",
"_validate_deletion",
"(",
"lineedit",
",",
"bridge",
",",
"bridge",
".",
"rl_kill_word",
",",
"text",
",",
"deleted",
",",
"rest",
")"
] | delete to word end and see if it comes back with yank . | train | false |
15,703 | def endswith_lf(line):
return line.endswith(('\n' if isinstance(line, str) else '\n'))
| [
"def",
"endswith_lf",
"(",
"line",
")",
":",
"return",
"line",
".",
"endswith",
"(",
"(",
"'\\n'",
"if",
"isinstance",
"(",
"line",
",",
"str",
")",
"else",
"'\\n'",
")",
")"
] | return true if line ends with . | train | false |
15,705 | def _register_parser(cls):
assert (cls.cls_msg_type is not None)
assert (cls.cls_msg_type not in _MSG_PARSERS)
_MSG_PARSERS[cls.cls_msg_type] = cls.parser
return cls
| [
"def",
"_register_parser",
"(",
"cls",
")",
":",
"assert",
"(",
"cls",
".",
"cls_msg_type",
"is",
"not",
"None",
")",
"assert",
"(",
"cls",
".",
"cls_msg_type",
"not",
"in",
"_MSG_PARSERS",
")",
"_MSG_PARSERS",
"[",
"cls",
".",
"cls_msg_type",
"]",
"=",
"cls",
".",
"parser",
"return",
"cls"
] | class decorator to register msg parser . | train | true |
15,706 | def _consume_subscriptions(topic):
return list(topic.list_subscriptions())
| [
"def",
"_consume_subscriptions",
"(",
"topic",
")",
":",
"return",
"list",
"(",
"topic",
".",
"list_subscriptions",
"(",
")",
")"
] | consume entire iterator . | train | false |
15,707 | def MkdirFileLock(*args, **kwds):
from . import mkdirlockfile
return _fl_helper(mkdirlockfile.MkdirLockFile, 'lockfile.mkdirlockfile', *args, **kwds)
| [
"def",
"MkdirFileLock",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
":",
"from",
".",
"import",
"mkdirlockfile",
"return",
"_fl_helper",
"(",
"mkdirlockfile",
".",
"MkdirLockFile",
",",
"'lockfile.mkdirlockfile'",
",",
"*",
"args",
",",
"**",
"kwds",
")"
] | factory function provided for backwards compatibility . | train | false |
15,708 | @salt.utils.decorators.memoize
def _is_globalzone():
if (not (__grains__['kernel'] == 'SunOS')):
return False
zonename = __salt__['cmd.run_all']('zonename')
if zonename['retcode']:
return False
if (zonename['stdout'] == 'global'):
return True
return False
| [
"@",
"salt",
".",
"utils",
".",
"decorators",
".",
"memoize",
"def",
"_is_globalzone",
"(",
")",
":",
"if",
"(",
"not",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"==",
"'SunOS'",
")",
")",
":",
"return",
"False",
"zonename",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"'zonename'",
")",
"if",
"zonename",
"[",
"'retcode'",
"]",
":",
"return",
"False",
"if",
"(",
"zonename",
"[",
"'stdout'",
"]",
"==",
"'global'",
")",
":",
"return",
"True",
"return",
"False"
] | check if we are running in the globalzone . | train | false |
15,709 | def token_lists(q, phrases=True):
if q.is_leaf():
if (phrases or (not isinstance(q, Phrase))):
return list(q.tokens())
else:
ls = []
for qq in q.children():
t = token_lists(qq, phrases=phrases)
if (len(t) == 1):
t = t[0]
if t:
ls.append(t)
return ls
| [
"def",
"token_lists",
"(",
"q",
",",
"phrases",
"=",
"True",
")",
":",
"if",
"q",
".",
"is_leaf",
"(",
")",
":",
"if",
"(",
"phrases",
"or",
"(",
"not",
"isinstance",
"(",
"q",
",",
"Phrase",
")",
")",
")",
":",
"return",
"list",
"(",
"q",
".",
"tokens",
"(",
")",
")",
"else",
":",
"ls",
"=",
"[",
"]",
"for",
"qq",
"in",
"q",
".",
"children",
"(",
")",
":",
"t",
"=",
"token_lists",
"(",
"qq",
",",
"phrases",
"=",
"phrases",
")",
"if",
"(",
"len",
"(",
"t",
")",
"==",
"1",
")",
":",
"t",
"=",
"t",
"[",
"0",
"]",
"if",
"t",
":",
"ls",
".",
"append",
"(",
"t",
")",
"return",
"ls"
] | returns the terms in the query tree . | train | false |
15,710 | def create_rpc(deadline=None, callback=None):
if (deadline is None):
deadline = get_default_fetch_deadline()
return apiproxy_stub_map.UserRPC('urlfetch', deadline, callback)
| [
"def",
"create_rpc",
"(",
"deadline",
"=",
"None",
",",
"callback",
"=",
"None",
")",
":",
"if",
"(",
"deadline",
"is",
"None",
")",
":",
"deadline",
"=",
"get_default_fetch_deadline",
"(",
")",
"return",
"apiproxy_stub_map",
".",
"UserRPC",
"(",
"'urlfetch'",
",",
"deadline",
",",
"callback",
")"
] | creates an rpc object for use with the app identity api . | train | false |
15,711 | def get_public_endpoint(path=''):
return get_endpoint(settings.EDXNOTES_PUBLIC_API, path)
| [
"def",
"get_public_endpoint",
"(",
"path",
"=",
"''",
")",
":",
"return",
"get_endpoint",
"(",
"settings",
".",
"EDXNOTES_PUBLIC_API",
",",
"path",
")"
] | get the full path to a resource on the public notes api . | train | false |
15,712 | def remove_service_protocol(service, protocol):
cmd = '--permanent --service={0} --remove-protocol={1}'.format(service, protocol)
return __firewall_cmd(cmd)
| [
"def",
"remove_service_protocol",
"(",
"service",
",",
"protocol",
")",
":",
"cmd",
"=",
"'--permanent --service={0} --remove-protocol={1}'",
".",
"format",
"(",
"service",
",",
"protocol",
")",
"return",
"__firewall_cmd",
"(",
"cmd",
")"
] | remove a protocol from the specified service . | train | false |
15,713 | def get_parent_notification_type(node, event, user):
AbstractNode = apps.get_model('osf.AbstractNode')
NotificationSubscription = apps.get_model('osf.NotificationSubscription')
if (node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, 'read')):
parent = node.parent_node
key = to_subscription_key(parent._id, event)
try:
subscription = NotificationSubscription.find_one(Q('_id', 'eq', key))
except NoResultsFound:
return get_parent_notification_type(parent, event, user)
for notification_type in constants.NOTIFICATION_TYPES:
if getattr(subscription, notification_type).filter(id=user.id).exists():
return notification_type
else:
return get_parent_notification_type(parent, event, user)
else:
return None
| [
"def",
"get_parent_notification_type",
"(",
"node",
",",
"event",
",",
"user",
")",
":",
"AbstractNode",
"=",
"apps",
".",
"get_model",
"(",
"'osf.AbstractNode'",
")",
"NotificationSubscription",
"=",
"apps",
".",
"get_model",
"(",
"'osf.NotificationSubscription'",
")",
"if",
"(",
"node",
"and",
"isinstance",
"(",
"node",
",",
"AbstractNode",
")",
"and",
"node",
".",
"parent_node",
"and",
"node",
".",
"parent_node",
".",
"has_permission",
"(",
"user",
",",
"'read'",
")",
")",
":",
"parent",
"=",
"node",
".",
"parent_node",
"key",
"=",
"to_subscription_key",
"(",
"parent",
".",
"_id",
",",
"event",
")",
"try",
":",
"subscription",
"=",
"NotificationSubscription",
".",
"find_one",
"(",
"Q",
"(",
"'_id'",
",",
"'eq'",
",",
"key",
")",
")",
"except",
"NoResultsFound",
":",
"return",
"get_parent_notification_type",
"(",
"parent",
",",
"event",
",",
"user",
")",
"for",
"notification_type",
"in",
"constants",
".",
"NOTIFICATION_TYPES",
":",
"if",
"getattr",
"(",
"subscription",
",",
"notification_type",
")",
".",
"filter",
"(",
"id",
"=",
"user",
".",
"id",
")",
".",
"exists",
"(",
")",
":",
"return",
"notification_type",
"else",
":",
"return",
"get_parent_notification_type",
"(",
"parent",
",",
"event",
",",
"user",
")",
"else",
":",
"return",
"None"
] | given an event on a node . | train | false |
15,715 | def _score_slices(y_true, list_y_pred, scorer, score_mode, cv):
scores_list = list()
for y_pred in list_y_pred:
scores = list()
for (t, this_y_pred) in enumerate(y_pred):
if (score_mode in ['mean-fold-wise', 'fold-wise']):
scores_ = list()
for (train, test) in cv:
scores_.append(scorer(y_true[test], this_y_pred[test]))
scores_ = np.array(scores_)
if (score_mode == 'mean-fold-wise'):
scores_ = np.mean(scores_, axis=0)
elif (score_mode == 'mean-sample-wise'):
scores_ = scorer(y_true, this_y_pred)
scores.append(scores_)
scores_list.append(scores)
return scores_list
| [
"def",
"_score_slices",
"(",
"y_true",
",",
"list_y_pred",
",",
"scorer",
",",
"score_mode",
",",
"cv",
")",
":",
"scores_list",
"=",
"list",
"(",
")",
"for",
"y_pred",
"in",
"list_y_pred",
":",
"scores",
"=",
"list",
"(",
")",
"for",
"(",
"t",
",",
"this_y_pred",
")",
"in",
"enumerate",
"(",
"y_pred",
")",
":",
"if",
"(",
"score_mode",
"in",
"[",
"'mean-fold-wise'",
",",
"'fold-wise'",
"]",
")",
":",
"scores_",
"=",
"list",
"(",
")",
"for",
"(",
"train",
",",
"test",
")",
"in",
"cv",
":",
"scores_",
".",
"append",
"(",
"scorer",
"(",
"y_true",
"[",
"test",
"]",
",",
"this_y_pred",
"[",
"test",
"]",
")",
")",
"scores_",
"=",
"np",
".",
"array",
"(",
"scores_",
")",
"if",
"(",
"score_mode",
"==",
"'mean-fold-wise'",
")",
":",
"scores_",
"=",
"np",
".",
"mean",
"(",
"scores_",
",",
"axis",
"=",
"0",
")",
"elif",
"(",
"score_mode",
"==",
"'mean-sample-wise'",
")",
":",
"scores_",
"=",
"scorer",
"(",
"y_true",
",",
"this_y_pred",
")",
"scores",
".",
"append",
"(",
"scores_",
")",
"scores_list",
".",
"append",
"(",
"scores",
")",
"return",
"scores_list"
] | loop across chunks of testing slices . | train | false |
15,716 | def finalize(filename, content_type=RAW):
if (not filename):
raise InvalidArgumentError('Filename is empty')
if (not isinstance(filename, basestring)):
raise InvalidArgumentError('Filename should be a string')
if (content_type != RAW):
raise InvalidArgumentError('Invalid content type')
try:
f = open(filename, 'a', exclusive_lock=True, content_type=content_type)
f.close(finalize=True)
except FinalizationError:
pass
| [
"def",
"finalize",
"(",
"filename",
",",
"content_type",
"=",
"RAW",
")",
":",
"if",
"(",
"not",
"filename",
")",
":",
"raise",
"InvalidArgumentError",
"(",
"'Filename is empty'",
")",
"if",
"(",
"not",
"isinstance",
"(",
"filename",
",",
"basestring",
")",
")",
":",
"raise",
"InvalidArgumentError",
"(",
"'Filename should be a string'",
")",
"if",
"(",
"content_type",
"!=",
"RAW",
")",
":",
"raise",
"InvalidArgumentError",
"(",
"'Invalid content type'",
")",
"try",
":",
"f",
"=",
"open",
"(",
"filename",
",",
"'a'",
",",
"exclusive_lock",
"=",
"True",
",",
"content_type",
"=",
"content_type",
")",
"f",
".",
"close",
"(",
"finalize",
"=",
"True",
")",
"except",
"FinalizationError",
":",
"pass"
] | resolve conflicts and assign keys to every action in shortcuts . | train | false |
15,717 | def get_programs(user, program_id=None, use_catalog=False):
if use_catalog:
programs = [munge_catalog_program(program) for program in get_catalog_programs(user)]
else:
programs_config = ProgramsApiConfig.current()
cache_key = (programs_config.CACHE_KEY if (programs_config.is_cache_enabled and (not user.is_staff)) else None)
programs = get_edx_api_data(programs_config, user, 'programs', resource_id=program_id, cache_key=cache_key)
if (not program_id):
programs += [munge_catalog_program(micromaster) for micromaster in get_catalog_programs(user, type='MicroMasters')]
return programs
| [
"def",
"get_programs",
"(",
"user",
",",
"program_id",
"=",
"None",
",",
"use_catalog",
"=",
"False",
")",
":",
"if",
"use_catalog",
":",
"programs",
"=",
"[",
"munge_catalog_program",
"(",
"program",
")",
"for",
"program",
"in",
"get_catalog_programs",
"(",
"user",
")",
"]",
"else",
":",
"programs_config",
"=",
"ProgramsApiConfig",
".",
"current",
"(",
")",
"cache_key",
"=",
"(",
"programs_config",
".",
"CACHE_KEY",
"if",
"(",
"programs_config",
".",
"is_cache_enabled",
"and",
"(",
"not",
"user",
".",
"is_staff",
")",
")",
"else",
"None",
")",
"programs",
"=",
"get_edx_api_data",
"(",
"programs_config",
",",
"user",
",",
"'programs'",
",",
"resource_id",
"=",
"program_id",
",",
"cache_key",
"=",
"cache_key",
")",
"if",
"(",
"not",
"program_id",
")",
":",
"programs",
"+=",
"[",
"munge_catalog_program",
"(",
"micromaster",
")",
"for",
"micromaster",
"in",
"get_catalog_programs",
"(",
"user",
",",
"type",
"=",
"'MicroMasters'",
")",
"]",
"return",
"programs"
] | retrieve marketable programs from the catalog service . | train | false |
15,718 | def getNewRepository():
return ExportRepository()
| [
"def",
"getNewRepository",
"(",
")",
":",
"return",
"ExportRepository",
"(",
")"
] | get new repository . | train | false |
15,719 | def _filterwarnings(filters, quiet=False):
frame = sys._getframe(2)
registry = frame.f_globals.get('__warningregistry__')
if registry:
registry.clear()
with warnings.catch_warnings(record=True) as w:
sys.modules['warnings'].simplefilter('always')
(yield WarningsRecorder(w))
reraise = [warning.message for warning in w]
missing = []
for (msg, cat) in filters:
seen = False
for exc in reraise[:]:
message = str(exc)
if (re.match(msg, message, re.I) and issubclass(exc.__class__, cat)):
seen = True
reraise.remove(exc)
if ((not seen) and (not quiet)):
missing.append((msg, cat.__name__))
if reraise:
raise AssertionError(('unhandled warning %r' % reraise[0]))
if missing:
raise AssertionError(('filter (%r, %s) did not catch any warning' % missing[0]))
| [
"def",
"_filterwarnings",
"(",
"filters",
",",
"quiet",
"=",
"False",
")",
":",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
"2",
")",
"registry",
"=",
"frame",
".",
"f_globals",
".",
"get",
"(",
"'__warningregistry__'",
")",
"if",
"registry",
":",
"registry",
".",
"clear",
"(",
")",
"with",
"warnings",
".",
"catch_warnings",
"(",
"record",
"=",
"True",
")",
"as",
"w",
":",
"sys",
".",
"modules",
"[",
"'warnings'",
"]",
".",
"simplefilter",
"(",
"'always'",
")",
"(",
"yield",
"WarningsRecorder",
"(",
"w",
")",
")",
"reraise",
"=",
"[",
"warning",
".",
"message",
"for",
"warning",
"in",
"w",
"]",
"missing",
"=",
"[",
"]",
"for",
"(",
"msg",
",",
"cat",
")",
"in",
"filters",
":",
"seen",
"=",
"False",
"for",
"exc",
"in",
"reraise",
"[",
":",
"]",
":",
"message",
"=",
"str",
"(",
"exc",
")",
"if",
"(",
"re",
".",
"match",
"(",
"msg",
",",
"message",
",",
"re",
".",
"I",
")",
"and",
"issubclass",
"(",
"exc",
".",
"__class__",
",",
"cat",
")",
")",
":",
"seen",
"=",
"True",
"reraise",
".",
"remove",
"(",
"exc",
")",
"if",
"(",
"(",
"not",
"seen",
")",
"and",
"(",
"not",
"quiet",
")",
")",
":",
"missing",
".",
"append",
"(",
"(",
"msg",
",",
"cat",
".",
"__name__",
")",
")",
"if",
"reraise",
":",
"raise",
"AssertionError",
"(",
"(",
"'unhandled warning %r'",
"%",
"reraise",
"[",
"0",
"]",
")",
")",
"if",
"missing",
":",
"raise",
"AssertionError",
"(",
"(",
"'filter (%r, %s) did not catch any warning'",
"%",
"missing",
"[",
"0",
"]",
")",
")"
] | catch the warnings . | train | false |
15,720 | @click.command(u'clear-limits')
@click.option(u'--site', help=u'site name')
@click.argument(u'limits', nargs=(-1), type=click.Choice([u'emails', u'space', u'users', u'email_group', u'expiry', u'support_email', u'support_chat', u'upgrade_url']))
@pass_context
def clear_limits(context, site, limits):
from frappe.limits import clear_limit as _clear_limit
if (not limits):
return
if (not site):
site = get_site(context)
with frappe.init_site(site):
_clear_limit(limits)
limits = get_limits()
if (not limits):
update_site_config(u'limits', u'None', validate=False)
| [
"@",
"click",
".",
"command",
"(",
"u'clear-limits'",
")",
"@",
"click",
".",
"option",
"(",
"u'--site'",
",",
"help",
"=",
"u'site name'",
")",
"@",
"click",
".",
"argument",
"(",
"u'limits'",
",",
"nargs",
"=",
"(",
"-",
"1",
")",
",",
"type",
"=",
"click",
".",
"Choice",
"(",
"[",
"u'emails'",
",",
"u'space'",
",",
"u'users'",
",",
"u'email_group'",
",",
"u'expiry'",
",",
"u'support_email'",
",",
"u'support_chat'",
",",
"u'upgrade_url'",
"]",
")",
")",
"@",
"pass_context",
"def",
"clear_limits",
"(",
"context",
",",
"site",
",",
"limits",
")",
":",
"from",
"frappe",
".",
"limits",
"import",
"clear_limit",
"as",
"_clear_limit",
"if",
"(",
"not",
"limits",
")",
":",
"return",
"if",
"(",
"not",
"site",
")",
":",
"site",
"=",
"get_site",
"(",
"context",
")",
"with",
"frappe",
".",
"init_site",
"(",
"site",
")",
":",
"_clear_limit",
"(",
"limits",
")",
"limits",
"=",
"get_limits",
"(",
")",
"if",
"(",
"not",
"limits",
")",
":",
"update_site_config",
"(",
"u'limits'",
",",
"u'None'",
",",
"validate",
"=",
"False",
")"
] | clears given limit from the site config . | train | false |
15,721 | def init(mpstate):
return SerialModule(mpstate)
| [
"def",
"init",
"(",
"mpstate",
")",
":",
"return",
"SerialModule",
"(",
"mpstate",
")"
] | change the system runlevel on sysv compatible systems cli example: . | train | false |
15,722 | def indentsize(line):
expline = string.expandtabs(line)
return (len(expline) - len(string.lstrip(expline)))
| [
"def",
"indentsize",
"(",
"line",
")",
":",
"expline",
"=",
"string",
".",
"expandtabs",
"(",
"line",
")",
"return",
"(",
"len",
"(",
"expline",
")",
"-",
"len",
"(",
"string",
".",
"lstrip",
"(",
"expline",
")",
")",
")"
] | return the indent size . | train | true |
15,724 | def _MarkerFactory(marker_code, stream, offset):
if (marker_code == JPEG_MARKER_CODE.APP0):
marker_cls = _App0Marker
elif (marker_code == JPEG_MARKER_CODE.APP1):
marker_cls = _App1Marker
elif (marker_code in JPEG_MARKER_CODE.SOF_MARKER_CODES):
marker_cls = _SofMarker
else:
marker_cls = _Marker
return marker_cls.from_stream(stream, marker_code, offset)
| [
"def",
"_MarkerFactory",
"(",
"marker_code",
",",
"stream",
",",
"offset",
")",
":",
"if",
"(",
"marker_code",
"==",
"JPEG_MARKER_CODE",
".",
"APP0",
")",
":",
"marker_cls",
"=",
"_App0Marker",
"elif",
"(",
"marker_code",
"==",
"JPEG_MARKER_CODE",
".",
"APP1",
")",
":",
"marker_cls",
"=",
"_App1Marker",
"elif",
"(",
"marker_code",
"in",
"JPEG_MARKER_CODE",
".",
"SOF_MARKER_CODES",
")",
":",
"marker_cls",
"=",
"_SofMarker",
"else",
":",
"marker_cls",
"=",
"_Marker",
"return",
"marker_cls",
".",
"from_stream",
"(",
"stream",
",",
"marker_code",
",",
"offset",
")"
] | return |_marker| or subclass instance appropriate for marker at *offset* in *stream* having *marker_code* . | train | true |
15,725 | def get_volume_size(path):
try:
(out, _err) = utils.execute('blockdev', '--getsize64', path, run_as_root=True)
except processutils.ProcessExecutionError:
if (not utils.path_exists(path)):
raise exception.VolumeBDMPathNotFound(path=path)
else:
raise
return int(out)
| [
"def",
"get_volume_size",
"(",
"path",
")",
":",
"try",
":",
"(",
"out",
",",
"_err",
")",
"=",
"utils",
".",
"execute",
"(",
"'blockdev'",
",",
"'--getsize64'",
",",
"path",
",",
"run_as_root",
"=",
"True",
")",
"except",
"processutils",
".",
"ProcessExecutionError",
":",
"if",
"(",
"not",
"utils",
".",
"path_exists",
"(",
"path",
")",
")",
":",
"raise",
"exception",
".",
"VolumeBDMPathNotFound",
"(",
"path",
"=",
"path",
")",
"else",
":",
"raise",
"return",
"int",
"(",
"out",
")"
] | calculate the volume size . | train | false |
15,727 | def get_optparser(cmdpath, usage=None):
command = os.path.basename(cmdpath)
if re.match('mne_(.*).py', command):
command = command[4:(-3)]
elif re.match('mne_(.*).pyc', command):
command = command[4:(-4)]
if cmdpath.endswith('.pyc'):
mod = imp.load_compiled('__temp', cmdpath)
else:
mod = imp.load_source('__temp', cmdpath)
if mod.__doc__:
(doc, description, epilog) = (mod.__doc__, None, None)
doc_lines = doc.split('\n')
description = doc_lines[0]
if (len(doc_lines) > 1):
epilog = '\n'.join(doc_lines[1:])
OptionParser.format_epilog = (lambda self, formatter: self.epilog)
parser = OptionParser(prog=('mne %s' % command), version=mne.__version__, description=description, epilog=epilog, usage=usage)
return parser
| [
"def",
"get_optparser",
"(",
"cmdpath",
",",
"usage",
"=",
"None",
")",
":",
"command",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"cmdpath",
")",
"if",
"re",
".",
"match",
"(",
"'mne_(.*).py'",
",",
"command",
")",
":",
"command",
"=",
"command",
"[",
"4",
":",
"(",
"-",
"3",
")",
"]",
"elif",
"re",
".",
"match",
"(",
"'mne_(.*).pyc'",
",",
"command",
")",
":",
"command",
"=",
"command",
"[",
"4",
":",
"(",
"-",
"4",
")",
"]",
"if",
"cmdpath",
".",
"endswith",
"(",
"'.pyc'",
")",
":",
"mod",
"=",
"imp",
".",
"load_compiled",
"(",
"'__temp'",
",",
"cmdpath",
")",
"else",
":",
"mod",
"=",
"imp",
".",
"load_source",
"(",
"'__temp'",
",",
"cmdpath",
")",
"if",
"mod",
".",
"__doc__",
":",
"(",
"doc",
",",
"description",
",",
"epilog",
")",
"=",
"(",
"mod",
".",
"__doc__",
",",
"None",
",",
"None",
")",
"doc_lines",
"=",
"doc",
".",
"split",
"(",
"'\\n'",
")",
"description",
"=",
"doc_lines",
"[",
"0",
"]",
"if",
"(",
"len",
"(",
"doc_lines",
")",
">",
"1",
")",
":",
"epilog",
"=",
"'\\n'",
".",
"join",
"(",
"doc_lines",
"[",
"1",
":",
"]",
")",
"OptionParser",
".",
"format_epilog",
"=",
"(",
"lambda",
"self",
",",
"formatter",
":",
"self",
".",
"epilog",
")",
"parser",
"=",
"OptionParser",
"(",
"prog",
"=",
"(",
"'mne %s'",
"%",
"command",
")",
",",
"version",
"=",
"mne",
".",
"__version__",
",",
"description",
"=",
"description",
",",
"epilog",
"=",
"epilog",
",",
"usage",
"=",
"usage",
")",
"return",
"parser"
] | create optionparser with cmd specific settings . | train | false |
@register.simple_tag
def message_url(obj):
    """Return the 'messages:create' URL for *obj*, encoding both the object's
    content type id and its pk in base62.

    Falls back to the empty string when *obj* lacks the expected ORM
    attributes (``_meta`` / ``pk``), so the tag is safe on arbitrary values.
    """
    try:
        meta = obj._meta
        content_type = ContentType.objects.get(
            app_label=meta.app_label, model=meta.module_name)
        url_kwargs = {
            'content_type_id': base62.from_decimal(content_type.pk),
            'object_id': base62.from_decimal(obj.pk),
        }
        return reverse('messages:create', kwargs=url_kwargs)
    except AttributeError:
        # Non-model objects (or anything without _meta/pk) render as nothing.
        return ''
| [
"@",
"register",
".",
"simple_tag",
"def",
"message_url",
"(",
"obj",
")",
":",
"try",
":",
"content_type",
"=",
"ContentType",
".",
"objects",
".",
"get",
"(",
"app_label",
"=",
"obj",
".",
"_meta",
".",
"app_label",
",",
"model",
"=",
"obj",
".",
"_meta",
".",
"module_name",
")",
"return",
"reverse",
"(",
"'messages:create'",
",",
"kwargs",
"=",
"{",
"'content_type_id'",
":",
"base62",
".",
"from_decimal",
"(",
"content_type",
".",
"pk",
")",
",",
"'object_id'",
":",
"base62",
".",
"from_decimal",
"(",
"obj",
".",
"pk",
")",
"}",
")",
"except",
"AttributeError",
":",
"return",
"''"
] | given an object . | train | false |
def route_path(route_name, request, *elements, **kw):
    """Backwards-compatibility shim: delegate to ``request.route_path``.

    Kept for callers written against the old module-level API; all
    positional *elements* and keyword options are forwarded unchanged.
    """
    generate = request.route_path
    return generate(route_name, *elements, **kw)
| [
"def",
"route_path",
"(",
"route_name",
",",
"request",
",",
"*",
"elements",
",",
"**",
"kw",
")",
":",
"return",
"request",
".",
"route_path",
"(",
"route_name",
",",
"*",
"elements",
",",
"**",
"kw",
")"
] | this is a backwards compatibility function . | train | false |
def _HCCM(results, scale):
    """Heteroscedasticity-consistent covariance sandwich.

    Computes ``pinv(X) @ diag(scale) @ pinv(X).T`` where ``pinv(X)`` is the
    model's pseudo-inverse design matrix; ``scale`` is the per-observation
    weight vector (length n_obs).
    """
    pinv = results.model.pinv_wexog
    # scale[:, None] broadcasts the weights down the rows of pinv.T,
    # which is equivalent to multiplying by diag(scale) without forming it.
    return pinv.dot(scale[:, None] * pinv.T)
| [
"def",
"_HCCM",
"(",
"results",
",",
"scale",
")",
":",
"H",
"=",
"np",
".",
"dot",
"(",
"results",
".",
"model",
".",
"pinv_wexog",
",",
"(",
"scale",
"[",
":",
",",
"None",
"]",
"*",
"results",
".",
"model",
".",
"pinv_wexog",
".",
"T",
")",
")",
"return",
"H"
] | sandwich with pinv(x) * diag * pinv(x) . | train | false |
def C(classname):
    """Look up an Objective-C class object by name via the objc runtime."""
    encoded_name = _utf8(classname)
    return objc.objc_getClass(encoded_name)
| [
"def",
"C",
"(",
"classname",
")",
":",
"return",
"objc",
".",
"objc_getClass",
"(",
"_utf8",
"(",
"classname",
")",
")"
] | get an objc class by name . | train | false |
def ParseNamespaceQuery(query, filters, orders):
    """Parse a ``__namespace__`` metadata query into a string key range.

    Rejects ancestor queries (not supported on ``__namespace__``), then
    builds a key-filtered range and remaps its endpoints from keys to
    namespace name strings.
    """
    Check(not query.has_ancestor(),
          'ancestor queries on __namespace__ not allowed')
    namespace_range = ParseKeyFilteredQuery(filters, orders)
    namespace_range.Remap(_NamespaceKeyToString)
    return namespace_range
| [
"def",
"ParseNamespaceQuery",
"(",
"query",
",",
"filters",
",",
"orders",
")",
":",
"Check",
"(",
"(",
"not",
"query",
".",
"has_ancestor",
"(",
")",
")",
",",
"'ancestor queries on __namespace__ not allowed'",
")",
"key_range",
"=",
"ParseKeyFilteredQuery",
"(",
"filters",
",",
"orders",
")",
"key_range",
".",
"Remap",
"(",
"_NamespaceKeyToString",
")",
"return",
"key_range"
] | parse __namespace__ queries . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.