id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
52,397
|
def base(url):
return urlparse.urlparse(url).netloc
|
[
"def",
"base",
"(",
"url",
")",
":",
"return",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
".",
"netloc"
] |
returns the url domain name: URL => en .
|
train
| false
|
52,398
|
def win_path_to_unix(path, root_prefix=u''):
path_re = u'(?<![:/^a-zA-Z])([a-zA-Z]:[\\/\\\\]+(?:[^:*?"<>|]+[\\/\\\\]+)*[^:*?"<>|;\\/\\\\]+?(?![a-zA-Z]:))'
def _translation(found_path):
found = found_path.group(1).replace(u'\\', u'/').replace(u':', u'').replace(u'//', u'/')
return ((root_prefix + u'/') + found)
path = re.sub(path_re, _translation, path).replace(u';/', u':/')
return path
|
[
"def",
"win_path_to_unix",
"(",
"path",
",",
"root_prefix",
"=",
"u''",
")",
":",
"path_re",
"=",
"u'(?<![:/^a-zA-Z])([a-zA-Z]:[\\\\/\\\\\\\\]+(?:[^:*?\"<>|]+[\\\\/\\\\\\\\]+)*[^:*?\"<>|;\\\\/\\\\\\\\]+?(?![a-zA-Z]:))'",
"def",
"_translation",
"(",
"found_path",
")",
":",
"found",
"=",
"found_path",
".",
"group",
"(",
"1",
")",
".",
"replace",
"(",
"u'\\\\'",
",",
"u'/'",
")",
".",
"replace",
"(",
"u':'",
",",
"u''",
")",
".",
"replace",
"(",
"u'//'",
",",
"u'/'",
")",
"return",
"(",
"(",
"root_prefix",
"+",
"u'/'",
")",
"+",
"found",
")",
"path",
"=",
"re",
".",
"sub",
"(",
"path_re",
",",
"_translation",
",",
"path",
")",
".",
"replace",
"(",
"u';/'",
",",
"u':/'",
")",
"return",
"path"
] |
convert a path or ;-separated string of paths into a unix representation does not add cygdrive .
|
train
| false
|
52,399
|
def cast_to_floatx(x):
return np.asarray(x, dtype=_FLOATX)
|
[
"def",
"cast_to_floatx",
"(",
"x",
")",
":",
"return",
"np",
".",
"asarray",
"(",
"x",
",",
"dtype",
"=",
"_FLOATX",
")"
] |
cast a numpy array to the default keras float type .
|
train
| false
|
52,400
|
def sync_data_from_mailchimp(**kwargs):
key = request.args.get('key')
if (key == settings.MAILCHIMP_WEBHOOK_SECRET_KEY):
r = request
action = r.values['type']
list_name = mailchimp_utils.get_list_name_from_id(list_id=r.values['data[list_id]'])
username = r.values['data[email]']
try:
user = User.find_one(Q('username', 'eq', username))
except NoResultsFound:
sentry.log_exception()
sentry.log_message('A user with this username does not exist.')
raise HTTPError(404, data=dict(message_short='User not found', message_long='A user with this username does not exist'))
if (action == 'unsubscribe'):
user.mailchimp_mailing_lists[list_name] = False
user.save()
elif (action == 'subscribe'):
user.mailchimp_mailing_lists[list_name] = True
user.save()
else:
raise HTTPError(http.UNAUTHORIZED)
|
[
"def",
"sync_data_from_mailchimp",
"(",
"**",
"kwargs",
")",
":",
"key",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'key'",
")",
"if",
"(",
"key",
"==",
"settings",
".",
"MAILCHIMP_WEBHOOK_SECRET_KEY",
")",
":",
"r",
"=",
"request",
"action",
"=",
"r",
".",
"values",
"[",
"'type'",
"]",
"list_name",
"=",
"mailchimp_utils",
".",
"get_list_name_from_id",
"(",
"list_id",
"=",
"r",
".",
"values",
"[",
"'data[list_id]'",
"]",
")",
"username",
"=",
"r",
".",
"values",
"[",
"'data[email]'",
"]",
"try",
":",
"user",
"=",
"User",
".",
"find_one",
"(",
"Q",
"(",
"'username'",
",",
"'eq'",
",",
"username",
")",
")",
"except",
"NoResultsFound",
":",
"sentry",
".",
"log_exception",
"(",
")",
"sentry",
".",
"log_message",
"(",
"'A user with this username does not exist.'",
")",
"raise",
"HTTPError",
"(",
"404",
",",
"data",
"=",
"dict",
"(",
"message_short",
"=",
"'User not found'",
",",
"message_long",
"=",
"'A user with this username does not exist'",
")",
")",
"if",
"(",
"action",
"==",
"'unsubscribe'",
")",
":",
"user",
".",
"mailchimp_mailing_lists",
"[",
"list_name",
"]",
"=",
"False",
"user",
".",
"save",
"(",
")",
"elif",
"(",
"action",
"==",
"'subscribe'",
")",
":",
"user",
".",
"mailchimp_mailing_lists",
"[",
"list_name",
"]",
"=",
"True",
"user",
".",
"save",
"(",
")",
"else",
":",
"raise",
"HTTPError",
"(",
"http",
".",
"UNAUTHORIZED",
")"
] |
endpoint that the mailchimp webhook sends its data to .
|
train
| false
|
52,401
|
def get_module_part(dotted_name, context_file=None):
if dotted_name.startswith('os.path'):
return 'os.path'
parts = dotted_name.split('.')
if (context_file is not None):
if (parts[0] in BUILTIN_MODULES):
if (len(parts) > 2):
raise ImportError(dotted_name)
return parts[0]
path = None
starti = 0
if (parts[0] == ''):
assert (context_file is not None), 'explicit relative import, but no context_file?'
path = []
starti = 1
while (parts[starti] == ''):
starti += 1
context_file = dirname(context_file)
for i in range(starti, len(parts)):
try:
file_from_modpath(parts[starti:(i + 1)], path=path, context_file=context_file)
except ImportError:
if (not (i >= max(1, (len(parts) - 2)))):
raise
return '.'.join(parts[:i])
return dotted_name
|
[
"def",
"get_module_part",
"(",
"dotted_name",
",",
"context_file",
"=",
"None",
")",
":",
"if",
"dotted_name",
".",
"startswith",
"(",
"'os.path'",
")",
":",
"return",
"'os.path'",
"parts",
"=",
"dotted_name",
".",
"split",
"(",
"'.'",
")",
"if",
"(",
"context_file",
"is",
"not",
"None",
")",
":",
"if",
"(",
"parts",
"[",
"0",
"]",
"in",
"BUILTIN_MODULES",
")",
":",
"if",
"(",
"len",
"(",
"parts",
")",
">",
"2",
")",
":",
"raise",
"ImportError",
"(",
"dotted_name",
")",
"return",
"parts",
"[",
"0",
"]",
"path",
"=",
"None",
"starti",
"=",
"0",
"if",
"(",
"parts",
"[",
"0",
"]",
"==",
"''",
")",
":",
"assert",
"(",
"context_file",
"is",
"not",
"None",
")",
",",
"'explicit relative import, but no context_file?'",
"path",
"=",
"[",
"]",
"starti",
"=",
"1",
"while",
"(",
"parts",
"[",
"starti",
"]",
"==",
"''",
")",
":",
"starti",
"+=",
"1",
"context_file",
"=",
"dirname",
"(",
"context_file",
")",
"for",
"i",
"in",
"range",
"(",
"starti",
",",
"len",
"(",
"parts",
")",
")",
":",
"try",
":",
"file_from_modpath",
"(",
"parts",
"[",
"starti",
":",
"(",
"i",
"+",
"1",
")",
"]",
",",
"path",
"=",
"path",
",",
"context_file",
"=",
"context_file",
")",
"except",
"ImportError",
":",
"if",
"(",
"not",
"(",
"i",
">=",
"max",
"(",
"1",
",",
"(",
"len",
"(",
"parts",
")",
"-",
"2",
")",
")",
")",
")",
":",
"raise",
"return",
"'.'",
".",
"join",
"(",
"parts",
"[",
":",
"i",
"]",
")",
"return",
"dotted_name"
] |
given a dotted name return the module part of the name : .
|
train
| true
|
52,402
|
def hostinterface_update(interfaceid, **connection_args):
conn_args = _login(**connection_args)
try:
if conn_args:
method = 'hostinterface.update'
params = {'interfaceid': interfaceid}
params = _params_extend(params, **connection_args)
ret = _query(method, params, conn_args['url'], conn_args['auth'])
return ret['result']['interfaceids']
else:
raise KeyError
except KeyError:
return ret
|
[
"def",
"hostinterface_update",
"(",
"interfaceid",
",",
"**",
"connection_args",
")",
":",
"conn_args",
"=",
"_login",
"(",
"**",
"connection_args",
")",
"try",
":",
"if",
"conn_args",
":",
"method",
"=",
"'hostinterface.update'",
"params",
"=",
"{",
"'interfaceid'",
":",
"interfaceid",
"}",
"params",
"=",
"_params_extend",
"(",
"params",
",",
"**",
"connection_args",
")",
"ret",
"=",
"_query",
"(",
"method",
",",
"params",
",",
"conn_args",
"[",
"'url'",
"]",
",",
"conn_args",
"[",
"'auth'",
"]",
")",
"return",
"ret",
"[",
"'result'",
"]",
"[",
"'interfaceids'",
"]",
"else",
":",
"raise",
"KeyError",
"except",
"KeyError",
":",
"return",
"ret"
] |
update host interface note: this function accepts all standard hostinterface: keyword argument names differ depending on your zabbix version .
|
train
| true
|
52,407
|
def get_clients2(node):
l = []
for (c, i) in node.outputs[0].clients:
if (c != 'output'):
for var in c.outputs:
l.extend([cc for (cc, ii) in var.clients if (cc != 'output')])
return l
|
[
"def",
"get_clients2",
"(",
"node",
")",
":",
"l",
"=",
"[",
"]",
"for",
"(",
"c",
",",
"i",
")",
"in",
"node",
".",
"outputs",
"[",
"0",
"]",
".",
"clients",
":",
"if",
"(",
"c",
"!=",
"'output'",
")",
":",
"for",
"var",
"in",
"c",
".",
"outputs",
":",
"l",
".",
"extend",
"(",
"[",
"cc",
"for",
"(",
"cc",
",",
"ii",
")",
"in",
"var",
".",
"clients",
"if",
"(",
"cc",
"!=",
"'output'",
")",
"]",
")",
"return",
"l"
] |
used by erf/erfc opt to track less frequent op .
|
train
| false
|
52,408
|
def read_po(fileobj, locale=None, domain=None, ignore_obsolete=False, charset=None):
catalog = Catalog(locale=locale, domain=domain, charset=charset)
parser = PoFileParser(catalog, ignore_obsolete)
parser.parse(fileobj)
return catalog
|
[
"def",
"read_po",
"(",
"fileobj",
",",
"locale",
"=",
"None",
",",
"domain",
"=",
"None",
",",
"ignore_obsolete",
"=",
"False",
",",
"charset",
"=",
"None",
")",
":",
"catalog",
"=",
"Catalog",
"(",
"locale",
"=",
"locale",
",",
"domain",
"=",
"domain",
",",
"charset",
"=",
"charset",
")",
"parser",
"=",
"PoFileParser",
"(",
"catalog",
",",
"ignore_obsolete",
")",
"parser",
".",
"parse",
"(",
"fileobj",
")",
"return",
"catalog"
] |
read messages from a gettext po file from the given file-like object and return a catalog .
|
train
| false
|
52,409
|
def gafiterator(handle):
inline = handle.readline()
if (inline.strip() == '!gaf-version: 2.0'):
return _gaf20iterator(handle)
elif (inline.strip() == '!gaf-version: 2.1'):
return _gaf20iterator(handle)
elif (inline.strip() == '!gaf-version: 1.0'):
return _gaf10iterator(handle)
else:
raise ValueError('Unknown GAF version {0}\n'.format(inline))
|
[
"def",
"gafiterator",
"(",
"handle",
")",
":",
"inline",
"=",
"handle",
".",
"readline",
"(",
")",
"if",
"(",
"inline",
".",
"strip",
"(",
")",
"==",
"'!gaf-version: 2.0'",
")",
":",
"return",
"_gaf20iterator",
"(",
"handle",
")",
"elif",
"(",
"inline",
".",
"strip",
"(",
")",
"==",
"'!gaf-version: 2.1'",
")",
":",
"return",
"_gaf20iterator",
"(",
"handle",
")",
"elif",
"(",
"inline",
".",
"strip",
"(",
")",
"==",
"'!gaf-version: 1.0'",
")",
":",
"return",
"_gaf10iterator",
"(",
"handle",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unknown GAF version {0}\\n'",
".",
"format",
"(",
"inline",
")",
")"
] |
iterate over a gaf 1 .
|
train
| false
|
52,411
|
def ModuleNameHasPrefix(module_name, prefix_set):
for prefix in prefix_set:
if (prefix == module_name):
return True
if module_name.startswith((prefix + '.')):
return True
return False
|
[
"def",
"ModuleNameHasPrefix",
"(",
"module_name",
",",
"prefix_set",
")",
":",
"for",
"prefix",
"in",
"prefix_set",
":",
"if",
"(",
"prefix",
"==",
"module_name",
")",
":",
"return",
"True",
"if",
"module_name",
".",
"startswith",
"(",
"(",
"prefix",
"+",
"'.'",
")",
")",
":",
"return",
"True",
"return",
"False"
] |
determines if a modules name belongs to a set of prefix strings .
|
train
| false
|
52,412
|
def libvlc_media_discoverer_is_running(p_mdis):
f = (_Cfunctions.get('libvlc_media_discoverer_is_running', None) or _Cfunction('libvlc_media_discoverer_is_running', ((1,),), None, ctypes.c_int, MediaDiscoverer))
return f(p_mdis)
|
[
"def",
"libvlc_media_discoverer_is_running",
"(",
"p_mdis",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_discoverer_is_running'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_discoverer_is_running'",
",",
"(",
"(",
"1",
",",
")",
",",
")",
",",
"None",
",",
"ctypes",
".",
"c_int",
",",
"MediaDiscoverer",
")",
")",
"return",
"f",
"(",
"p_mdis",
")"
] |
query if media service discover object is running .
|
train
| true
|
52,413
|
def missing_test_message(msg):
action = config.compute_test_value
if (action == 'raise'):
raise AttributeError(msg)
elif (action == 'warn'):
warnings.warn(msg, stacklevel=2)
else:
assert (action in ['ignore', 'off'])
|
[
"def",
"missing_test_message",
"(",
"msg",
")",
":",
"action",
"=",
"config",
".",
"compute_test_value",
"if",
"(",
"action",
"==",
"'raise'",
")",
":",
"raise",
"AttributeError",
"(",
"msg",
")",
"elif",
"(",
"action",
"==",
"'warn'",
")",
":",
"warnings",
".",
"warn",
"(",
"msg",
",",
"stacklevel",
"=",
"2",
")",
"else",
":",
"assert",
"(",
"action",
"in",
"[",
"'ignore'",
",",
"'off'",
"]",
")"
] |
displays msg .
|
train
| false
|
52,414
|
def scanNetwork():
devices = []
for remoteAddress in range(2, 6):
print ('Trying address ' + str(remoteAddress))
p = snap.Packet(remoteAddress, snap.localAddress, 0, 1, [CMD_GETMODULETYPE])
p.send()
rep = p.getReply()
print 'dev', remoteAddress
time.sleep(0.5)
for d in devices:
print 'device', d
|
[
"def",
"scanNetwork",
"(",
")",
":",
"devices",
"=",
"[",
"]",
"for",
"remoteAddress",
"in",
"range",
"(",
"2",
",",
"6",
")",
":",
"print",
"(",
"'Trying address '",
"+",
"str",
"(",
"remoteAddress",
")",
")",
"p",
"=",
"snap",
".",
"Packet",
"(",
"remoteAddress",
",",
"snap",
".",
"localAddress",
",",
"0",
",",
"1",
",",
"[",
"CMD_GETMODULETYPE",
"]",
")",
"p",
".",
"send",
"(",
")",
"rep",
"=",
"p",
".",
"getReply",
"(",
")",
"print",
"'dev'",
",",
"remoteAddress",
"time",
".",
"sleep",
"(",
"0.5",
")",
"for",
"d",
"in",
"devices",
":",
"print",
"'device'",
",",
"d"
] |
scan reprap network for devices - this will be used by autoconfig functions when complete .
|
train
| false
|
52,416
|
def resolve_one_media(file_id, resource):
_file = app.media.get(file_id, resource)
if _file:
if config.RETURN_MEDIA_AS_BASE64_STRING:
ret_file = base64.encodestring(_file.read())
elif config.RETURN_MEDIA_AS_URL:
prefix = (config.MEDIA_BASE_URL if (config.MEDIA_BASE_URL is not None) else app.api_prefix)
ret_file = ('%s/%s/%s' % (prefix, config.MEDIA_ENDPOINT, file_id))
else:
ret_file = None
if config.EXTENDED_MEDIA_INFO:
ret = {'file': ret_file}
for attribute in config.EXTENDED_MEDIA_INFO:
if hasattr(_file, attribute):
ret.update({attribute: getattr(_file, attribute)})
else:
abort(500, description=debug_error_message('Invalid extended media attribute requested'))
return ret
else:
return ret_file
else:
return None
|
[
"def",
"resolve_one_media",
"(",
"file_id",
",",
"resource",
")",
":",
"_file",
"=",
"app",
".",
"media",
".",
"get",
"(",
"file_id",
",",
"resource",
")",
"if",
"_file",
":",
"if",
"config",
".",
"RETURN_MEDIA_AS_BASE64_STRING",
":",
"ret_file",
"=",
"base64",
".",
"encodestring",
"(",
"_file",
".",
"read",
"(",
")",
")",
"elif",
"config",
".",
"RETURN_MEDIA_AS_URL",
":",
"prefix",
"=",
"(",
"config",
".",
"MEDIA_BASE_URL",
"if",
"(",
"config",
".",
"MEDIA_BASE_URL",
"is",
"not",
"None",
")",
"else",
"app",
".",
"api_prefix",
")",
"ret_file",
"=",
"(",
"'%s/%s/%s'",
"%",
"(",
"prefix",
",",
"config",
".",
"MEDIA_ENDPOINT",
",",
"file_id",
")",
")",
"else",
":",
"ret_file",
"=",
"None",
"if",
"config",
".",
"EXTENDED_MEDIA_INFO",
":",
"ret",
"=",
"{",
"'file'",
":",
"ret_file",
"}",
"for",
"attribute",
"in",
"config",
".",
"EXTENDED_MEDIA_INFO",
":",
"if",
"hasattr",
"(",
"_file",
",",
"attribute",
")",
":",
"ret",
".",
"update",
"(",
"{",
"attribute",
":",
"getattr",
"(",
"_file",
",",
"attribute",
")",
"}",
")",
"else",
":",
"abort",
"(",
"500",
",",
"description",
"=",
"debug_error_message",
"(",
"'Invalid extended media attribute requested'",
")",
")",
"return",
"ret",
"else",
":",
"return",
"ret_file",
"else",
":",
"return",
"None"
] |
get response for one media file .
|
train
| false
|
52,417
|
def testMatrix():
tr = pg.SRTTransform3D()
tr.setRotate(45, (0, 0, 1))
tr.setScale(0.2, 0.4, 1)
tr.setTranslate(10, 20, 40)
assert (tr.getRotation() == (45, QtGui.QVector3D(0, 0, 1)))
assert (tr.getScale() == QtGui.QVector3D(0.2, 0.4, 1))
assert (tr.getTranslation() == QtGui.QVector3D(10, 20, 40))
tr2 = pg.Transform3D(tr)
assert np.all((tr.matrix() == tr2.matrix()))
tr3 = pg.SRTTransform3D(tr2)
assert_array_almost_equal(tr.matrix(), tr3.matrix())
assert_almost_equal(tr3.getRotation()[0], tr.getRotation()[0])
assert_array_almost_equal(tr3.getRotation()[1], tr.getRotation()[1])
assert_array_almost_equal(tr3.getScale(), tr.getScale())
assert_array_almost_equal(tr3.getTranslation(), tr.getTranslation())
|
[
"def",
"testMatrix",
"(",
")",
":",
"tr",
"=",
"pg",
".",
"SRTTransform3D",
"(",
")",
"tr",
".",
"setRotate",
"(",
"45",
",",
"(",
"0",
",",
"0",
",",
"1",
")",
")",
"tr",
".",
"setScale",
"(",
"0.2",
",",
"0.4",
",",
"1",
")",
"tr",
".",
"setTranslate",
"(",
"10",
",",
"20",
",",
"40",
")",
"assert",
"(",
"tr",
".",
"getRotation",
"(",
")",
"==",
"(",
"45",
",",
"QtGui",
".",
"QVector3D",
"(",
"0",
",",
"0",
",",
"1",
")",
")",
")",
"assert",
"(",
"tr",
".",
"getScale",
"(",
")",
"==",
"QtGui",
".",
"QVector3D",
"(",
"0.2",
",",
"0.4",
",",
"1",
")",
")",
"assert",
"(",
"tr",
".",
"getTranslation",
"(",
")",
"==",
"QtGui",
".",
"QVector3D",
"(",
"10",
",",
"20",
",",
"40",
")",
")",
"tr2",
"=",
"pg",
".",
"Transform3D",
"(",
"tr",
")",
"assert",
"np",
".",
"all",
"(",
"(",
"tr",
".",
"matrix",
"(",
")",
"==",
"tr2",
".",
"matrix",
"(",
")",
")",
")",
"tr3",
"=",
"pg",
".",
"SRTTransform3D",
"(",
"tr2",
")",
"assert_array_almost_equal",
"(",
"tr",
".",
"matrix",
"(",
")",
",",
"tr3",
".",
"matrix",
"(",
")",
")",
"assert_almost_equal",
"(",
"tr3",
".",
"getRotation",
"(",
")",
"[",
"0",
"]",
",",
"tr",
".",
"getRotation",
"(",
")",
"[",
"0",
"]",
")",
"assert_array_almost_equal",
"(",
"tr3",
".",
"getRotation",
"(",
")",
"[",
"1",
"]",
",",
"tr",
".",
"getRotation",
"(",
")",
"[",
"1",
"]",
")",
"assert_array_almost_equal",
"(",
"tr3",
".",
"getScale",
"(",
")",
",",
"tr",
".",
"getScale",
"(",
")",
")",
"assert_array_almost_equal",
"(",
"tr3",
".",
"getTranslation",
"(",
")",
",",
"tr",
".",
"getTranslation",
"(",
")",
")"
] |
srttransform3d => transform3d => srttransform3d .
|
train
| false
|
52,418
|
def s3_fieldmethod(name, f, represent=None):
from gluon import Field
if (represent is not None):
class Handler(object, ):
def __init__(self, method, row):
self.method = method
self.row = row
def __call__(self, *args, **kwargs):
return self.method(self.row, *args, **kwargs)
if hasattr(represent, 'bulk'):
Handler.represent = represent
else:
Handler.represent = staticmethod(represent)
fieldmethod = Field.Method(name, f, handler=Handler)
else:
fieldmethod = Field.Method(name, f)
return fieldmethod
|
[
"def",
"s3_fieldmethod",
"(",
"name",
",",
"f",
",",
"represent",
"=",
"None",
")",
":",
"from",
"gluon",
"import",
"Field",
"if",
"(",
"represent",
"is",
"not",
"None",
")",
":",
"class",
"Handler",
"(",
"object",
",",
")",
":",
"def",
"__init__",
"(",
"self",
",",
"method",
",",
"row",
")",
":",
"self",
".",
"method",
"=",
"method",
"self",
".",
"row",
"=",
"row",
"def",
"__call__",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"self",
".",
"method",
"(",
"self",
".",
"row",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"if",
"hasattr",
"(",
"represent",
",",
"'bulk'",
")",
":",
"Handler",
".",
"represent",
"=",
"represent",
"else",
":",
"Handler",
".",
"represent",
"=",
"staticmethod",
"(",
"represent",
")",
"fieldmethod",
"=",
"Field",
".",
"Method",
"(",
"name",
",",
"f",
",",
"handler",
"=",
"Handler",
")",
"else",
":",
"fieldmethod",
"=",
"Field",
".",
"Method",
"(",
"name",
",",
"f",
")",
"return",
"fieldmethod"
] |
helper to attach a representation method to a field .
|
train
| false
|
52,419
|
def umount_all(root_path):
def is_under_root(path):
try:
FilePath(path).segmentsFrom(root_path)
except ValueError:
return False
return True
partitions_under_root = list((p for p in psutil.disk_partitions() if is_under_root(p.mountpoint)))
for partition in partitions_under_root:
umount(FilePath(partition.mountpoint))
|
[
"def",
"umount_all",
"(",
"root_path",
")",
":",
"def",
"is_under_root",
"(",
"path",
")",
":",
"try",
":",
"FilePath",
"(",
"path",
")",
".",
"segmentsFrom",
"(",
"root_path",
")",
"except",
"ValueError",
":",
"return",
"False",
"return",
"True",
"partitions_under_root",
"=",
"list",
"(",
"(",
"p",
"for",
"p",
"in",
"psutil",
".",
"disk_partitions",
"(",
")",
"if",
"is_under_root",
"(",
"p",
".",
"mountpoint",
")",
")",
")",
"for",
"partition",
"in",
"partitions_under_root",
":",
"umount",
"(",
"FilePath",
"(",
"partition",
".",
"mountpoint",
")",
")"
] |
unmount all devices with mount points contained in root_path .
|
train
| false
|
52,421
|
def formset_data_extractor(recurring=[], submitted=[]):
def _data_extractor(request):
if (not submitted):
return []
data = []
for param in submitted:
i = 0
for val in request.POST.getlist(param):
if (len(data) == i):
data.append({})
data[i][param] = val
i += 1
for kwargs in data:
for recurrent in recurring:
kwargs[recurrent] = request.POST.get(recurrent)
initial = list(data)
return {'initial': initial, 'data': data}
return _data_extractor
|
[
"def",
"formset_data_extractor",
"(",
"recurring",
"=",
"[",
"]",
",",
"submitted",
"=",
"[",
"]",
")",
":",
"def",
"_data_extractor",
"(",
"request",
")",
":",
"if",
"(",
"not",
"submitted",
")",
":",
"return",
"[",
"]",
"data",
"=",
"[",
"]",
"for",
"param",
"in",
"submitted",
":",
"i",
"=",
"0",
"for",
"val",
"in",
"request",
".",
"POST",
".",
"getlist",
"(",
"param",
")",
":",
"if",
"(",
"len",
"(",
"data",
")",
"==",
"i",
")",
":",
"data",
".",
"append",
"(",
"{",
"}",
")",
"data",
"[",
"i",
"]",
"[",
"param",
"]",
"=",
"val",
"i",
"+=",
"1",
"for",
"kwargs",
"in",
"data",
":",
"for",
"recurrent",
"in",
"recurring",
":",
"kwargs",
"[",
"recurrent",
"]",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"recurrent",
")",
"initial",
"=",
"list",
"(",
"data",
")",
"return",
"{",
"'initial'",
":",
"initial",
",",
"'data'",
":",
"data",
"}",
"return",
"_data_extractor"
] |
builds a list of data that formsets should use by extending some fields to every object .
|
train
| false
|
52,422
|
def save_thumbnail(image, thumb_path, shape):
rescale = min(((float(w_1) / w_2) for (w_1, w_2) in zip(shape, image.shape)))
small_shape = (rescale * np.asarray(image.shape[:2])).astype(int)
small_image = transform.resize(image, small_shape)
if (len(image.shape) == 3):
shape = (shape + (image.shape[2],))
background_value = dtype_range[small_image.dtype.type][1]
thumb = (background_value * np.ones(shape, dtype=small_image.dtype))
i = ((shape[0] - small_shape[0]) // 2)
j = ((shape[1] - small_shape[1]) // 2)
thumb[i:(i + small_shape[0]), j:(j + small_shape[1])] = small_image
io.imsave(thumb_path, thumb)
|
[
"def",
"save_thumbnail",
"(",
"image",
",",
"thumb_path",
",",
"shape",
")",
":",
"rescale",
"=",
"min",
"(",
"(",
"(",
"float",
"(",
"w_1",
")",
"/",
"w_2",
")",
"for",
"(",
"w_1",
",",
"w_2",
")",
"in",
"zip",
"(",
"shape",
",",
"image",
".",
"shape",
")",
")",
")",
"small_shape",
"=",
"(",
"rescale",
"*",
"np",
".",
"asarray",
"(",
"image",
".",
"shape",
"[",
":",
"2",
"]",
")",
")",
".",
"astype",
"(",
"int",
")",
"small_image",
"=",
"transform",
".",
"resize",
"(",
"image",
",",
"small_shape",
")",
"if",
"(",
"len",
"(",
"image",
".",
"shape",
")",
"==",
"3",
")",
":",
"shape",
"=",
"(",
"shape",
"+",
"(",
"image",
".",
"shape",
"[",
"2",
"]",
",",
")",
")",
"background_value",
"=",
"dtype_range",
"[",
"small_image",
".",
"dtype",
".",
"type",
"]",
"[",
"1",
"]",
"thumb",
"=",
"(",
"background_value",
"*",
"np",
".",
"ones",
"(",
"shape",
",",
"dtype",
"=",
"small_image",
".",
"dtype",
")",
")",
"i",
"=",
"(",
"(",
"shape",
"[",
"0",
"]",
"-",
"small_shape",
"[",
"0",
"]",
")",
"//",
"2",
")",
"j",
"=",
"(",
"(",
"shape",
"[",
"1",
"]",
"-",
"small_shape",
"[",
"1",
"]",
")",
"//",
"2",
")",
"thumb",
"[",
"i",
":",
"(",
"i",
"+",
"small_shape",
"[",
"0",
"]",
")",
",",
"j",
":",
"(",
"j",
"+",
"small_shape",
"[",
"1",
"]",
")",
"]",
"=",
"small_image",
"io",
".",
"imsave",
"(",
"thumb_path",
",",
"thumb",
")"
] |
save image as a thumbnail with the specified shape .
|
train
| false
|
52,423
|
def long2bin(size, value, endian, classic_mode=False):
text = ''
assert (endian in (LITTLE_ENDIAN, BIG_ENDIAN))
assert (0 <= value)
for index in xrange(size):
if ((value & 1) == 1):
text += '1'
else:
text += '0'
value >>= 1
if (endian is LITTLE_ENDIAN):
text = text[::(-1)]
result = ''
while (len(text) != 0):
if (len(result) != 0):
result += ' '
if classic_mode:
result += text[7::(-1)]
else:
result += text[:8]
text = text[8:]
return result
|
[
"def",
"long2bin",
"(",
"size",
",",
"value",
",",
"endian",
",",
"classic_mode",
"=",
"False",
")",
":",
"text",
"=",
"''",
"assert",
"(",
"endian",
"in",
"(",
"LITTLE_ENDIAN",
",",
"BIG_ENDIAN",
")",
")",
"assert",
"(",
"0",
"<=",
"value",
")",
"for",
"index",
"in",
"xrange",
"(",
"size",
")",
":",
"if",
"(",
"(",
"value",
"&",
"1",
")",
"==",
"1",
")",
":",
"text",
"+=",
"'1'",
"else",
":",
"text",
"+=",
"'0'",
"value",
">>=",
"1",
"if",
"(",
"endian",
"is",
"LITTLE_ENDIAN",
")",
":",
"text",
"=",
"text",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
"result",
"=",
"''",
"while",
"(",
"len",
"(",
"text",
")",
"!=",
"0",
")",
":",
"if",
"(",
"len",
"(",
"result",
")",
"!=",
"0",
")",
":",
"result",
"+=",
"' '",
"if",
"classic_mode",
":",
"result",
"+=",
"text",
"[",
"7",
":",
":",
"(",
"-",
"1",
")",
"]",
"else",
":",
"result",
"+=",
"text",
"[",
":",
"8",
"]",
"text",
"=",
"text",
"[",
"8",
":",
"]",
"return",
"result"
] |
convert a number into bits : - size: size in bits of the number - value: positive number - endian: big_endian or little_endian - classic_mode : reverse each packet of 8 bits .
|
train
| false
|
52,424
|
def to_choices_dict(choices):
ret = OrderedDict()
for choice in choices:
if (not isinstance(choice, (list, tuple))):
ret[choice] = choice
else:
(key, value) = choice
if isinstance(value, (list, tuple)):
ret[key] = to_choices_dict(value)
else:
ret[key] = value
return ret
|
[
"def",
"to_choices_dict",
"(",
"choices",
")",
":",
"ret",
"=",
"OrderedDict",
"(",
")",
"for",
"choice",
"in",
"choices",
":",
"if",
"(",
"not",
"isinstance",
"(",
"choice",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
":",
"ret",
"[",
"choice",
"]",
"=",
"choice",
"else",
":",
"(",
"key",
",",
"value",
")",
"=",
"choice",
"if",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"ret",
"[",
"key",
"]",
"=",
"to_choices_dict",
"(",
"value",
")",
"else",
":",
"ret",
"[",
"key",
"]",
"=",
"value",
"return",
"ret"
] |
convert choices into key/value dicts .
|
train
| true
|
52,425
|
def show_blob(repo, blob, decode, outstream=sys.stdout):
outstream.write(decode(blob.data))
|
[
"def",
"show_blob",
"(",
"repo",
",",
"blob",
",",
"decode",
",",
"outstream",
"=",
"sys",
".",
"stdout",
")",
":",
"outstream",
".",
"write",
"(",
"decode",
"(",
"blob",
".",
"data",
")",
")"
] |
write a blob to a stream .
|
train
| false
|
52,427
|
def authenticate_lti_user(request, lti_user_id, lti_consumer):
try:
lti_user = LtiUser.objects.get(lti_user_id=lti_user_id, lti_consumer=lti_consumer)
except LtiUser.DoesNotExist:
lti_user = create_lti_user(lti_user_id, lti_consumer)
if (not (request.user.is_authenticated() and (request.user == lti_user.edx_user))):
switch_user(request, lti_user, lti_consumer)
|
[
"def",
"authenticate_lti_user",
"(",
"request",
",",
"lti_user_id",
",",
"lti_consumer",
")",
":",
"try",
":",
"lti_user",
"=",
"LtiUser",
".",
"objects",
".",
"get",
"(",
"lti_user_id",
"=",
"lti_user_id",
",",
"lti_consumer",
"=",
"lti_consumer",
")",
"except",
"LtiUser",
".",
"DoesNotExist",
":",
"lti_user",
"=",
"create_lti_user",
"(",
"lti_user_id",
",",
"lti_consumer",
")",
"if",
"(",
"not",
"(",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
"and",
"(",
"request",
".",
"user",
"==",
"lti_user",
".",
"edx_user",
")",
")",
")",
":",
"switch_user",
"(",
"request",
",",
"lti_user",
",",
"lti_consumer",
")"
] |
determine whether the user specified by the lti launch has an existing account .
|
train
| false
|
52,428
|
def st_api(img_style, img_content, callback=None):
global workers
all_args = [{'length': 360, 'ratio': 2000.0, 'n_iter': 32, 'callback': callback}, {'length': 512, 'ratio': 20000.0, 'n_iter': 16, 'callback': callback}]
st_lock = None
st_worker = None
while (st_lock is None):
for (lock, worker) in workers.iteritems():
if lock.acquire(False):
st_lock = lock
st_worker = worker
break
else:
time.sleep(0.1)
img_out = 'content'
for args in all_args:
args['init'] = img_out
st_worker.transfer_style(img_style, img_content, **args)
img_out = st_worker.get_generated()
st_lock.release()
return img_out
|
[
"def",
"st_api",
"(",
"img_style",
",",
"img_content",
",",
"callback",
"=",
"None",
")",
":",
"global",
"workers",
"all_args",
"=",
"[",
"{",
"'length'",
":",
"360",
",",
"'ratio'",
":",
"2000.0",
",",
"'n_iter'",
":",
"32",
",",
"'callback'",
":",
"callback",
"}",
",",
"{",
"'length'",
":",
"512",
",",
"'ratio'",
":",
"20000.0",
",",
"'n_iter'",
":",
"16",
",",
"'callback'",
":",
"callback",
"}",
"]",
"st_lock",
"=",
"None",
"st_worker",
"=",
"None",
"while",
"(",
"st_lock",
"is",
"None",
")",
":",
"for",
"(",
"lock",
",",
"worker",
")",
"in",
"workers",
".",
"iteritems",
"(",
")",
":",
"if",
"lock",
".",
"acquire",
"(",
"False",
")",
":",
"st_lock",
"=",
"lock",
"st_worker",
"=",
"worker",
"break",
"else",
":",
"time",
".",
"sleep",
"(",
"0.1",
")",
"img_out",
"=",
"'content'",
"for",
"args",
"in",
"all_args",
":",
"args",
"[",
"'init'",
"]",
"=",
"img_out",
"st_worker",
".",
"transfer_style",
"(",
"img_style",
",",
"img_content",
",",
"**",
"args",
")",
"img_out",
"=",
"st_worker",
".",
"get_generated",
"(",
")",
"st_lock",
".",
"release",
"(",
")",
"return",
"img_out"
] |
style transfer api .
|
train
| false
|
52,431
|
def mask(buf, key):
key = [ord(i) for i in key]
buf = list(buf)
for (i, char) in enumerate(buf):
buf[i] = chr((ord(char) ^ key[(i % 4)]))
return ''.join(buf)
|
[
"def",
"mask",
"(",
"buf",
",",
"key",
")",
":",
"key",
"=",
"[",
"ord",
"(",
"i",
")",
"for",
"i",
"in",
"key",
"]",
"buf",
"=",
"list",
"(",
"buf",
")",
"for",
"(",
"i",
",",
"char",
")",
"in",
"enumerate",
"(",
"buf",
")",
":",
"buf",
"[",
"i",
"]",
"=",
"chr",
"(",
"(",
"ord",
"(",
"char",
")",
"^",
"key",
"[",
"(",
"i",
"%",
"4",
")",
"]",
")",
")",
"return",
"''",
".",
"join",
"(",
"buf",
")"
] |
mask or unmask a buffer of bytes with a masking key .
|
train
| false
|
52,432
|
def eigvalsh(a, b=None, lower=True, overwrite_a=False, overwrite_b=False, turbo=True, eigvals=None, type=1, check_finite=True):
return eigh(a, b=b, lower=lower, eigvals_only=True, overwrite_a=overwrite_a, overwrite_b=overwrite_b, turbo=turbo, eigvals=eigvals, type=type, check_finite=check_finite)
|
[
"def",
"eigvalsh",
"(",
"a",
",",
"b",
"=",
"None",
",",
"lower",
"=",
"True",
",",
"overwrite_a",
"=",
"False",
",",
"overwrite_b",
"=",
"False",
",",
"turbo",
"=",
"True",
",",
"eigvals",
"=",
"None",
",",
"type",
"=",
"1",
",",
"check_finite",
"=",
"True",
")",
":",
"return",
"eigh",
"(",
"a",
",",
"b",
"=",
"b",
",",
"lower",
"=",
"lower",
",",
"eigvals_only",
"=",
"True",
",",
"overwrite_a",
"=",
"overwrite_a",
",",
"overwrite_b",
"=",
"overwrite_b",
",",
"turbo",
"=",
"turbo",
",",
"eigvals",
"=",
"eigvals",
",",
"type",
"=",
"type",
",",
"check_finite",
"=",
"check_finite",
")"
] |
solve an ordinary or generalized eigenvalue problem for a complex hermitian or real symmetric matrix .
|
train
| false
|
52,433
|
def make_sudoku_CSP():
rows = range(9)
cols = range(9)
vars = cross(rows, cols)
domains = defaultdict((lambda : range(1, 10)))
triples = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
unitlist = (([cross(rows, [c]) for c in cols] + [cross([r], cols) for r in rows]) + [cross(rs, cs) for rs in triples for cs in triples])
neighbors = defaultdict((lambda : set([])))
for unit in unitlist:
for cell in unit:
neighbors[cell].update(unit)
neighbors[cell].remove(cell)
return SudokuCSP(vars=vars, domains=domains, neighbors=neighbors, binary_constraint=unequal_vals_constraint)
|
[
"def",
"make_sudoku_CSP",
"(",
")",
":",
"rows",
"=",
"range",
"(",
"9",
")",
"cols",
"=",
"range",
"(",
"9",
")",
"vars",
"=",
"cross",
"(",
"rows",
",",
"cols",
")",
"domains",
"=",
"defaultdict",
"(",
"(",
"lambda",
":",
"range",
"(",
"1",
",",
"10",
")",
")",
")",
"triples",
"=",
"[",
"[",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"3",
",",
"4",
",",
"5",
"]",
",",
"[",
"6",
",",
"7",
",",
"8",
"]",
"]",
"unitlist",
"=",
"(",
"(",
"[",
"cross",
"(",
"rows",
",",
"[",
"c",
"]",
")",
"for",
"c",
"in",
"cols",
"]",
"+",
"[",
"cross",
"(",
"[",
"r",
"]",
",",
"cols",
")",
"for",
"r",
"in",
"rows",
"]",
")",
"+",
"[",
"cross",
"(",
"rs",
",",
"cs",
")",
"for",
"rs",
"in",
"triples",
"for",
"cs",
"in",
"triples",
"]",
")",
"neighbors",
"=",
"defaultdict",
"(",
"(",
"lambda",
":",
"set",
"(",
"[",
"]",
")",
")",
")",
"for",
"unit",
"in",
"unitlist",
":",
"for",
"cell",
"in",
"unit",
":",
"neighbors",
"[",
"cell",
"]",
".",
"update",
"(",
"unit",
")",
"neighbors",
"[",
"cell",
"]",
".",
"remove",
"(",
"cell",
")",
"return",
"SudokuCSP",
"(",
"vars",
"=",
"vars",
",",
"domains",
"=",
"domains",
",",
"neighbors",
"=",
"neighbors",
",",
"binary_constraint",
"=",
"unequal_vals_constraint",
")"
] |
a regular 9x9 sudoku puzzle .
|
train
| false
|
52,434
|
def drop_duplicate_nodes(tree, inserted_nodes):
for (name, node_list) in inserted_nodes.items():
if (len(node_list) == 1):
continue
node_list.sort(key=(lambda x: x.Score), reverse=True)
node_to_keep = node_list[0]
for n in node_list:
if (n is node_to_keep):
continue
elif (n.Parent is not None):
n.Parent.remove(n)
tree.prune()
return tree
|
[
"def",
"drop_duplicate_nodes",
"(",
"tree",
",",
"inserted_nodes",
")",
":",
"for",
"(",
"name",
",",
"node_list",
")",
"in",
"inserted_nodes",
".",
"items",
"(",
")",
":",
"if",
"(",
"len",
"(",
"node_list",
")",
"==",
"1",
")",
":",
"continue",
"node_list",
".",
"sort",
"(",
"key",
"=",
"(",
"lambda",
"x",
":",
"x",
".",
"Score",
")",
",",
"reverse",
"=",
"True",
")",
"node_to_keep",
"=",
"node_list",
"[",
"0",
"]",
"for",
"n",
"in",
"node_list",
":",
"if",
"(",
"n",
"is",
"node_to_keep",
")",
":",
"continue",
"elif",
"(",
"n",
".",
"Parent",
"is",
"not",
"None",
")",
":",
"n",
".",
"Parent",
".",
"remove",
"(",
"n",
")",
"tree",
".",
"prune",
"(",
")",
"return",
"tree"
] |
remove nodes from tree .
|
train
| false
|
52,436
|
def _indent(elem, level=0):
i = ('\n' + (level * ' '))
if len(elem):
if ((not elem.text) or (not elem.text.strip())):
elem.text = (i + ' ')
for e in elem:
_indent(e, (level + 1))
if ((not e.tail) or (not e.tail.strip())):
e.tail = (i + ' ')
if ((not e.tail) or (not e.tail.strip())):
e.tail = i
elif (level and ((not elem.tail) or (not elem.tail.strip()))):
elem.tail = i
|
[
"def",
"_indent",
"(",
"elem",
",",
"level",
"=",
"0",
")",
":",
"i",
"=",
"(",
"'\\n'",
"+",
"(",
"level",
"*",
"' '",
")",
")",
"if",
"len",
"(",
"elem",
")",
":",
"if",
"(",
"(",
"not",
"elem",
".",
"text",
")",
"or",
"(",
"not",
"elem",
".",
"text",
".",
"strip",
"(",
")",
")",
")",
":",
"elem",
".",
"text",
"=",
"(",
"i",
"+",
"' '",
")",
"for",
"e",
"in",
"elem",
":",
"_indent",
"(",
"e",
",",
"(",
"level",
"+",
"1",
")",
")",
"if",
"(",
"(",
"not",
"e",
".",
"tail",
")",
"or",
"(",
"not",
"e",
".",
"tail",
".",
"strip",
"(",
")",
")",
")",
":",
"e",
".",
"tail",
"=",
"(",
"i",
"+",
"' '",
")",
"if",
"(",
"(",
"not",
"e",
".",
"tail",
")",
"or",
"(",
"not",
"e",
".",
"tail",
".",
"strip",
"(",
")",
")",
")",
":",
"e",
".",
"tail",
"=",
"i",
"elif",
"(",
"level",
"and",
"(",
"(",
"not",
"elem",
".",
"tail",
")",
"or",
"(",
"not",
"elem",
".",
"tail",
".",
"strip",
"(",
")",
")",
")",
")",
":",
"elem",
".",
"tail",
"=",
"i"
] |
add the given number of space characters to the beginning of every non-blank line in s .
|
train
| false
|
52,437
|
def twobyte(val):
assert isinstance(val, int)
return divmod(val, 256)
|
[
"def",
"twobyte",
"(",
"val",
")",
":",
"assert",
"isinstance",
"(",
"val",
",",
"int",
")",
"return",
"divmod",
"(",
"val",
",",
"256",
")"
] |
convert an int argument into high and low bytes .
|
train
| false
|
52,438
|
def open_dir(dirname, indexname=None, mapped=True, readonly=False):
if (indexname is None):
indexname = _DEF_INDEX_NAME
from whoosh.filedb.filestore import FileStorage
storage = FileStorage(dirname, mapped=mapped, readonly=readonly)
return storage.open_index(indexname)
|
[
"def",
"open_dir",
"(",
"dirname",
",",
"indexname",
"=",
"None",
",",
"mapped",
"=",
"True",
",",
"readonly",
"=",
"False",
")",
":",
"if",
"(",
"indexname",
"is",
"None",
")",
":",
"indexname",
"=",
"_DEF_INDEX_NAME",
"from",
"whoosh",
".",
"filedb",
".",
"filestore",
"import",
"FileStorage",
"storage",
"=",
"FileStorage",
"(",
"dirname",
",",
"mapped",
"=",
"mapped",
",",
"readonly",
"=",
"readonly",
")",
"return",
"storage",
".",
"open_index",
"(",
"indexname",
")"
] |
convenience function for opening an index in a directory .
|
train
| false
|
52,439
|
def reverse_lex(ustring):
newstr = ''
for ii in ustring:
ordinance = ord(ii)
new_byte = (255 - ordinance)
char = chr(new_byte)
newstr += char
return newstr
|
[
"def",
"reverse_lex",
"(",
"ustring",
")",
":",
"newstr",
"=",
"''",
"for",
"ii",
"in",
"ustring",
":",
"ordinance",
"=",
"ord",
"(",
"ii",
")",
"new_byte",
"=",
"(",
"255",
"-",
"ordinance",
")",
"char",
"=",
"chr",
"(",
"new_byte",
")",
"newstr",
"+=",
"char",
"return",
"newstr"
] |
strings must be in unicode to reverse the string strings are returned in unicode and may not able able to be converted to a regular string args: ustring: string to reverse .
|
train
| false
|
52,441
|
def assert_dict_contains(test_case, expected, actual, message=''):
missing_items = []
mismatch_items = []
no_value = object()
for (key, expected_value) in expected.items():
actual_value = actual.get(key, no_value)
if (actual_value is no_value):
missing_items.append(key)
elif (actual_value != expected_value):
mismatch_items.append('{}: {} != {}'.format(key, expected_value, actual_value))
if (missing_items or mismatch_items):
test_case.fail('{}\nMissing items: {}\nMismatch items: {}\nActual items: {}'.format(message, missing_items, mismatch_items, actual))
|
[
"def",
"assert_dict_contains",
"(",
"test_case",
",",
"expected",
",",
"actual",
",",
"message",
"=",
"''",
")",
":",
"missing_items",
"=",
"[",
"]",
"mismatch_items",
"=",
"[",
"]",
"no_value",
"=",
"object",
"(",
")",
"for",
"(",
"key",
",",
"expected_value",
")",
"in",
"expected",
".",
"items",
"(",
")",
":",
"actual_value",
"=",
"actual",
".",
"get",
"(",
"key",
",",
"no_value",
")",
"if",
"(",
"actual_value",
"is",
"no_value",
")",
":",
"missing_items",
".",
"append",
"(",
"key",
")",
"elif",
"(",
"actual_value",
"!=",
"expected_value",
")",
":",
"mismatch_items",
".",
"append",
"(",
"'{}: {} != {}'",
".",
"format",
"(",
"key",
",",
"expected_value",
",",
"actual_value",
")",
")",
"if",
"(",
"missing_items",
"or",
"mismatch_items",
")",
":",
"test_case",
".",
"fail",
"(",
"'{}\\nMissing items: {}\\nMismatch items: {}\\nActual items: {}'",
".",
"format",
"(",
"message",
",",
"missing_items",
",",
"mismatch_items",
",",
"actual",
")",
")"
] |
fail unless the supplied actual dict contains all the items in expected .
|
train
| false
|
52,442
|
def pids():
return _psplatform.pids()
|
[
"def",
"pids",
"(",
")",
":",
"return",
"_psplatform",
".",
"pids",
"(",
")"
] |
return a list of current running pids .
|
train
| false
|
52,443
|
def QRatio(s1, s2, force_ascii=True):
p1 = utils.full_process(s1, force_ascii=force_ascii)
p2 = utils.full_process(s2, force_ascii=force_ascii)
if (not utils.validate_string(p1)):
return 0
if (not utils.validate_string(p2)):
return 0
return ratio(p1, p2)
|
[
"def",
"QRatio",
"(",
"s1",
",",
"s2",
",",
"force_ascii",
"=",
"True",
")",
":",
"p1",
"=",
"utils",
".",
"full_process",
"(",
"s1",
",",
"force_ascii",
"=",
"force_ascii",
")",
"p2",
"=",
"utils",
".",
"full_process",
"(",
"s2",
",",
"force_ascii",
"=",
"force_ascii",
")",
"if",
"(",
"not",
"utils",
".",
"validate_string",
"(",
"p1",
")",
")",
":",
"return",
"0",
"if",
"(",
"not",
"utils",
".",
"validate_string",
"(",
"p2",
")",
")",
":",
"return",
"0",
"return",
"ratio",
"(",
"p1",
",",
"p2",
")"
] |
quick ratio comparison between two strings .
|
train
| true
|
52,444
|
def get_lambda_alias(module, aws):
client = aws.client('lambda')
api_params = set_api_params(module, ('function_name', 'name'))
try:
results = client.get_alias(**api_params)
except (ClientError, ParamValidationError, MissingParametersError) as e:
if (e.response['Error']['Code'] == 'ResourceNotFoundException'):
results = None
else:
module.fail_json(msg='Error retrieving function alias: {0}'.format(e))
return results
|
[
"def",
"get_lambda_alias",
"(",
"module",
",",
"aws",
")",
":",
"client",
"=",
"aws",
".",
"client",
"(",
"'lambda'",
")",
"api_params",
"=",
"set_api_params",
"(",
"module",
",",
"(",
"'function_name'",
",",
"'name'",
")",
")",
"try",
":",
"results",
"=",
"client",
".",
"get_alias",
"(",
"**",
"api_params",
")",
"except",
"(",
"ClientError",
",",
"ParamValidationError",
",",
"MissingParametersError",
")",
"as",
"e",
":",
"if",
"(",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"==",
"'ResourceNotFoundException'",
")",
":",
"results",
"=",
"None",
"else",
":",
"module",
".",
"fail_json",
"(",
"msg",
"=",
"'Error retrieving function alias: {0}'",
".",
"format",
"(",
"e",
")",
")",
"return",
"results"
] |
returns the lambda function alias if it exists .
|
train
| false
|
52,445
|
def getSimplifiedInsetFromClockwiseLoop(loop, radius):
inset = []
for (pointIndex, begin) in enumerate(loop):
center = loop[((pointIndex + 1) % len(loop))]
end = loop[((pointIndex + 2) % len(loop))]
addInsetPointFromClockwiseTriple(begin, center, end, inset, radius)
return getWithoutIntersections(euclidean.getSimplifiedLoop(inset, radius))
|
[
"def",
"getSimplifiedInsetFromClockwiseLoop",
"(",
"loop",
",",
"radius",
")",
":",
"inset",
"=",
"[",
"]",
"for",
"(",
"pointIndex",
",",
"begin",
")",
"in",
"enumerate",
"(",
"loop",
")",
":",
"center",
"=",
"loop",
"[",
"(",
"(",
"pointIndex",
"+",
"1",
")",
"%",
"len",
"(",
"loop",
")",
")",
"]",
"end",
"=",
"loop",
"[",
"(",
"(",
"pointIndex",
"+",
"2",
")",
"%",
"len",
"(",
"loop",
")",
")",
"]",
"addInsetPointFromClockwiseTriple",
"(",
"begin",
",",
"center",
",",
"end",
",",
"inset",
",",
"radius",
")",
"return",
"getWithoutIntersections",
"(",
"euclidean",
".",
"getSimplifiedLoop",
"(",
"inset",
",",
"radius",
")",
")"
] |
get loop inset from clockwise loop .
|
train
| false
|
52,446
|
def merged_cached_query(fn):
def merge_wrapper(*args, **kwargs):
queries = fn(*args, **kwargs)
return MergedCachedQuery(queries)
return merge_wrapper
|
[
"def",
"merged_cached_query",
"(",
"fn",
")",
":",
"def",
"merge_wrapper",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"queries",
"=",
"fn",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"MergedCachedQuery",
"(",
"queries",
")",
"return",
"merge_wrapper"
] |
decorate a function describing a cached query made up of others .
|
train
| false
|
52,449
|
def parse_query_params(strategy, response, *args, **kwargs):
auth_entry = strategy.session.get(AUTH_ENTRY_KEY)
if (not (auth_entry and (auth_entry in _AUTH_ENTRY_CHOICES))):
raise AuthEntryError(strategy.request.backend, 'auth_entry missing or invalid')
return {'auth_entry': auth_entry}
|
[
"def",
"parse_query_params",
"(",
"strategy",
",",
"response",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"auth_entry",
"=",
"strategy",
".",
"session",
".",
"get",
"(",
"AUTH_ENTRY_KEY",
")",
"if",
"(",
"not",
"(",
"auth_entry",
"and",
"(",
"auth_entry",
"in",
"_AUTH_ENTRY_CHOICES",
")",
")",
")",
":",
"raise",
"AuthEntryError",
"(",
"strategy",
".",
"request",
".",
"backend",
",",
"'auth_entry missing or invalid'",
")",
"return",
"{",
"'auth_entry'",
":",
"auth_entry",
"}"
] |
reads whitelisted query params .
|
train
| false
|
52,450
|
def _trivial_gcd(f, g):
ring = f.ring
if (not (f or g)):
return (ring.zero, ring.zero, ring.zero)
elif (not f):
if (g.LC < ring.domain.zero):
return ((- g), ring.zero, (- ring.one))
else:
return (g, ring.zero, ring.one)
elif (not g):
if (f.LC < ring.domain.zero):
return ((- f), (- ring.one), ring.zero)
else:
return (f, ring.one, ring.zero)
return None
|
[
"def",
"_trivial_gcd",
"(",
"f",
",",
"g",
")",
":",
"ring",
"=",
"f",
".",
"ring",
"if",
"(",
"not",
"(",
"f",
"or",
"g",
")",
")",
":",
"return",
"(",
"ring",
".",
"zero",
",",
"ring",
".",
"zero",
",",
"ring",
".",
"zero",
")",
"elif",
"(",
"not",
"f",
")",
":",
"if",
"(",
"g",
".",
"LC",
"<",
"ring",
".",
"domain",
".",
"zero",
")",
":",
"return",
"(",
"(",
"-",
"g",
")",
",",
"ring",
".",
"zero",
",",
"(",
"-",
"ring",
".",
"one",
")",
")",
"else",
":",
"return",
"(",
"g",
",",
"ring",
".",
"zero",
",",
"ring",
".",
"one",
")",
"elif",
"(",
"not",
"g",
")",
":",
"if",
"(",
"f",
".",
"LC",
"<",
"ring",
".",
"domain",
".",
"zero",
")",
":",
"return",
"(",
"(",
"-",
"f",
")",
",",
"(",
"-",
"ring",
".",
"one",
")",
",",
"ring",
".",
"zero",
")",
"else",
":",
"return",
"(",
"f",
",",
"ring",
".",
"one",
",",
"ring",
".",
"zero",
")",
"return",
"None"
] |
compute the gcd of two polynomials in trivial cases .
|
train
| false
|
52,451
|
def optionalcascade(cont_attr, item_attr, doc=''):
def getter(self):
if self._items:
return getattr(self[0], item_attr)
else:
return getattr(self, cont_attr)
def setter(self, value):
setattr(self, cont_attr, value)
for item in self:
setattr(item, item_attr, value)
return property(fget=getter, fset=setter, doc=doc)
|
[
"def",
"optionalcascade",
"(",
"cont_attr",
",",
"item_attr",
",",
"doc",
"=",
"''",
")",
":",
"def",
"getter",
"(",
"self",
")",
":",
"if",
"self",
".",
"_items",
":",
"return",
"getattr",
"(",
"self",
"[",
"0",
"]",
",",
"item_attr",
")",
"else",
":",
"return",
"getattr",
"(",
"self",
",",
"cont_attr",
")",
"def",
"setter",
"(",
"self",
",",
"value",
")",
":",
"setattr",
"(",
"self",
",",
"cont_attr",
",",
"value",
")",
"for",
"item",
"in",
"self",
":",
"setattr",
"(",
"item",
",",
"item_attr",
",",
"value",
")",
"return",
"property",
"(",
"fget",
"=",
"getter",
",",
"fset",
"=",
"setter",
",",
"doc",
"=",
"doc",
")"
] |
returns a getter property with a cascading setter .
|
train
| false
|
52,453
|
def get_info_for_reshard(stream_details):
min_hash_key = 0
max_hash_key = 0
stream_details['OpenShards'] = []
for shard in stream_details['Shards']:
shard_id = shard['ShardId']
if ('EndingSequenceNumber' in shard['SequenceNumberRange']):
log.debug('skipping closed shard {0}'.format(shard_id))
continue
stream_details['OpenShards'].append(shard)
shard['HashKeyRange']['StartingHashKey'] = long_int(shard['HashKeyRange']['StartingHashKey'])
shard['HashKeyRange']['EndingHashKey'] = long_int(shard['HashKeyRange']['EndingHashKey'])
if (shard['HashKeyRange']['StartingHashKey'] < min_hash_key):
min_hash_key = shard['HashKeyRange']['StartingHashKey']
if (shard['HashKeyRange']['EndingHashKey'] > max_hash_key):
max_hash_key = shard['HashKeyRange']['EndingHashKey']
stream_details['OpenShards'].sort(key=(lambda shard: long_int(shard['HashKeyRange']['StartingHashKey'])))
return (min_hash_key, max_hash_key, stream_details)
|
[
"def",
"get_info_for_reshard",
"(",
"stream_details",
")",
":",
"min_hash_key",
"=",
"0",
"max_hash_key",
"=",
"0",
"stream_details",
"[",
"'OpenShards'",
"]",
"=",
"[",
"]",
"for",
"shard",
"in",
"stream_details",
"[",
"'Shards'",
"]",
":",
"shard_id",
"=",
"shard",
"[",
"'ShardId'",
"]",
"if",
"(",
"'EndingSequenceNumber'",
"in",
"shard",
"[",
"'SequenceNumberRange'",
"]",
")",
":",
"log",
".",
"debug",
"(",
"'skipping closed shard {0}'",
".",
"format",
"(",
"shard_id",
")",
")",
"continue",
"stream_details",
"[",
"'OpenShards'",
"]",
".",
"append",
"(",
"shard",
")",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'StartingHashKey'",
"]",
"=",
"long_int",
"(",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'StartingHashKey'",
"]",
")",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'EndingHashKey'",
"]",
"=",
"long_int",
"(",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'EndingHashKey'",
"]",
")",
"if",
"(",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'StartingHashKey'",
"]",
"<",
"min_hash_key",
")",
":",
"min_hash_key",
"=",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'StartingHashKey'",
"]",
"if",
"(",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'EndingHashKey'",
"]",
">",
"max_hash_key",
")",
":",
"max_hash_key",
"=",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'EndingHashKey'",
"]",
"stream_details",
"[",
"'OpenShards'",
"]",
".",
"sort",
"(",
"key",
"=",
"(",
"lambda",
"shard",
":",
"long_int",
"(",
"shard",
"[",
"'HashKeyRange'",
"]",
"[",
"'StartingHashKey'",
"]",
")",
")",
")",
"return",
"(",
"min_hash_key",
",",
"max_hash_key",
",",
"stream_details",
")"
] |
collect some data: number of open shards .
|
train
| true
|
52,455
|
def catch_errors(application, environ, start_response, error_callback, ok_callback=None):
try:
app_iter = application(environ, start_response)
except:
error_callback(sys.exc_info())
raise
if (type(app_iter) in (list, tuple)):
if ok_callback:
ok_callback()
return app_iter
else:
return _wrap_app_iter(app_iter, error_callback, ok_callback)
|
[
"def",
"catch_errors",
"(",
"application",
",",
"environ",
",",
"start_response",
",",
"error_callback",
",",
"ok_callback",
"=",
"None",
")",
":",
"try",
":",
"app_iter",
"=",
"application",
"(",
"environ",
",",
"start_response",
")",
"except",
":",
"error_callback",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
"raise",
"if",
"(",
"type",
"(",
"app_iter",
")",
"in",
"(",
"list",
",",
"tuple",
")",
")",
":",
"if",
"ok_callback",
":",
"ok_callback",
"(",
")",
"return",
"app_iter",
"else",
":",
"return",
"_wrap_app_iter",
"(",
"app_iter",
",",
"error_callback",
",",
"ok_callback",
")"
] |
runs the application .
|
train
| false
|
52,456
|
def class_for_kind(kind):
try:
return _kind_map[kind]
except KeyError:
raise KindError(("No implementation for kind '%s'" % kind))
|
[
"def",
"class_for_kind",
"(",
"kind",
")",
":",
"try",
":",
"return",
"_kind_map",
"[",
"kind",
"]",
"except",
"KeyError",
":",
"raise",
"KindError",
"(",
"(",
"\"No implementation for kind '%s'\"",
"%",
"kind",
")",
")"
] |
return base-class responsible for implementing kind .
|
train
| false
|
52,458
|
def test_close_process_when_exception():
exc = Exception('boom')
with pytest.raises(Exception) as e:
with pipeline.get_cat_pipeline(pipeline.PIPE, pipeline.PIPE) as pl:
assert (len(pl.commands) == 1)
assert (pl.commands[0]._process.poll() is None)
raise exc
assert (e.value is exc)
pipeline_wait(pl)
|
[
"def",
"test_close_process_when_exception",
"(",
")",
":",
"exc",
"=",
"Exception",
"(",
"'boom'",
")",
"with",
"pytest",
".",
"raises",
"(",
"Exception",
")",
"as",
"e",
":",
"with",
"pipeline",
".",
"get_cat_pipeline",
"(",
"pipeline",
".",
"PIPE",
",",
"pipeline",
".",
"PIPE",
")",
"as",
"pl",
":",
"assert",
"(",
"len",
"(",
"pl",
".",
"commands",
")",
"==",
"1",
")",
"assert",
"(",
"pl",
".",
"commands",
"[",
"0",
"]",
".",
"_process",
".",
"poll",
"(",
")",
"is",
"None",
")",
"raise",
"exc",
"assert",
"(",
"e",
".",
"value",
"is",
"exc",
")",
"pipeline_wait",
"(",
"pl",
")"
] |
process leaks must not occur when an exception is raised .
|
train
| false
|
52,459
|
def _cmp_by_origin(path1, path2):
def get_origin_pref(origin):
if (origin.value == BGP_ATTR_ORIGIN_IGP):
return 3
elif (origin.value == BGP_ATTR_ORIGIN_EGP):
return 2
elif (origin.value == BGP_ATTR_ORIGIN_INCOMPLETE):
return 1
else:
LOG.error('Invalid origin value encountered %s.', origin)
return 0
origin1 = path1.get_pattr(BGP_ATTR_TYPE_ORIGIN)
origin2 = path2.get_pattr(BGP_ATTR_TYPE_ORIGIN)
assert ((origin1 is not None) and (origin2 is not None))
if (origin1.value == origin2.value):
return None
origin1 = get_origin_pref(origin1)
origin2 = get_origin_pref(origin2)
if (origin1 == origin2):
return None
elif (origin1 > origin2):
return path1
return path2
|
[
"def",
"_cmp_by_origin",
"(",
"path1",
",",
"path2",
")",
":",
"def",
"get_origin_pref",
"(",
"origin",
")",
":",
"if",
"(",
"origin",
".",
"value",
"==",
"BGP_ATTR_ORIGIN_IGP",
")",
":",
"return",
"3",
"elif",
"(",
"origin",
".",
"value",
"==",
"BGP_ATTR_ORIGIN_EGP",
")",
":",
"return",
"2",
"elif",
"(",
"origin",
".",
"value",
"==",
"BGP_ATTR_ORIGIN_INCOMPLETE",
")",
":",
"return",
"1",
"else",
":",
"LOG",
".",
"error",
"(",
"'Invalid origin value encountered %s.'",
",",
"origin",
")",
"return",
"0",
"origin1",
"=",
"path1",
".",
"get_pattr",
"(",
"BGP_ATTR_TYPE_ORIGIN",
")",
"origin2",
"=",
"path2",
".",
"get_pattr",
"(",
"BGP_ATTR_TYPE_ORIGIN",
")",
"assert",
"(",
"(",
"origin1",
"is",
"not",
"None",
")",
"and",
"(",
"origin2",
"is",
"not",
"None",
")",
")",
"if",
"(",
"origin1",
".",
"value",
"==",
"origin2",
".",
"value",
")",
":",
"return",
"None",
"origin1",
"=",
"get_origin_pref",
"(",
"origin1",
")",
"origin2",
"=",
"get_origin_pref",
"(",
"origin2",
")",
"if",
"(",
"origin1",
"==",
"origin2",
")",
":",
"return",
"None",
"elif",
"(",
"origin1",
">",
"origin2",
")",
":",
"return",
"path1",
"return",
"path2"
] |
select the best path based on origin attribute .
|
train
| true
|
52,460
|
def dihedral(n):
if (n == 1):
(yield Permutation([0, 1]))
(yield Permutation([1, 0]))
elif (n == 2):
(yield Permutation([0, 1, 2, 3]))
(yield Permutation([1, 0, 3, 2]))
(yield Permutation([2, 3, 0, 1]))
(yield Permutation([3, 2, 1, 0]))
else:
gen = list(range(n))
for i in range(n):
(yield Permutation(gen))
(yield Permutation(gen[::(-1)]))
gen = rotate_left(gen, 1)
|
[
"def",
"dihedral",
"(",
"n",
")",
":",
"if",
"(",
"n",
"==",
"1",
")",
":",
"(",
"yield",
"Permutation",
"(",
"[",
"0",
",",
"1",
"]",
")",
")",
"(",
"yield",
"Permutation",
"(",
"[",
"1",
",",
"0",
"]",
")",
")",
"elif",
"(",
"n",
"==",
"2",
")",
":",
"(",
"yield",
"Permutation",
"(",
"[",
"0",
",",
"1",
",",
"2",
",",
"3",
"]",
")",
")",
"(",
"yield",
"Permutation",
"(",
"[",
"1",
",",
"0",
",",
"3",
",",
"2",
"]",
")",
")",
"(",
"yield",
"Permutation",
"(",
"[",
"2",
",",
"3",
",",
"0",
",",
"1",
"]",
")",
")",
"(",
"yield",
"Permutation",
"(",
"[",
"3",
",",
"2",
",",
"1",
",",
"0",
"]",
")",
")",
"else",
":",
"gen",
"=",
"list",
"(",
"range",
"(",
"n",
")",
")",
"for",
"i",
"in",
"range",
"(",
"n",
")",
":",
"(",
"yield",
"Permutation",
"(",
"gen",
")",
")",
"(",
"yield",
"Permutation",
"(",
"gen",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
")",
")",
"gen",
"=",
"rotate_left",
"(",
"gen",
",",
"1",
")"
] |
generates the dihedral group of order 2n .
|
train
| false
|
52,461
|
def iter_child_nodes(node):
for (name, field) in iter_fields(node):
if isinstance(field, AST):
(yield field)
elif isinstance(field, list):
for item in field:
if isinstance(item, AST):
(yield item)
|
[
"def",
"iter_child_nodes",
"(",
"node",
")",
":",
"for",
"(",
"name",
",",
"field",
")",
"in",
"iter_fields",
"(",
"node",
")",
":",
"if",
"isinstance",
"(",
"field",
",",
"AST",
")",
":",
"(",
"yield",
"field",
")",
"elif",
"isinstance",
"(",
"field",
",",
"list",
")",
":",
"for",
"item",
"in",
"field",
":",
"if",
"isinstance",
"(",
"item",
",",
"AST",
")",
":",
"(",
"yield",
"item",
")"
] |
iterate over all child nodes or a node .
|
train
| true
|
52,462
|
def create_upload_form_attributes(prefix, input_type, name):
attributes = {'class': 'switched', 'data-switch-on': (prefix + 'source')}
attributes[((('data-' + prefix) + 'source-') + input_type)] = name
return attributes
|
[
"def",
"create_upload_form_attributes",
"(",
"prefix",
",",
"input_type",
",",
"name",
")",
":",
"attributes",
"=",
"{",
"'class'",
":",
"'switched'",
",",
"'data-switch-on'",
":",
"(",
"prefix",
"+",
"'source'",
")",
"}",
"attributes",
"[",
"(",
"(",
"(",
"'data-'",
"+",
"prefix",
")",
"+",
"'source-'",
")",
"+",
"input_type",
")",
"]",
"=",
"name",
"return",
"attributes"
] |
creates attribute dicts for the switchable upload form :type prefix: str .
|
train
| false
|
52,463
|
@_assure_identity
def auth_with_token(token, tenant_id=None, tenant_name=None, region=None):
global regions, services
identity.auth_with_token(token, tenant_id=tenant_id, tenant_name=tenant_name)
regions = tuple(identity.regions)
services = tuple(identity.services.keys())
connect_to_services(region=region)
|
[
"@",
"_assure_identity",
"def",
"auth_with_token",
"(",
"token",
",",
"tenant_id",
"=",
"None",
",",
"tenant_name",
"=",
"None",
",",
"region",
"=",
"None",
")",
":",
"global",
"regions",
",",
"services",
"identity",
".",
"auth_with_token",
"(",
"token",
",",
"tenant_id",
"=",
"tenant_id",
",",
"tenant_name",
"=",
"tenant_name",
")",
"regions",
"=",
"tuple",
"(",
"identity",
".",
"regions",
")",
"services",
"=",
"tuple",
"(",
"identity",
".",
"services",
".",
"keys",
"(",
")",
")",
"connect_to_services",
"(",
"region",
"=",
"region",
")"
] |
if you already have a valid token and either a tenant id or name .
|
train
| true
|
52,464
|
def get_actor(tr, chamber):
actor = tr[0].text_content().strip()
if (actor.startswith('H') or actor.startswith('S')):
actor = actor[0]
return {'H': 'lower', 'S': 'upper'}[actor]
else:
h_or_s = tr.xpath(('ancestor::table[1]/preceding-sibling::' + 'table/tr/td/b[contains(text(), "TRANSMIT TO")]'))
if h_or_s:
h_or_s = h_or_s[(-1)].text_content().strip()
actor = ('upper' if h_or_s.endswith('SENATE:') else 'lower')
else:
actor = chamber
return actor
|
[
"def",
"get_actor",
"(",
"tr",
",",
"chamber",
")",
":",
"actor",
"=",
"tr",
"[",
"0",
"]",
".",
"text_content",
"(",
")",
".",
"strip",
"(",
")",
"if",
"(",
"actor",
".",
"startswith",
"(",
"'H'",
")",
"or",
"actor",
".",
"startswith",
"(",
"'S'",
")",
")",
":",
"actor",
"=",
"actor",
"[",
"0",
"]",
"return",
"{",
"'H'",
":",
"'lower'",
",",
"'S'",
":",
"'upper'",
"}",
"[",
"actor",
"]",
"else",
":",
"h_or_s",
"=",
"tr",
".",
"xpath",
"(",
"(",
"'ancestor::table[1]/preceding-sibling::'",
"+",
"'table/tr/td/b[contains(text(), \"TRANSMIT TO\")]'",
")",
")",
"if",
"h_or_s",
":",
"h_or_s",
"=",
"h_or_s",
"[",
"(",
"-",
"1",
")",
"]",
".",
"text_content",
"(",
")",
".",
"strip",
"(",
")",
"actor",
"=",
"(",
"'upper'",
"if",
"h_or_s",
".",
"endswith",
"(",
"'SENATE:'",
")",
"else",
"'lower'",
")",
"else",
":",
"actor",
"=",
"chamber",
"return",
"actor"
] |
gets the actor of a given action based on presence of a transmit to action .
|
train
| false
|
52,465
|
def get_copy_folder_location():
copy_settings_path = 'Library/Application Support/Copy Agent/config.db'
copy_home = None
copy_settings = os.path.join(os.environ['HOME'], copy_settings_path)
if os.path.isfile(copy_settings):
database = sqlite3.connect(copy_settings)
if database:
cur = database.cursor()
query = "SELECT value FROM config2 WHERE option = 'csmRootPath';"
cur.execute(query)
data = cur.fetchone()
copy_home = unicode(data[0])
cur.close()
if (not copy_home):
error('Unable to find your Copy install =(')
return copy_home
|
[
"def",
"get_copy_folder_location",
"(",
")",
":",
"copy_settings_path",
"=",
"'Library/Application Support/Copy Agent/config.db'",
"copy_home",
"=",
"None",
"copy_settings",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"environ",
"[",
"'HOME'",
"]",
",",
"copy_settings_path",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"copy_settings",
")",
":",
"database",
"=",
"sqlite3",
".",
"connect",
"(",
"copy_settings",
")",
"if",
"database",
":",
"cur",
"=",
"database",
".",
"cursor",
"(",
")",
"query",
"=",
"\"SELECT value FROM config2 WHERE option = 'csmRootPath';\"",
"cur",
".",
"execute",
"(",
"query",
")",
"data",
"=",
"cur",
".",
"fetchone",
"(",
")",
"copy_home",
"=",
"unicode",
"(",
"data",
"[",
"0",
"]",
")",
"cur",
".",
"close",
"(",
")",
"if",
"(",
"not",
"copy_home",
")",
":",
"error",
"(",
"'Unable to find your Copy install =('",
")",
"return",
"copy_home"
] |
try to locate the copy folder .
|
train
| true
|
52,469
|
def keip_zeros(nt):
if ((not isscalar(nt)) or (floor(nt) != nt) or (nt <= 0)):
raise ValueError('nt must be positive integer scalar.')
return specfun.klvnzo(nt, 8)
|
[
"def",
"keip_zeros",
"(",
"nt",
")",
":",
"if",
"(",
"(",
"not",
"isscalar",
"(",
"nt",
")",
")",
"or",
"(",
"floor",
"(",
"nt",
")",
"!=",
"nt",
")",
"or",
"(",
"nt",
"<=",
"0",
")",
")",
":",
"raise",
"ValueError",
"(",
"'nt must be positive integer scalar.'",
")",
"return",
"specfun",
".",
"klvnzo",
"(",
"nt",
",",
"8",
")"
] |
compute nt zeros of the kelvin function kei(x) .
|
train
| false
|
52,470
|
def in_transaction():
from . import tasklets
return tasklets.get_context().in_transaction()
|
[
"def",
"in_transaction",
"(",
")",
":",
"from",
".",
"import",
"tasklets",
"return",
"tasklets",
".",
"get_context",
"(",
")",
".",
"in_transaction",
"(",
")"
] |
return whether a transaction is currently active .
|
train
| false
|
52,471
|
def create_api_model(restApiId, modelName, modelDescription, schema, contentType='application/json', region=None, key=None, keyid=None, profile=None):
try:
schema_json = (json.dumps(schema) if isinstance(schema, dict) else schema)
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
model = conn.create_model(restApiId=restApiId, name=modelName, description=modelDescription, schema=schema_json, contentType=contentType)
return {'created': True, 'model': _convert_datetime_str(model)}
except ClientError as e:
return {'created': False, 'error': salt.utils.boto3.get_error(e)}
|
[
"def",
"create_api_model",
"(",
"restApiId",
",",
"modelName",
",",
"modelDescription",
",",
"schema",
",",
"contentType",
"=",
"'application/json'",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"schema_json",
"=",
"(",
"json",
".",
"dumps",
"(",
"schema",
")",
"if",
"isinstance",
"(",
"schema",
",",
"dict",
")",
"else",
"schema",
")",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"model",
"=",
"conn",
".",
"create_model",
"(",
"restApiId",
"=",
"restApiId",
",",
"name",
"=",
"modelName",
",",
"description",
"=",
"modelDescription",
",",
"schema",
"=",
"schema_json",
",",
"contentType",
"=",
"contentType",
")",
"return",
"{",
"'created'",
":",
"True",
",",
"'model'",
":",
"_convert_datetime_str",
"(",
"model",
")",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'created'",
":",
"False",
",",
"'error'",
":",
"salt",
".",
"utils",
".",
"boto3",
".",
"get_error",
"(",
"e",
")",
"}"
] |
create a new model in a given api with a given schema .
|
train
| true
|
52,472
|
def put_multi_async(entities, **ctx_options):
return [entity.put_async(**ctx_options) for entity in entities]
|
[
"def",
"put_multi_async",
"(",
"entities",
",",
"**",
"ctx_options",
")",
":",
"return",
"[",
"entity",
".",
"put_async",
"(",
"**",
"ctx_options",
")",
"for",
"entity",
"in",
"entities",
"]"
] |
stores a sequence of model instances .
|
train
| false
|
52,473
|
def can_browse_repository_reviews(app, user, repository):
if user:
for review in repository.reviews:
for component_review in review.component_reviews:
if app.security_agent.user_can_browse_component_review(app, repository, component_review, user):
return True
return False
|
[
"def",
"can_browse_repository_reviews",
"(",
"app",
",",
"user",
",",
"repository",
")",
":",
"if",
"user",
":",
"for",
"review",
"in",
"repository",
".",
"reviews",
":",
"for",
"component_review",
"in",
"review",
".",
"component_reviews",
":",
"if",
"app",
".",
"security_agent",
".",
"user_can_browse_component_review",
"(",
"app",
",",
"repository",
",",
"component_review",
",",
"user",
")",
":",
"return",
"True",
"return",
"False"
] |
determine if there are any reviews of the received repository for which the current user has permission to browse any component reviews .
|
train
| false
|
52,474
|
def convert_pid(value):
return int(value, 16)
|
[
"def",
"convert_pid",
"(",
"value",
")",
":",
"return",
"int",
"(",
"value",
",",
"16",
")"
] |
convert pid from hex string to integer .
|
train
| false
|
52,476
|
def CDL3OUTSIDE(barDs, count):
return call_talib_with_ohlc(barDs, count, talib.CDL3OUTSIDE)
|
[
"def",
"CDL3OUTSIDE",
"(",
"barDs",
",",
"count",
")",
":",
"return",
"call_talib_with_ohlc",
"(",
"barDs",
",",
"count",
",",
"talib",
".",
"CDL3OUTSIDE",
")"
] |
three outside up/down .
|
train
| false
|
52,478
|
@task
@timeit
def add_short_links(doc_ids):
base_url = 'https://{0}%s'.format(Site.objects.get_current().domain)
docs = Document.objects.filter(id__in=doc_ids)
try:
pin_this_thread()
for doc in docs:
endpoint = django_reverse('wiki.document', args=[doc.slug])
doc.update(share_link=generate_short_url((base_url % endpoint)))
statsd.incr('wiki.add_short_links.success')
except BitlyRateLimitException:
statsd.incr('wiki.add_short_links.rate_limited')
pass
finally:
unpin_this_thread()
|
[
"@",
"task",
"@",
"timeit",
"def",
"add_short_links",
"(",
"doc_ids",
")",
":",
"base_url",
"=",
"'https://{0}%s'",
".",
"format",
"(",
"Site",
".",
"objects",
".",
"get_current",
"(",
")",
".",
"domain",
")",
"docs",
"=",
"Document",
".",
"objects",
".",
"filter",
"(",
"id__in",
"=",
"doc_ids",
")",
"try",
":",
"pin_this_thread",
"(",
")",
"for",
"doc",
"in",
"docs",
":",
"endpoint",
"=",
"django_reverse",
"(",
"'wiki.document'",
",",
"args",
"=",
"[",
"doc",
".",
"slug",
"]",
")",
"doc",
".",
"update",
"(",
"share_link",
"=",
"generate_short_url",
"(",
"(",
"base_url",
"%",
"endpoint",
")",
")",
")",
"statsd",
".",
"incr",
"(",
"'wiki.add_short_links.success'",
")",
"except",
"BitlyRateLimitException",
":",
"statsd",
".",
"incr",
"(",
"'wiki.add_short_links.rate_limited'",
")",
"pass",
"finally",
":",
"unpin_this_thread",
"(",
")"
] |
create short_urls for a list of docs .
|
train
| false
|
52,479
|
def single_source_bellman_ford(G, source, target=None, cutoff=None, weight='weight'):
if (source == target):
return ({source: 0}, {source: [source]})
weight = _weight_function(G, weight)
paths = {source: [source]}
return (_bellman_ford(G, [source], weight, paths=paths, cutoff=cutoff, target=target), paths)
|
[
"def",
"single_source_bellman_ford",
"(",
"G",
",",
"source",
",",
"target",
"=",
"None",
",",
"cutoff",
"=",
"None",
",",
"weight",
"=",
"'weight'",
")",
":",
"if",
"(",
"source",
"==",
"target",
")",
":",
"return",
"(",
"{",
"source",
":",
"0",
"}",
",",
"{",
"source",
":",
"[",
"source",
"]",
"}",
")",
"weight",
"=",
"_weight_function",
"(",
"G",
",",
"weight",
")",
"paths",
"=",
"{",
"source",
":",
"[",
"source",
"]",
"}",
"return",
"(",
"_bellman_ford",
"(",
"G",
",",
"[",
"source",
"]",
",",
"weight",
",",
"paths",
"=",
"paths",
",",
"cutoff",
"=",
"cutoff",
",",
"target",
"=",
"target",
")",
",",
"paths",
")"
] |
compute shortest paths and lengths in a weighted graph g .
|
train
| false
|
52,480
|
def release():
return uname()[2]
|
[
"def",
"release",
"(",
")",
":",
"return",
"uname",
"(",
")",
"[",
"2",
"]"
] |
returns the system's release .
|
train
| false
|
52,481
|
def restore_disks(job, restore=False, disk_list=None):
if (restore and (disk_list is not None)):
prepare_disks(job, 'ext2 / -q -i20480 -m1 / / restore_ext2', disk1_only=False, disk_list=disk_list)
|
[
"def",
"restore_disks",
"(",
"job",
",",
"restore",
"=",
"False",
",",
"disk_list",
"=",
"None",
")",
":",
"if",
"(",
"restore",
"and",
"(",
"disk_list",
"is",
"not",
"None",
")",
")",
":",
"prepare_disks",
"(",
"job",
",",
"'ext2 / -q -i20480 -m1 / / restore_ext2'",
",",
"disk1_only",
"=",
"False",
",",
"disk_list",
"=",
"disk_list",
")"
] |
restore ext2 on the drives in disk_list if restore is true and disk_list is not none .
|
train
| false
|
52,483
|
@world.absorb
def css_contains_text(css_selector, partial_text, index=0):
if partial_text:
wait_for((lambda _: css_html(css_selector, index=index)), timeout=8)
actual_text = css_html(css_selector, index=index)
return (partial_text in actual_text)
|
[
"@",
"world",
".",
"absorb",
"def",
"css_contains_text",
"(",
"css_selector",
",",
"partial_text",
",",
"index",
"=",
"0",
")",
":",
"if",
"partial_text",
":",
"wait_for",
"(",
"(",
"lambda",
"_",
":",
"css_html",
"(",
"css_selector",
",",
"index",
"=",
"index",
")",
")",
",",
"timeout",
"=",
"8",
")",
"actual_text",
"=",
"css_html",
"(",
"css_selector",
",",
"index",
"=",
"index",
")",
"return",
"(",
"partial_text",
"in",
"actual_text",
")"
] |
return a boolean indicating whether the element with css_selector contains partial_text .
|
train
| false
|
52,486
|
def _match_emr_bootstrap_stderr_path(path, node_id=None, action_num=None):
m = _EMR_BOOTSTRAP_STDERR_PATH_RE.match(path)
if (not m):
return
result = _extract_action_num_and_node_id(m)
if ((action_num is not None) and (action_num != result['action_num'])):
return None
if ((node_id is not None) and (node_id != result['node_id'])):
return None
return result
|
[
"def",
"_match_emr_bootstrap_stderr_path",
"(",
"path",
",",
"node_id",
"=",
"None",
",",
"action_num",
"=",
"None",
")",
":",
"m",
"=",
"_EMR_BOOTSTRAP_STDERR_PATH_RE",
".",
"match",
"(",
"path",
")",
"if",
"(",
"not",
"m",
")",
":",
"return",
"result",
"=",
"_extract_action_num_and_node_id",
"(",
"m",
")",
"if",
"(",
"(",
"action_num",
"is",
"not",
"None",
")",
"and",
"(",
"action_num",
"!=",
"result",
"[",
"'action_num'",
"]",
")",
")",
":",
"return",
"None",
"if",
"(",
"(",
"node_id",
"is",
"not",
"None",
")",
"and",
"(",
"node_id",
"!=",
"result",
"[",
"'node_id'",
"]",
")",
")",
":",
"return",
"None",
"return",
"result"
] |
return the match info if *path* corresponds to a bootstrap stderr file ; otherwise return none .
|
train
| false
|
52,488
|
def add_extra_output_destination(writeable_object, important_level=0, name=None):
global extra_print_dests
extra_print_dests.append({'dest': writeable_object, 'name': name, 'important_level': important_level})
|
[
"def",
"add_extra_output_destination",
"(",
"writeable_object",
",",
"important_level",
"=",
"0",
",",
"name",
"=",
"None",
")",
":",
"global",
"extra_print_dests",
"extra_print_dests",
".",
"append",
"(",
"{",
"'dest'",
":",
"writeable_object",
",",
"'name'",
":",
"name",
",",
"'important_level'",
":",
"important_level",
"}",
")"
] |
add extra places to print output to .
|
train
| false
|
52,489
|
def _checkState(manager):
manager.checkState()
|
[
"def",
"_checkState",
"(",
"manager",
")",
":",
"manager",
".",
"checkState",
"(",
")"
] |
prompt a relaying manager to check state .
|
train
| false
|
52,490
|
def parse_set_symm_diff(source, info):
items = [parse_set_inter(source, info)]
while source.match('~~'):
items.append(parse_set_inter(source, info))
if (len(items) == 1):
return items[0]
return SetSymDiff(info, items)
|
[
"def",
"parse_set_symm_diff",
"(",
"source",
",",
"info",
")",
":",
"items",
"=",
"[",
"parse_set_inter",
"(",
"source",
",",
"info",
")",
"]",
"while",
"source",
".",
"match",
"(",
"'~~'",
")",
":",
"items",
".",
"append",
"(",
"parse_set_inter",
"(",
"source",
",",
"info",
")",
")",
"if",
"(",
"len",
"(",
"items",
")",
"==",
"1",
")",
":",
"return",
"items",
"[",
"0",
"]",
"return",
"SetSymDiff",
"(",
"info",
",",
"items",
")"
] |
parses a set symmetric difference .
|
train
| false
|
52,491
|
def warnings_to_stdout():
showwarning_orig = warnings.showwarning
def showwarning(msg, cat, fname, lno, file=None, line=0):
showwarning_orig(msg, cat, os.path.basename(fname), line, sys.stdout)
warnings.showwarning = showwarning
|
[
"def",
"warnings_to_stdout",
"(",
")",
":",
"showwarning_orig",
"=",
"warnings",
".",
"showwarning",
"def",
"showwarning",
"(",
"msg",
",",
"cat",
",",
"fname",
",",
"lno",
",",
"file",
"=",
"None",
",",
"line",
"=",
"0",
")",
":",
"showwarning_orig",
"(",
"msg",
",",
"cat",
",",
"os",
".",
"path",
".",
"basename",
"(",
"fname",
")",
",",
"line",
",",
"sys",
".",
"stdout",
")",
"warnings",
".",
"showwarning",
"=",
"showwarning"
] |
redirect all warnings to stdout .
|
train
| false
|
52,493
|
def validate_no_arg_method(interface, method_name):
for (name, method) in interface.namesAndDescriptions():
if (name == method_name):
if (len(method.getSignatureInfo()['required']) > 0):
raise InvalidMethod('Method {!r} requires parameters'.format(method_name))
return
raise InvalidMethod('Method {!r} not found in interface {}'.format(method_name, interface.__name__))
|
[
"def",
"validate_no_arg_method",
"(",
"interface",
",",
"method_name",
")",
":",
"for",
"(",
"name",
",",
"method",
")",
"in",
"interface",
".",
"namesAndDescriptions",
"(",
")",
":",
"if",
"(",
"name",
"==",
"method_name",
")",
":",
"if",
"(",
"len",
"(",
"method",
".",
"getSignatureInfo",
"(",
")",
"[",
"'required'",
"]",
")",
">",
"0",
")",
":",
"raise",
"InvalidMethod",
"(",
"'Method {!r} requires parameters'",
".",
"format",
"(",
"method_name",
")",
")",
"return",
"raise",
"InvalidMethod",
"(",
"'Method {!r} not found in interface {}'",
".",
"format",
"(",
"method_name",
",",
"interface",
".",
"__name__",
")",
")"
] |
check that method name exists in interface and requires no parameters .
|
train
| false
|
52,495
|
def list_update(t):
slug = raw_input(light_magenta('Your list that you want to update: ', rl=True))
name = raw_input(light_magenta('Update name (leave blank to unchange): ', rl=True))
mode = raw_input(light_magenta('Update mode (public/private): ', rl=True))
description = raw_input(light_magenta('Update description: ', rl=True))
try:
if name:
t.lists.update(slug='-'.join(slug.split()), owner_screen_name=g['original_name'], name=name, mode=mode, description=description)
else:
t.lists.update(slug=slug, owner_screen_name=g['original_name'], mode=mode, description=description)
printNicely(green((slug + ' list is updated.')))
except:
debug_option()
printNicely(red('Oops something is wrong with Twitter :('))
|
[
"def",
"list_update",
"(",
"t",
")",
":",
"slug",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"'Your list that you want to update: '",
",",
"rl",
"=",
"True",
")",
")",
"name",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"'Update name (leave blank to unchange): '",
",",
"rl",
"=",
"True",
")",
")",
"mode",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"'Update mode (public/private): '",
",",
"rl",
"=",
"True",
")",
")",
"description",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"'Update description: '",
",",
"rl",
"=",
"True",
")",
")",
"try",
":",
"if",
"name",
":",
"t",
".",
"lists",
".",
"update",
"(",
"slug",
"=",
"'-'",
".",
"join",
"(",
"slug",
".",
"split",
"(",
")",
")",
",",
"owner_screen_name",
"=",
"g",
"[",
"'original_name'",
"]",
",",
"name",
"=",
"name",
",",
"mode",
"=",
"mode",
",",
"description",
"=",
"description",
")",
"else",
":",
"t",
".",
"lists",
".",
"update",
"(",
"slug",
"=",
"slug",
",",
"owner_screen_name",
"=",
"g",
"[",
"'original_name'",
"]",
",",
"mode",
"=",
"mode",
",",
"description",
"=",
"description",
")",
"printNicely",
"(",
"green",
"(",
"(",
"slug",
"+",
"' list is updated.'",
")",
")",
")",
"except",
":",
"debug_option",
"(",
")",
"printNicely",
"(",
"red",
"(",
"'Oops something is wrong with Twitter :('",
")",
")"
] |
update a list .
|
train
| false
|
52,497
|
def LoadSingleClientDeployInfo(client_deploy_info):
builder = yaml_object.ObjectBuilder(ClientDeployInfoExternal)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(client_deploy_info)
parsed_yaml = handler.GetResults()
if (not parsed_yaml):
raise EmptyYaml()
if (len(parsed_yaml) > 1):
raise MultipleClientDeployInfo()
return parsed_yaml[0]
|
[
"def",
"LoadSingleClientDeployInfo",
"(",
"client_deploy_info",
")",
":",
"builder",
"=",
"yaml_object",
".",
"ObjectBuilder",
"(",
"ClientDeployInfoExternal",
")",
"handler",
"=",
"yaml_builder",
".",
"BuilderHandler",
"(",
"builder",
")",
"listener",
"=",
"yaml_listener",
".",
"EventListener",
"(",
"handler",
")",
"listener",
".",
"Parse",
"(",
"client_deploy_info",
")",
"parsed_yaml",
"=",
"handler",
".",
"GetResults",
"(",
")",
"if",
"(",
"not",
"parsed_yaml",
")",
":",
"raise",
"EmptyYaml",
"(",
")",
"if",
"(",
"len",
"(",
"parsed_yaml",
")",
">",
"1",
")",
":",
"raise",
"MultipleClientDeployInfo",
"(",
")",
"return",
"parsed_yaml",
"[",
"0",
"]"
] |
returns a clientdeployinfoexternal from a deploy_info .
|
train
| false
|
52,499
|
def _ntp_dispatcher(payload):
cls = conf.raw_layer
if (payload is None):
cls = get_cls('NTPHeader')
else:
length = len(payload)
if (length >= _NTP_PACKET_MIN_SIZE):
first_byte = struct.unpack('!B', payload[0])[0]
mode_mask = 7
mode = (first_byte & mode_mask)
cls = get_cls(_ntp_cls_by_mode.get(mode))
return cls
|
[
"def",
"_ntp_dispatcher",
"(",
"payload",
")",
":",
"cls",
"=",
"conf",
".",
"raw_layer",
"if",
"(",
"payload",
"is",
"None",
")",
":",
"cls",
"=",
"get_cls",
"(",
"'NTPHeader'",
")",
"else",
":",
"length",
"=",
"len",
"(",
"payload",
")",
"if",
"(",
"length",
">=",
"_NTP_PACKET_MIN_SIZE",
")",
":",
"first_byte",
"=",
"struct",
".",
"unpack",
"(",
"'!B'",
",",
"payload",
"[",
"0",
"]",
")",
"[",
"0",
"]",
"mode_mask",
"=",
"7",
"mode",
"=",
"(",
"first_byte",
"&",
"mode_mask",
")",
"cls",
"=",
"get_cls",
"(",
"_ntp_cls_by_mode",
".",
"get",
"(",
"mode",
")",
")",
"return",
"cls"
] |
returns the right class for a given ntp packet .
|
train
| false
|
52,500
|
def getCraftValue(preferenceName, preferences):
for preference in preferences:
if preference.name.startswith(preferenceName):
return preference.value
return None
|
[
"def",
"getCraftValue",
"(",
"preferenceName",
",",
"preferences",
")",
":",
"for",
"preference",
"in",
"preferences",
":",
"if",
"preference",
".",
"name",
".",
"startswith",
"(",
"preferenceName",
")",
":",
"return",
"preference",
".",
"value",
"return",
"None"
] |
get craft preferences value .
|
train
| false
|
52,501
|
def push_monitor(model, name, transfer_experience=False, save_records=False):
assert hasattr(model, 'monitor')
old_monitor = model.monitor
setattr(model, name, old_monitor)
del model.monitor
if transfer_experience:
monitor = Monitor.get_monitor(model)
assert (monitor is not old_monitor)
monitor._num_batches_seen = old_monitor._num_batches_seen
monitor._examples_seen = old_monitor._examples_seen
monitor._epochs_seen = old_monitor._epochs_seen
if save_records:
monitor.on_channel_conflict = 'copy_history'
monitor.channels = copy.copy(old_monitor.channels)
for (key, value) in list(monitor.channels.items()):
value.prereqs = None
return model
|
[
"def",
"push_monitor",
"(",
"model",
",",
"name",
",",
"transfer_experience",
"=",
"False",
",",
"save_records",
"=",
"False",
")",
":",
"assert",
"hasattr",
"(",
"model",
",",
"'monitor'",
")",
"old_monitor",
"=",
"model",
".",
"monitor",
"setattr",
"(",
"model",
",",
"name",
",",
"old_monitor",
")",
"del",
"model",
".",
"monitor",
"if",
"transfer_experience",
":",
"monitor",
"=",
"Monitor",
".",
"get_monitor",
"(",
"model",
")",
"assert",
"(",
"monitor",
"is",
"not",
"old_monitor",
")",
"monitor",
".",
"_num_batches_seen",
"=",
"old_monitor",
".",
"_num_batches_seen",
"monitor",
".",
"_examples_seen",
"=",
"old_monitor",
".",
"_examples_seen",
"monitor",
".",
"_epochs_seen",
"=",
"old_monitor",
".",
"_epochs_seen",
"if",
"save_records",
":",
"monitor",
".",
"on_channel_conflict",
"=",
"'copy_history'",
"monitor",
".",
"channels",
"=",
"copy",
".",
"copy",
"(",
"old_monitor",
".",
"channels",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"list",
"(",
"monitor",
".",
"channels",
".",
"items",
"(",
")",
")",
":",
"value",
".",
"prereqs",
"=",
"None",
"return",
"model"
] |
used when you load a model in a yaml file and you want to store its old monitor under a different name and start a new monitor .
|
train
| false
|
52,502
|
def test_missing_aws_stack(mocked_aws_cf_simple, monkeypatch):
monkeypatch.setattr(test_util.aws.CfStack, '__init__', mock_stack_not_found)
config = yaml.safe_load(mocked_aws_cf_simple)
aws_launcher = get_launcher(config['type'], config['provider_info'])
def check_stack_error(cmd, args):
with pytest.raises(LauncherError) as exinfo:
getattr(aws_launcher, cmd)(*args)
assert (exinfo.value.error == 'StackNotFound')
info = aws_launcher.create(config)
check_stack_error('wait', (info,))
check_stack_error('describe', (info,))
check_stack_error('delete', (info,))
check_stack_error('test', (info, 'py.test'))
|
[
"def",
"test_missing_aws_stack",
"(",
"mocked_aws_cf_simple",
",",
"monkeypatch",
")",
":",
"monkeypatch",
".",
"setattr",
"(",
"test_util",
".",
"aws",
".",
"CfStack",
",",
"'__init__'",
",",
"mock_stack_not_found",
")",
"config",
"=",
"yaml",
".",
"safe_load",
"(",
"mocked_aws_cf_simple",
")",
"aws_launcher",
"=",
"get_launcher",
"(",
"config",
"[",
"'type'",
"]",
",",
"config",
"[",
"'provider_info'",
"]",
")",
"def",
"check_stack_error",
"(",
"cmd",
",",
"args",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"LauncherError",
")",
"as",
"exinfo",
":",
"getattr",
"(",
"aws_launcher",
",",
"cmd",
")",
"(",
"*",
"args",
")",
"assert",
"(",
"exinfo",
".",
"value",
".",
"error",
"==",
"'StackNotFound'",
")",
"info",
"=",
"aws_launcher",
".",
"create",
"(",
"config",
")",
"check_stack_error",
"(",
"'wait'",
",",
"(",
"info",
",",
")",
")",
"check_stack_error",
"(",
"'describe'",
",",
"(",
"info",
",",
")",
")",
"check_stack_error",
"(",
"'delete'",
",",
"(",
"info",
",",
")",
")",
"check_stack_error",
"(",
"'test'",
",",
"(",
"info",
",",
"'py.test'",
")",
")"
] |
tests that clean and appropriate errors will be raised .
|
train
| false
|
52,503
|
def function_closure(functions, casify):
def render_function(children):
"\n Escape function names and give proper formatting to exceptions.\n\n The exceptions being 'sqrt', 'log2', and 'log10' as of now.\n "
fname = children[0].latex
if (casify(fname) not in functions):
pass
inner = children[1].latex
if (fname == 'sqrt'):
inner = u'{{{expr}}}'.format(expr=inner)
elif children[1].tall:
inner = u'\\left({expr}\\right)'.format(expr=inner)
else:
inner = u'({expr})'.format(expr=inner)
if (fname == 'sqrt'):
fname = u'\\sqrt'
elif (fname == 'log10'):
fname = u'\\log_{10}'
elif (fname == 'log2'):
fname = u'\\log_2'
else:
fname = u'\\text{{{fname}}}'.format(fname=fname)
latex = (fname + inner)
return LatexRendered(latex, tall=children[1].tall)
return render_function
|
[
"def",
"function_closure",
"(",
"functions",
",",
"casify",
")",
":",
"def",
"render_function",
"(",
"children",
")",
":",
"fname",
"=",
"children",
"[",
"0",
"]",
".",
"latex",
"if",
"(",
"casify",
"(",
"fname",
")",
"not",
"in",
"functions",
")",
":",
"pass",
"inner",
"=",
"children",
"[",
"1",
"]",
".",
"latex",
"if",
"(",
"fname",
"==",
"'sqrt'",
")",
":",
"inner",
"=",
"u'{{{expr}}}'",
".",
"format",
"(",
"expr",
"=",
"inner",
")",
"elif",
"children",
"[",
"1",
"]",
".",
"tall",
":",
"inner",
"=",
"u'\\\\left({expr}\\\\right)'",
".",
"format",
"(",
"expr",
"=",
"inner",
")",
"else",
":",
"inner",
"=",
"u'({expr})'",
".",
"format",
"(",
"expr",
"=",
"inner",
")",
"if",
"(",
"fname",
"==",
"'sqrt'",
")",
":",
"fname",
"=",
"u'\\\\sqrt'",
"elif",
"(",
"fname",
"==",
"'log10'",
")",
":",
"fname",
"=",
"u'\\\\log_{10}'",
"elif",
"(",
"fname",
"==",
"'log2'",
")",
":",
"fname",
"=",
"u'\\\\log_2'",
"else",
":",
"fname",
"=",
"u'\\\\text{{{fname}}}'",
".",
"format",
"(",
"fname",
"=",
"fname",
")",
"latex",
"=",
"(",
"fname",
"+",
"inner",
")",
"return",
"LatexRendered",
"(",
"latex",
",",
"tall",
"=",
"children",
"[",
"1",
"]",
".",
"tall",
")",
"return",
"render_function"
] |
wrap render_function so it knows the functions allowed .
|
train
| false
|
52,504
|
def closed_issues(issues, after):
logging.info('finding closed issues after {}...'.format(after))
seen = set()
for issue in issues:
if (closed_issue(issue, after) and (issue['title'] not in seen)):
seen.add(issue['title'])
(yield issue)
|
[
"def",
"closed_issues",
"(",
"issues",
",",
"after",
")",
":",
"logging",
".",
"info",
"(",
"'finding closed issues after {}...'",
".",
"format",
"(",
"after",
")",
")",
"seen",
"=",
"set",
"(",
")",
"for",
"issue",
"in",
"issues",
":",
"if",
"(",
"closed_issue",
"(",
"issue",
",",
"after",
")",
"and",
"(",
"issue",
"[",
"'title'",
"]",
"not",
"in",
"seen",
")",
")",
":",
"seen",
".",
"add",
"(",
"issue",
"[",
"'title'",
"]",
")",
"(",
"yield",
"issue",
")"
] |
yields closed issues given a list of issues .
|
train
| true
|
52,505
|
def dbcheck(exprstr, globals=None, locals=None):
def decorate(func):
expr = compile(exprstr, ('dbcheck-%s' % func.__name__), 'eval')
def check(*args, **kwds):
if (not eval(expr, globals, locals)):
raise DbcheckError(exprstr, func, args, kwds)
return func(*args, **kwds)
return check
return decorate
|
[
"def",
"dbcheck",
"(",
"exprstr",
",",
"globals",
"=",
"None",
",",
"locals",
"=",
"None",
")",
":",
"def",
"decorate",
"(",
"func",
")",
":",
"expr",
"=",
"compile",
"(",
"exprstr",
",",
"(",
"'dbcheck-%s'",
"%",
"func",
".",
"__name__",
")",
",",
"'eval'",
")",
"def",
"check",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
":",
"if",
"(",
"not",
"eval",
"(",
"expr",
",",
"globals",
",",
"locals",
")",
")",
":",
"raise",
"DbcheckError",
"(",
"exprstr",
",",
"func",
",",
"args",
",",
"kwds",
")",
"return",
"func",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
"return",
"check",
"return",
"decorate"
] |
decorator to implement debugging assertions .
|
train
| false
|
52,506
|
def subscribe_to_thread(user_id, feedback_thread_id):
subscriptions_model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
if (not subscriptions_model):
subscriptions_model = user_models.UserSubscriptionsModel(id=user_id)
if (feedback_thread_id not in subscriptions_model.feedback_thread_ids):
subscriptions_model.feedback_thread_ids.append(feedback_thread_id)
subscriptions_model.put()
|
[
"def",
"subscribe_to_thread",
"(",
"user_id",
",",
"feedback_thread_id",
")",
":",
"subscriptions_model",
"=",
"user_models",
".",
"UserSubscriptionsModel",
".",
"get",
"(",
"user_id",
",",
"strict",
"=",
"False",
")",
"if",
"(",
"not",
"subscriptions_model",
")",
":",
"subscriptions_model",
"=",
"user_models",
".",
"UserSubscriptionsModel",
"(",
"id",
"=",
"user_id",
")",
"if",
"(",
"feedback_thread_id",
"not",
"in",
"subscriptions_model",
".",
"feedback_thread_ids",
")",
":",
"subscriptions_model",
".",
"feedback_thread_ids",
".",
"append",
"(",
"feedback_thread_id",
")",
"subscriptions_model",
".",
"put",
"(",
")"
] |
subscribes a user to a feedback thread .
|
train
| false
|
52,507
|
def _error_type_to_str(mod, type_):
return ('%s(%d)' % (_get_value_name(mod, type_, 'OFPET_'), type_))
|
[
"def",
"_error_type_to_str",
"(",
"mod",
",",
"type_",
")",
":",
"return",
"(",
"'%s(%d)'",
"%",
"(",
"_get_value_name",
"(",
"mod",
",",
"type_",
",",
"'OFPET_'",
")",
",",
"type_",
")",
")"
] |
this method is registered as ofp_error_type_to_str method into ryu .
|
train
| false
|
52,508
|
def get_terminal_width():
try:
import termios
import fcntl
import struct
call = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack('hhhh', 0, 0, 0, 0))
(height, width) = struct.unpack('hhhh', call)[:2]
terminal_width = width
except Exception:
terminal_width = (int(os.environ.get('COLUMNS', '80')) - 1)
return terminal_width
|
[
"def",
"get_terminal_width",
"(",
")",
":",
"try",
":",
"import",
"termios",
"import",
"fcntl",
"import",
"struct",
"call",
"=",
"fcntl",
".",
"ioctl",
"(",
"0",
",",
"termios",
".",
"TIOCGWINSZ",
",",
"struct",
".",
"pack",
"(",
"'hhhh'",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
")",
")",
"(",
"height",
",",
"width",
")",
"=",
"struct",
".",
"unpack",
"(",
"'hhhh'",
",",
"call",
")",
"[",
":",
"2",
"]",
"terminal_width",
"=",
"width",
"except",
"Exception",
":",
"terminal_width",
"=",
"(",
"int",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'COLUMNS'",
",",
"'80'",
")",
")",
"-",
"1",
")",
"return",
"terminal_width"
] |
borrowed from the py lib .
|
train
| false
|
52,509
|
def encode_mirror_url(raw_url_or_path, remote_domain=None, is_scheme=None, is_escape=False):
if is_escape:
_raw_url_or_path = raw_url_or_path.replace('r\\/', '/')
else:
_raw_url_or_path = raw_url_or_path
sp = urlsplit(_raw_url_or_path)
if ('/extdomains/' == sp.path[:12]):
return raw_url_or_path
domain = (remote_domain or sp.netloc or parse.remote_domain or target_domain)
if (domain not in allowed_domains_set):
return raw_url_or_path
if (is_scheme is not False):
if (_raw_url_or_path[:2] == '//'):
our_prefix = ('//' + my_host_name)
elif (is_scheme or sp.scheme):
our_prefix = myurl_prefix
else:
our_prefix = ''
else:
our_prefix = ''
if is_external_domain(domain):
middle_part = ('/extdomains/' + domain)
else:
middle_part = ''
result = urljoin(((our_prefix + middle_part) + '/'), extract_url_path_and_query(_raw_url_or_path).lstrip('/'))
if is_escape:
result = s_esc(result)
return result
|
[
"def",
"encode_mirror_url",
"(",
"raw_url_or_path",
",",
"remote_domain",
"=",
"None",
",",
"is_scheme",
"=",
"None",
",",
"is_escape",
"=",
"False",
")",
":",
"if",
"is_escape",
":",
"_raw_url_or_path",
"=",
"raw_url_or_path",
".",
"replace",
"(",
"'r\\\\/'",
",",
"'/'",
")",
"else",
":",
"_raw_url_or_path",
"=",
"raw_url_or_path",
"sp",
"=",
"urlsplit",
"(",
"_raw_url_or_path",
")",
"if",
"(",
"'/extdomains/'",
"==",
"sp",
".",
"path",
"[",
":",
"12",
"]",
")",
":",
"return",
"raw_url_or_path",
"domain",
"=",
"(",
"remote_domain",
"or",
"sp",
".",
"netloc",
"or",
"parse",
".",
"remote_domain",
"or",
"target_domain",
")",
"if",
"(",
"domain",
"not",
"in",
"allowed_domains_set",
")",
":",
"return",
"raw_url_or_path",
"if",
"(",
"is_scheme",
"is",
"not",
"False",
")",
":",
"if",
"(",
"_raw_url_or_path",
"[",
":",
"2",
"]",
"==",
"'//'",
")",
":",
"our_prefix",
"=",
"(",
"'//'",
"+",
"my_host_name",
")",
"elif",
"(",
"is_scheme",
"or",
"sp",
".",
"scheme",
")",
":",
"our_prefix",
"=",
"myurl_prefix",
"else",
":",
"our_prefix",
"=",
"''",
"else",
":",
"our_prefix",
"=",
"''",
"if",
"is_external_domain",
"(",
"domain",
")",
":",
"middle_part",
"=",
"(",
"'/extdomains/'",
"+",
"domain",
")",
"else",
":",
"middle_part",
"=",
"''",
"result",
"=",
"urljoin",
"(",
"(",
"(",
"our_prefix",
"+",
"middle_part",
")",
"+",
"'/'",
")",
",",
"extract_url_path_and_query",
"(",
"_raw_url_or_path",
")",
".",
"lstrip",
"(",
"'/'",
")",
")",
"if",
"is_escape",
":",
"result",
"=",
"s_esc",
"(",
"result",
")",
"return",
"result"
] |
convert url from remote to mirror url :type raw_url_or_path: str :type remote_domain: str :type is_scheme: bool :type is_escape: bool :rtype: str .
|
train
| false
|
52,511
|
def format_class(name, class_, docstring=None):
template = '.. py:class:: ckan.plugins.toolkit.{cls}\n\n{docstring}\n\n'
docstring = (docstring or inspect.getdoc(class_))
if (docstring is None):
docstring = ''
else:
docstring = '\n'.join([(' ' + line) for line in docstring.split('\n')])
return template.format(cls=name, docstring=docstring)
|
[
"def",
"format_class",
"(",
"name",
",",
"class_",
",",
"docstring",
"=",
"None",
")",
":",
"template",
"=",
"'.. py:class:: ckan.plugins.toolkit.{cls}\\n\\n{docstring}\\n\\n'",
"docstring",
"=",
"(",
"docstring",
"or",
"inspect",
".",
"getdoc",
"(",
"class_",
")",
")",
"if",
"(",
"docstring",
"is",
"None",
")",
":",
"docstring",
"=",
"''",
"else",
":",
"docstring",
"=",
"'\\n'",
".",
"join",
"(",
"[",
"(",
"' '",
"+",
"line",
")",
"for",
"line",
"in",
"docstring",
".",
"split",
"(",
"'\\n'",
")",
"]",
")",
"return",
"template",
".",
"format",
"(",
"cls",
"=",
"name",
",",
"docstring",
"=",
"docstring",
")"
] |
return a sphinx .. py:class:: directive documenting the given class .
|
train
| false
|
52,512
|
def add_strategy(algos, settings={}):
rst = _simulator.add_comb(algos, settings)
settings.clear()
return rst
|
[
"def",
"add_strategy",
"(",
"algos",
",",
"settings",
"=",
"{",
"}",
")",
":",
"rst",
"=",
"_simulator",
".",
"add_comb",
"(",
"algos",
",",
"settings",
")",
"settings",
".",
"clear",
"(",
")",
"return",
"rst"
] |
args: algos : a strategy combination returns: profile .
|
train
| false
|
52,516
|
def get_all_nexusport_bindings():
LOG.debug(_('get_all_nexusport_bindings() called'))
session = db.get_session()
try:
bindings = session.query(nexus_models_v2.NexusPortBinding).all()
return bindings
except exc.NoResultFound:
return []
|
[
"def",
"get_all_nexusport_bindings",
"(",
")",
":",
"LOG",
".",
"debug",
"(",
"_",
"(",
"'get_all_nexusport_bindings() called'",
")",
")",
"session",
"=",
"db",
".",
"get_session",
"(",
")",
"try",
":",
"bindings",
"=",
"session",
".",
"query",
"(",
"nexus_models_v2",
".",
"NexusPortBinding",
")",
".",
"all",
"(",
")",
"return",
"bindings",
"except",
"exc",
".",
"NoResultFound",
":",
"return",
"[",
"]"
] |
lists all the nexusport bindings .
|
train
| false
|
52,517
|
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands(u'me')
@sopel.module.priority(u'low')
def me(bot, trigger):
if (trigger.group(2) is None):
return
(channel, _sep, action) = trigger.group(2).partition(u' ')
action = action.strip()
if ((not channel) or (not action)):
return
msg = (u'\x01ACTION %s\x01' % action)
bot.msg(channel, msg)
|
[
"@",
"sopel",
".",
"module",
".",
"require_privmsg",
"@",
"sopel",
".",
"module",
".",
"require_admin",
"@",
"sopel",
".",
"module",
".",
"commands",
"(",
"u'me'",
")",
"@",
"sopel",
".",
"module",
".",
"priority",
"(",
"u'low'",
")",
"def",
"me",
"(",
"bot",
",",
"trigger",
")",
":",
"if",
"(",
"trigger",
".",
"group",
"(",
"2",
")",
"is",
"None",
")",
":",
"return",
"(",
"channel",
",",
"_sep",
",",
"action",
")",
"=",
"trigger",
".",
"group",
"(",
"2",
")",
".",
"partition",
"(",
"u' '",
")",
"action",
"=",
"action",
".",
"strip",
"(",
")",
"if",
"(",
"(",
"not",
"channel",
")",
"or",
"(",
"not",
"action",
")",
")",
":",
"return",
"msg",
"=",
"(",
"u'\\x01ACTION %s\\x01'",
"%",
"action",
")",
"bot",
".",
"msg",
"(",
"channel",
",",
"msg",
")"
] |
send an action to a given channel or nick .
|
train
| false
|
52,518
|
def ePutS(Handle, pIOType, Channel, Value, x1):
if (os.name == 'nt'):
staticLib = ctypes.windll.LoadLibrary('labjackud')
pv = ctypes.c_double(Value)
ec = staticLib.ePutS(Handle, pIOType, Channel, pv, x1)
if (ec != 0):
raise LabJackException(ec)
else:
raise LabJackException(0, 'Function only supported for Windows')
|
[
"def",
"ePutS",
"(",
"Handle",
",",
"pIOType",
",",
"Channel",
",",
"Value",
",",
"x1",
")",
":",
"if",
"(",
"os",
".",
"name",
"==",
"'nt'",
")",
":",
"staticLib",
"=",
"ctypes",
".",
"windll",
".",
"LoadLibrary",
"(",
"'labjackud'",
")",
"pv",
"=",
"ctypes",
".",
"c_double",
"(",
"Value",
")",
"ec",
"=",
"staticLib",
".",
"ePutS",
"(",
"Handle",
",",
"pIOType",
",",
"Channel",
",",
"pv",
",",
"x1",
")",
"if",
"(",
"ec",
"!=",
"0",
")",
":",
"raise",
"LabJackException",
"(",
"ec",
")",
"else",
":",
"raise",
"LabJackException",
"(",
"0",
",",
"'Function only supported for Windows'",
")"
] |
put one value to the labjack device . eput is equivalent to an addrequest followed by a goone .
|
train
| false
|
52,519
|
def rm_job(user, path, mask, cmd):
mask = str(mask).upper()
for item in mask.split(','):
if (item not in _MASK_TYPES):
return 'Invalid mask type: {0}'.format(item)
lst = list_tab(user)
ret = 'absent'
rm_ = None
for ind in range(len(lst['crons'])):
if (rm_ is not None):
break
if (path == lst['crons'][ind]['path']):
if (cmd == lst['crons'][ind]['cmd']):
if (mask == lst['crons'][ind]['mask']):
rm_ = ind
if (rm_ is not None):
lst['crons'].pop(rm_)
ret = 'removed'
comdat = _write_incron_lines(user, _render_tab(lst))
if comdat['retcode']:
return comdat['stderr']
return ret
|
[
"def",
"rm_job",
"(",
"user",
",",
"path",
",",
"mask",
",",
"cmd",
")",
":",
"mask",
"=",
"str",
"(",
"mask",
")",
".",
"upper",
"(",
")",
"for",
"item",
"in",
"mask",
".",
"split",
"(",
"','",
")",
":",
"if",
"(",
"item",
"not",
"in",
"_MASK_TYPES",
")",
":",
"return",
"'Invalid mask type: {0}'",
".",
"format",
"(",
"item",
")",
"lst",
"=",
"list_tab",
"(",
"user",
")",
"ret",
"=",
"'absent'",
"rm_",
"=",
"None",
"for",
"ind",
"in",
"range",
"(",
"len",
"(",
"lst",
"[",
"'crons'",
"]",
")",
")",
":",
"if",
"(",
"rm_",
"is",
"not",
"None",
")",
":",
"break",
"if",
"(",
"path",
"==",
"lst",
"[",
"'crons'",
"]",
"[",
"ind",
"]",
"[",
"'path'",
"]",
")",
":",
"if",
"(",
"cmd",
"==",
"lst",
"[",
"'crons'",
"]",
"[",
"ind",
"]",
"[",
"'cmd'",
"]",
")",
":",
"if",
"(",
"mask",
"==",
"lst",
"[",
"'crons'",
"]",
"[",
"ind",
"]",
"[",
"'mask'",
"]",
")",
":",
"rm_",
"=",
"ind",
"if",
"(",
"rm_",
"is",
"not",
"None",
")",
":",
"lst",
"[",
"'crons'",
"]",
".",
"pop",
"(",
"rm_",
")",
"ret",
"=",
"'removed'",
"comdat",
"=",
"_write_incron_lines",
"(",
"user",
",",
"_render_tab",
"(",
"lst",
")",
")",
"if",
"comdat",
"[",
"'retcode'",
"]",
":",
"return",
"comdat",
"[",
"'stderr'",
"]",
"return",
"ret"
] |
remove an incron job for a specified user .
|
train
| true
|
52,521
|
def _silent_no_wrap(func, modname):
return func(modname)
|
[
"def",
"_silent_no_wrap",
"(",
"func",
",",
"modname",
")",
":",
"return",
"func",
"(",
"modname",
")"
] |
silent wrapper that doesn't do anything; can be used for tests .
|
train
| false
|
52,524
|
def natural_sorted(iterable):
def sortkey(x):
return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]
numbers = re.compile('(\\d+)')
return sorted(iterable, key=sortkey)
|
[
"def",
"natural_sorted",
"(",
"iterable",
")",
":",
"def",
"sortkey",
"(",
"x",
")",
":",
"return",
"[",
"(",
"int",
"(",
"c",
")",
"if",
"c",
".",
"isdigit",
"(",
")",
"else",
"c",
")",
"for",
"c",
"in",
"re",
".",
"split",
"(",
"numbers",
",",
"x",
")",
"]",
"numbers",
"=",
"re",
".",
"compile",
"(",
"'(\\\\d+)'",
")",
"return",
"sorted",
"(",
"iterable",
",",
"key",
"=",
"sortkey",
")"
] |
return human sorted list of strings .
|
train
| true
|
52,525
|
def addPrefixDictionary(dictionary, keys, value):
for key in keys:
dictionary[key.lstrip('_')] = value
|
[
"def",
"addPrefixDictionary",
"(",
"dictionary",
",",
"keys",
",",
"value",
")",
":",
"for",
"key",
"in",
"keys",
":",
"dictionary",
"[",
"key",
".",
"lstrip",
"(",
"'_'",
")",
"]",
"=",
"value"
] |
add prefixed key values to dictionary .
|
train
| false
|
52,526
|
def action_initdb():
make_wiki().init_database()
|
[
"def",
"action_initdb",
"(",
")",
":",
"make_wiki",
"(",
")",
".",
"init_database",
"(",
")"
] |
initialize the database .
|
train
| false
|
52,527
|
@pytest.mark.skipif(u'not HAS_SCIPY')
def test_poisson_conf_frequentist_confidence_gehrels_3sigma():
nlh = np.array([(0, 3, 0, 6.608), (1, 3, 0.00135, 8.9), (2, 3, 0.0529, 10.87), (3, 3, 0.212, 12.68), (4, 3, 0.465, 14.39), (5, 3, 0.792, 16.03), (6, 3, 1.175, 17.62), (7, 3, 1.603, 19.17), (8, 3, 2.068, 20.69), (9, 3, 2.563, 22.18), (10, 3, 3.084, 23.64)])
assert_allclose(funcs.poisson_conf_interval(nlh[:, 0], sigma=3, interval=u'frequentist-confidence').T, nlh[:, 2:], rtol=0.01, verbose=True)
|
[
"@",
"pytest",
".",
"mark",
".",
"skipif",
"(",
"u'not HAS_SCIPY'",
")",
"def",
"test_poisson_conf_frequentist_confidence_gehrels_3sigma",
"(",
")",
":",
"nlh",
"=",
"np",
".",
"array",
"(",
"[",
"(",
"0",
",",
"3",
",",
"0",
",",
"6.608",
")",
",",
"(",
"1",
",",
"3",
",",
"0.00135",
",",
"8.9",
")",
",",
"(",
"2",
",",
"3",
",",
"0.0529",
",",
"10.87",
")",
",",
"(",
"3",
",",
"3",
",",
"0.212",
",",
"12.68",
")",
",",
"(",
"4",
",",
"3",
",",
"0.465",
",",
"14.39",
")",
",",
"(",
"5",
",",
"3",
",",
"0.792",
",",
"16.03",
")",
",",
"(",
"6",
",",
"3",
",",
"1.175",
",",
"17.62",
")",
",",
"(",
"7",
",",
"3",
",",
"1.603",
",",
"19.17",
")",
",",
"(",
"8",
",",
"3",
",",
"2.068",
",",
"20.69",
")",
",",
"(",
"9",
",",
"3",
",",
"2.563",
",",
"22.18",
")",
",",
"(",
"10",
",",
"3",
",",
"3.084",
",",
"23.64",
")",
"]",
")",
"assert_allclose",
"(",
"funcs",
".",
"poisson_conf_interval",
"(",
"nlh",
"[",
":",
",",
"0",
"]",
",",
"sigma",
"=",
"3",
",",
"interval",
"=",
"u'frequentist-confidence'",
")",
".",
"T",
",",
"nlh",
"[",
":",
",",
"2",
":",
"]",
",",
"rtol",
"=",
"0.01",
",",
"verbose",
"=",
"True",
")"
] |
test intervals against those published in gehrels 1986 .
|
train
| false
|
52,532
|
def _dump_date(d, delim):
if (d is None):
d = gmtime()
elif isinstance(d, datetime):
d = d.utctimetuple()
elif isinstance(d, (integer_types, float)):
d = gmtime(d)
return ('%s, %02d%s%s%s%s %02d:%02d:%02d GMT' % (('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')[d.tm_wday], d.tm_mday, delim, ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')[(d.tm_mon - 1)], delim, str(d.tm_year), d.tm_hour, d.tm_min, d.tm_sec))
|
[
"def",
"_dump_date",
"(",
"d",
",",
"delim",
")",
":",
"if",
"(",
"d",
"is",
"None",
")",
":",
"d",
"=",
"gmtime",
"(",
")",
"elif",
"isinstance",
"(",
"d",
",",
"datetime",
")",
":",
"d",
"=",
"d",
".",
"utctimetuple",
"(",
")",
"elif",
"isinstance",
"(",
"d",
",",
"(",
"integer_types",
",",
"float",
")",
")",
":",
"d",
"=",
"gmtime",
"(",
"d",
")",
"return",
"(",
"'%s, %02d%s%s%s%s %02d:%02d:%02d GMT'",
"%",
"(",
"(",
"'Mon'",
",",
"'Tue'",
",",
"'Wed'",
",",
"'Thu'",
",",
"'Fri'",
",",
"'Sat'",
",",
"'Sun'",
")",
"[",
"d",
".",
"tm_wday",
"]",
",",
"d",
".",
"tm_mday",
",",
"delim",
",",
"(",
"'Jan'",
",",
"'Feb'",
",",
"'Mar'",
",",
"'Apr'",
",",
"'May'",
",",
"'Jun'",
",",
"'Jul'",
",",
"'Aug'",
",",
"'Sep'",
",",
"'Oct'",
",",
"'Nov'",
",",
"'Dec'",
")",
"[",
"(",
"d",
".",
"tm_mon",
"-",
"1",
")",
"]",
",",
"delim",
",",
"str",
"(",
"d",
".",
"tm_year",
")",
",",
"d",
".",
"tm_hour",
",",
"d",
".",
"tm_min",
",",
"d",
".",
"tm_sec",
")",
")"
] |
used for http_date and cookie_date .
|
train
| true
|
52,533
|
def APO(ds, count, fastperiod=(- (2 ** 31)), slowperiod=(- (2 ** 31)), matype=0):
return call_talib_with_ds(ds, count, talib.APO, fastperiod, slowperiod, matype)
|
[
"def",
"APO",
"(",
"ds",
",",
"count",
",",
"fastperiod",
"=",
"(",
"-",
"(",
"2",
"**",
"31",
")",
")",
",",
"slowperiod",
"=",
"(",
"-",
"(",
"2",
"**",
"31",
")",
")",
",",
"matype",
"=",
"0",
")",
":",
"return",
"call_talib_with_ds",
"(",
"ds",
",",
"count",
",",
"talib",
".",
"APO",
",",
"fastperiod",
",",
"slowperiod",
",",
"matype",
")"
] |
absolute price oscillator .
|
train
| false
|
52,534
|
def run_experiments():
shallow()
basic_conv()
omit_FC()
dbl_conv(activation_fn=sigmoid)
dbl_conv_relu()
expanded_data(n=100)
expanded_data(n=300)
expanded_data(n=1000)
expanded_data_double_fc(n=100)
expanded_data_double_fc(n=300)
expanded_data_double_fc(n=1000)
nets = double_fc_dropout(0.5, 0.5, 0.5, 5)
(error_locations, erroneous_predictions) = ensemble(nets)
plt = plot_errors(error_locations, erroneous_predictions)
plt.savefig('ensemble_errors.png')
plt = plot_filters(nets[0], 0, 5, 4)
plt.savefig('net_full_layer_0.png')
plt = plot_filters(nets[0], 1, 8, 5)
plt.savefig('net_full_layer_1.png')
|
[
"def",
"run_experiments",
"(",
")",
":",
"shallow",
"(",
")",
"basic_conv",
"(",
")",
"omit_FC",
"(",
")",
"dbl_conv",
"(",
"activation_fn",
"=",
"sigmoid",
")",
"dbl_conv_relu",
"(",
")",
"expanded_data",
"(",
"n",
"=",
"100",
")",
"expanded_data",
"(",
"n",
"=",
"300",
")",
"expanded_data",
"(",
"n",
"=",
"1000",
")",
"expanded_data_double_fc",
"(",
"n",
"=",
"100",
")",
"expanded_data_double_fc",
"(",
"n",
"=",
"300",
")",
"expanded_data_double_fc",
"(",
"n",
"=",
"1000",
")",
"nets",
"=",
"double_fc_dropout",
"(",
"0.5",
",",
"0.5",
",",
"0.5",
",",
"5",
")",
"(",
"error_locations",
",",
"erroneous_predictions",
")",
"=",
"ensemble",
"(",
"nets",
")",
"plt",
"=",
"plot_errors",
"(",
"error_locations",
",",
"erroneous_predictions",
")",
"plt",
".",
"savefig",
"(",
"'ensemble_errors.png'",
")",
"plt",
"=",
"plot_filters",
"(",
"nets",
"[",
"0",
"]",
",",
"0",
",",
"5",
",",
"4",
")",
"plt",
".",
"savefig",
"(",
"'net_full_layer_0.png'",
")",
"plt",
"=",
"plot_filters",
"(",
"nets",
"[",
"0",
"]",
",",
"1",
",",
"8",
",",
"5",
")",
"plt",
".",
"savefig",
"(",
"'net_full_layer_1.png'",
")"
] |
run the experiments described in the book .
|
train
| false
|
52,535
|
def enabled_quotas(request):
return (set(QUOTA_FIELDS) - get_disabled_quotas(request))
|
[
"def",
"enabled_quotas",
"(",
"request",
")",
":",
"return",
"(",
"set",
"(",
"QUOTA_FIELDS",
")",
"-",
"get_disabled_quotas",
"(",
"request",
")",
")"
] |
returns the list of quotas available minus those that are disabled .
|
train
| false
|
52,536
|
def encode_tbs(val, extra, flag_size=4):
flags = 0
for flag in extra:
flags |= flag
ans = encode_fvwi(val, flags, flag_size=flag_size)
if (2 in extra):
ans += encint(extra[2])
if (4 in extra):
ans += bytes(bytearray([extra[4]]))
if (1 in extra):
ans += encint(extra[1])
return ans
|
[
"def",
"encode_tbs",
"(",
"val",
",",
"extra",
",",
"flag_size",
"=",
"4",
")",
":",
"flags",
"=",
"0",
"for",
"flag",
"in",
"extra",
":",
"flags",
"|=",
"flag",
"ans",
"=",
"encode_fvwi",
"(",
"val",
",",
"flags",
",",
"flag_size",
"=",
"flag_size",
")",
"if",
"(",
"2",
"in",
"extra",
")",
":",
"ans",
"+=",
"encint",
"(",
"extra",
"[",
"2",
"]",
")",
"if",
"(",
"4",
"in",
"extra",
")",
":",
"ans",
"+=",
"bytes",
"(",
"bytearray",
"(",
"[",
"extra",
"[",
"4",
"]",
"]",
")",
")",
"if",
"(",
"1",
"in",
"extra",
")",
":",
"ans",
"+=",
"encint",
"(",
"extra",
"[",
"1",
"]",
")",
"return",
"ans"
] |
encode the number val and the extra data in the extra dict as an fvwi .
|
train
| false
|
52,537
|
def getfilesystemencoding():
encoding = sys.getfilesystemencoding()
if (encoding is None):
encoding = PREFERRED_ENCODING
return encoding
|
[
"def",
"getfilesystemencoding",
"(",
")",
":",
"encoding",
"=",
"sys",
".",
"getfilesystemencoding",
"(",
")",
"if",
"(",
"encoding",
"is",
"None",
")",
":",
"encoding",
"=",
"PREFERRED_ENCODING",
"return",
"encoding"
] |
query the filesystem for the encoding used to encode filenames and environment variables .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.