| id_within_dataset (int64, 1 to 55.5k) | snippet (string, 19 to 14.2k chars) | nl (string, 6 to 352 chars) | split_within_dataset (1 class) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|
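A minimal sketch of how rows with this schema could be iterated, assuming the data is exposed through the Hugging Face datasets library; the dataset id below is hypothetical, since this excerpt does not name it:

from datasets import load_dataset

# hypothetical dataset id: substitute the real one for this table
ds = load_dataset('code-to-nl-pairs', split='train')
for row in ds.select(range(3)):
    print(row['id_within_dataset'], row['nl'])
    print(row['snippet'])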
11,251
|
def _list_snapshots_command(filesystem):
    return ['list', '-H', '-r', '-t', 'snapshot', '-o', 'name', '-s', 'creation', filesystem.name]
|
construct a zfs command which will output the names of the snapshots of the given filesystem .
|
train
| false
|
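The snippet returns only the argument vector; a caller still has to prepend the zfs binary. A minimal sketch, assuming a hypothetical list_snapshots wrapper and zfs on PATH:

import subprocess

def list_snapshots(filesystem):
    # hypothetical wrapper: '-H' suppresses headers, so each output
    # line is one snapshot name, oldest first ('-s creation')
    cmd = ['zfs'] + _list_snapshots_command(filesystem)
    return subprocess.check_output(cmd, text=True).splitlines()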
11,252
|
def architecture(executable=sys.executable, bits='', linkage=''):
    if (not bits):
        import struct
        try:
            size = struct.calcsize('P')
        except struct.error:
            size = struct.calcsize('l')
        bits = (str((size * 8)) + 'bit')
    if executable:
        output = _syscmd_file(executable, '')
    else:
        output = ''
    if ((not output) and (executable == sys.executable)):
        if (sys.platform in _default_architecture):
            (b, l) = _default_architecture[sys.platform]
            if b:
                bits = b
            if l:
                linkage = l
        return (bits, linkage)
    fileout = _architecture_split(output)[1:]
    if ('executable' not in fileout):
        return (bits, linkage)
    if ('32-bit' in fileout):
        bits = '32bit'
    elif ('N32' in fileout):
        bits = 'n32bit'
    elif ('64-bit' in fileout):
        bits = '64bit'
    if ('ELF' in fileout):
        linkage = 'ELF'
    elif ('PE' in fileout):
        if ('Windows' in fileout):
            linkage = 'WindowsPE'
        else:
            linkage = 'PE'
    elif ('COFF' in fileout):
        linkage = 'COFF'
    elif ('MS-DOS' in fileout):
        linkage = 'MSDOS'
    else:
        pass
    return (bits, linkage)
|
returns the bit depth of the python interpreter's architecture as a string.
|
train
| false
|
11,254
|
def rigid_alignment(faces, path, plotflag=False):
    refpoints = faces.values()[0]
    for face in faces:
        points = faces[face]
        (R, tx, ty) = compute_rigid_transform(refpoints, points)
        T = array([[R[1][1], R[1][0]], [R[0][1], R[0][0]]])
        im = array(Image.open(os.path.join(path, face)))
        im2 = zeros(im.shape, 'uint8')
        for i in range(len(im.shape)):
            im2[:, :, i] = ndimage.affine_transform(im[:, :, i], linalg.inv(T), offset=[(- ty), (- tx)])
        if plotflag:
            imshow(im2)
            show()
        (h, w) = im2.shape[:2]
        border = ((w + h) / 20)
        imsave(os.path.join(path, ('aligned/' + face)), im2[border:(h - border), border:(w - border), :])
|
align images rigidly and save as new images .
|
train
| false
|
11,255
|
def addMeldedPillarByLoops(faces, indexedLoops):
    if (len(indexedLoops) < 1):
        return
    if (len(indexedLoops[(-1)]) < 1):
        addFacesByMeldedConvexLoops(faces, indexedLoops)
        return
    addFacesByLoopReversed(faces, indexedLoops[0])
    addFacesByMeldedConvexLoops(faces, indexedLoops)
    addFacesByLoop(faces, indexedLoops[(-1)])
|
add melded pillar by loops which may be concave .
|
train
| false
|
11,256
|
def new_class(name, bases=(), kwds=None, exec_body=None):
    (meta, ns, kwds) = prepare_class(name, bases, kwds)
    if (exec_body is not None):
        exec_body(ns)
    return meta(name, bases, ns, **kwds)
|
create a class object dynamically using the appropriate metaclass .
|
train
| false
|
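The snippet mirrors the standard library's types.new_class, which is built on the same prepare_class call; a small usage sketch with the stdlib version:

import types

# exec_body receives the class namespace and populates it
Point = types.new_class('Point', (), exec_body=lambda ns: ns.update(x=0, y=0))
p = Point()
print(p.x, p.y)  # 0 0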
11,258
|
def _AppendFirstLeafTokenSubtype(node, subtype):
    if isinstance(node, pytree.Leaf):
        _AppendTokenSubtype(node, subtype)
        return
    _AppendFirstLeafTokenSubtype(node.children[0], subtype)
|
append the given subtype to the first leaf token.
|
train
| false
|
11,259
|
@pytest.mark.parametrize(u'localhost', (u'localhost', u'127.0.0.1'))
def test_localconnect_succeeds(localhost):
    httpd = StoppableHTTPServer((u'localhost', 0), SimpleHTTPServer.SimpleHTTPRequestHandler)
    port = httpd.socket.getsockname()[1]
    server = Thread(target=httpd.serve_forever)
    server.setDaemon(True)
    server.start()
    time.sleep(0.1)
    urlopen(u'http://{localhost:s}:{port:d}'.format(localhost=localhost, port=port)).close()
|
ensure that connections to localhost are allowed .
|
train
| false
|
11,260
|
@parametrize('cls', tables.mapped_classes)
def test_nonzero_autoincrement_ids(session, cls):
    if ('id' not in cls.__table__.c):
        return
    if (not cls.__table__.c.id.autoincrement):
        return
    try:
        util.get(session, cls, id=0)
    except NoResultFound:
        pass
    else:
        pytest.fail(('No zero id in %s' % cls.__name__))
|
check that autoincrementing ids don't contain zeroes; mysql doesn't like these.
|
train
| false
|
11,262
|
def _invalidate_edge(graph, n1, n2):
    graph[n1][n2]['heap item'][3] = False
|
invalidates the edge in the heap .
|
train
| false
|
11,263
|
@register_jitable
def _get_rank_from_singular_values(sv, t):
    rank = 0
    for k in range(len(sv)):
        if (sv[k] > t):
            rank = (rank + 1)
        else:
            break
    return rank
|
gets rank from singular values with cut-off at a given tolerance .
|
train
| false
|
11,265
|
def _paths_from_data(paths_data):
    paths = []
    for path_data in paths_data:
        paths.append([PathItem(item['usage_key'], item['display_name']) for item in path_data if (item['usage_key'].block_type != 'course')])
    return [path for path in paths if path]
|
construct a list of paths from path data .
|
train
| false
|
11,267
|
def _encode_binary(name, value, dummy0, dummy1):
    subtype = value.subtype
    if (subtype == 2):
        value = (_PACK_INT(len(value)) + value)
    return ((('\x05' + name) + _PACK_LENGTH_SUBTYPE(len(value), subtype)) + value)
|
encode a bson binary value.
|
train
| true
|
11,268
|
def has_team_api_access(user, course_key, access_username=None):
    if user.is_staff:
        return True
    if CourseStaffRole(course_key).has_user(user):
        return True
    if has_discussion_privileges(user, course_key):
        return True
    if ((not access_username) or (access_username == user.username)):
        return CourseEnrollment.is_enrolled(user, course_key)
    return False
|
returns true if the user has access to the team api for the course given by course_key .
|
train
| false
|
11,270
|
def string_to_translatedfield_value(text):
    guess = guess_language(text)
    if guess:
        lang = find_language(guess).lower()
        if lang:
            return {lang: text}
    return {settings.SHORTER_LANGUAGES['en'].lower(): text}
|
guess the language of the passed string and return a {language: text} mapping for a translated field.
|
train
| false
|
11,272
|
def rm_dns(ip, interface='Local Area Connection'):
    cmd = ['netsh', 'interface', 'ip', 'delete', 'dns', interface, ip, 'validate=no']
    return (__salt__['cmd.retcode'](cmd, python_shell=False) == 0)
|
remove the dns server from the network interface.
|
train
| true
|
11,273
|
def token_from_blob(blob):
    parts = _split_token_parts(blob)
    if (parts[0] == '1c'):
        return ClientLoginToken(parts[1])
    elif (parts[0] == '1a'):
        return AuthSubToken(parts[1], parts[2:])
    elif (parts[0] == '1s'):
        return SecureAuthSubToken(parts[1], parts[2], parts[3:])
    elif (parts[0] == '1rtl'):
        return TwoLeggedOAuthRsaToken(parts[1], parts[2], parts[3])
    elif (parts[0] == '1htl'):
        return TwoLeggedOAuthHmacToken(parts[1], parts[2], parts[3])
    elif (parts[0] == '1r'):
        auth_state = int(parts[5])
        return OAuthRsaToken(parts[1], parts[2], parts[3], parts[4], auth_state, parts[6], parts[7])
    elif (parts[0] == '1h'):
        auth_state = int(parts[5])
        return OAuthHmacToken(parts[1], parts[2], parts[3], parts[4], auth_state, parts[6], parts[7])
    elif (parts[0] == '2o'):
        return OAuth2Token(parts[1], parts[2], parts[3], parts[4], parts[5], parts[6], parts[7], parts[8])
    else:
        raise UnsupportedTokenType(('Unable to deserialize token with type marker of %s' % parts[0]))
|
deserializes a token string from the datastore back into a token object .
|
train
| false
|
11,274
|
def chebyshev(u, v):
    u = _validate_vector(u)
    v = _validate_vector(v)
    return max(abs((u - v)))
|
computes the chebyshev distance .
|
train
| false
|
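Stripped of the input validation, the chebyshev distance is just the largest absolute coordinate difference; a minimal numpy sketch:

import numpy as np

u = np.array([1.0, 2.0, 3.0])
v = np.array([4.0, 1.0, 3.5])
# L-infinity norm of the difference: max_i |u_i - v_i|
print(np.max(np.abs(u - v)))  # 3.0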
11,275
|
def get_block_structure_manager(course_key):
    store = modulestore()
    course_usage_key = store.make_course_usage_key(course_key)
    return BlockStructureManager(course_usage_key, store, get_cache())
|
returns the manager for managing block structures for the given course .
|
train
| false
|
11,276
|
@treeio_login_required
def ajax_access_lookup(request, response_format='html'):
    entities = []
    if (request.GET and ('term' in request.GET)):
        entities = AccessEntity.objects.filter(((Q(user__name__icontains=request.GET['term']) | Q(user__contact__name__icontains=request.GET['term'])) | Q(group__name__icontains=request.GET['term'])))
    return render_to_response('identities/ajax_access_lookup', {'entities': entities}, context_instance=RequestContext(request), response_format=response_format)
|
returns a list of matching users .
|
train
| false
|
11,277
|
def _compare_policy(current, desired, region, key, keyid, profile):
    if isinstance(desired, string_types):
        desired = json.loads(desired)
    if (current is not None):
        temp = current.get('Policy')
        if isinstance(temp, string_types):
            current = {'Policy': json.loads(temp)}
        else:
            current = None
    return __utils__['boto3.json_objs_equal'](current, desired)
|
compare the current policy with the desired one; the policy description is always returned as a json string.
|
train
| false
|
11,278
|
def get_checkbox(state):
    return (u'<input type="checkbox" disabled%s> ' % (u' checked' if (state.lower() == u'x') else u''))
|
get checkbox tag .
|
train
| false
|
11,279
|
def run_this(cmd, args, logger):
    my_cmd = (cmd % args)
    rc = subprocess_call(logger, my_cmd, shell=True)
    if (rc != 0):
        die(logger, 'Command failed')
|
a simple wrapper around subprocess calls .
|
train
| false
|
11,280
|
def ZIP_Extract(zipfile, extraction_path, one_folder):
    if (one_folder or cfg.flat_unpack()):
        option = '-j'
    else:
        option = '-qq'
    command = [('%s' % ZIP_COMMAND), '-o', '-qq', option, '-Pnone', ('%s' % zipfile), ('-d%s' % extraction_path)]
    (stup, need_shell, command, creationflags) = build_command(command)
    logging.debug('Starting unzip: %s', command)
    p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, startupinfo=stup, creationflags=creationflags)
    output = p.stdout.read()
    logging.debug('unzip output: %s', output)
    ret = p.wait()
    return ret
|
unzip a single zip set from zipfile to extraction_path.
|
train
| false
|
11,281
|
def test_rgb_to_hsl_part_6():
    assert (rgb_to_hsl(0, 0, 255) == (240, 100, 50))
    assert (rgb_to_hsl(51, 0, 255) == (252, 100, 50))
    assert (rgb_to_hsl(102, 0, 255) == (264, 100, 50))
    assert (rgb_to_hsl(153, 0, 255) == (276, 100, 50))
    assert (rgb_to_hsl(204, 0, 255) == (288, 100, 50))
    assert (rgb_to_hsl(255, 0, 255) == (300, 100, 50))
    assert (rgb_to_hsl(255, 0, 204) == (312, 100, 50))
    assert (rgb_to_hsl(255, 0, 153) == (324, 100, 50))
    assert (rgb_to_hsl(255, 0, 102) == (336, 100, 50))
    assert (rgb_to_hsl(255, 0, 51) == (348, 100, 50))
|
test rgb to hsl color function .
|
train
| false
|
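The asserts above pin the hue wheel down in 12-degree steps; a minimal rgb_to_hsl consistent with them (an assumption for illustration, not the tested project's implementation):

def rgb_to_hsl(r, g, b):
    # normalize channels to [0, 1]
    r, g, b = r / 255.0, g / 255.0, b / 255.0
    mx, mn = max(r, g, b), min(r, g, b)
    l = (mx + mn) / 2.0
    if mx == mn:
        h = s = 0.0  # achromatic
    else:
        d = mx - mn
        s = d / (2.0 - mx - mn) if l > 0.5 else d / (mx + mn)
        if mx == r:
            h = ((g - b) / d) % 6.0
        elif mx == g:
            h = (b - r) / d + 2.0
        else:
            h = (r - g) / d + 4.0
        h *= 60.0
    return (round(h), round(s * 100), round(l * 100))

assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(255, 0, 51) == (348, 100, 50)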
11,283
|
def _get_packages(module, pip, chdir):
    command = ('%s list' % pip)
    lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
    (rc, out, err) = module.run_command(command, cwd=chdir, environ_update=lang_env)
    if (rc != 0):
        command = ('%s freeze' % pip)
        (rc, out, err) = module.run_command(command, cwd=chdir)
        if (rc != 0):
            _fail(module, command, out, err)
    return (command, out, err)
|
return results of pip command to get packages .
|
train
| false
|
11,284
|
def execute_command(cmd, args, globals_dict, locals_dict, cmdinfo):
    global command_list
    locals_dict['__cmd__'] = cmd
    locals_dict['__args__'] = args
    if (cmd not in command_list):
        raise TwillNameError(("unknown twill command: '%s'" % (cmd,)))
    eval_str = ('%s(*__args__)' % (cmd,))
    codeobj = compile(eval_str, cmdinfo, 'eval')
    result = eval(codeobj, globals_dict, locals_dict)
    locals_dict['__url__'] = commands.browser.get_url()
    return result
|
execute a twill command by compiling and evaluating it with the given globals and locals.
|
train
| false
|
11,286
|
def register_json_typecasters(conn, loads_fn):
    available = set()
    for name in ['json', 'jsonb']:
        try:
            psycopg2.extras.register_json(conn, loads=loads_fn, name=name)
            available.add(name)
        except psycopg2.ProgrammingError:
            pass
    return available
|
set the function for converting json data for a connection .
|
train
| false
|
11,287
|
def run_commit_hook(name, index):
    hp = hook_path(name, index.repo.git_dir)
    if (not os.access(hp, os.X_OK)):
        return
    env = os.environ.copy()
    env['GIT_INDEX_FILE'] = (safe_decode(index.path) if PY3 else safe_encode(index.path))
    env['GIT_EDITOR'] = ':'
    try:
        cmd = subprocess.Popen(hp, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=index.repo.working_dir, close_fds=is_posix, creationflags=PROC_CREATIONFLAGS)
    except Exception as ex:
        raise HookExecutionError(hp, ex)
    else:
        stdout = []
        stderr = []
        handle_process_output(cmd, stdout.append, stderr.append, finalize_process)
        stdout = ''.join(stdout)
        stderr = ''.join(stderr)
        if (cmd.returncode != 0):
            stdout = force_text(stdout, defenc)
            stderr = force_text(stderr, defenc)
            raise HookExecutionError(hp, cmd.returncode, stdout, stderr)
|
run the commit hook of the given name .
|
train
| true
|
11,288
|
def test_email_is_saved_in_order(authorized_client, billing_address, customer_user, request_cart_with_item, shipping_method):
    customer_user.addresses.add(billing_address)
    request_cart_with_item.user = customer_user
    request_cart_with_item.save()
    shipping_address = authorized_client.get(reverse('checkout:index'), follow=True)
    shipping_data = {'address': billing_address.pk}
    shipping_method_page = authorized_client.post(shipping_address.request['PATH_INFO'], data=shipping_data, follow=True)
    shipping_method_data = {'method': shipping_method.pk}
    shipping_method_response = authorized_client.post(shipping_method_page.request['PATH_INFO'], data=shipping_method_data, follow=True)
    payment_method_data = {'address': 'shipping_address'}
    payment_method_page = authorized_client.post(shipping_method_response.request['PATH_INFO'], data=payment_method_data, follow=True)
    order = payment_method_page.context['order']
    assert (order.user_email == customer_user.email)
|
check that the authorized user's email is saved in the order created at checkout.
|
train
| false
|
11,292
|
def send_api_fault(url, status, exception):
    if (not CONF.notify_api_faults):
        return
    payload = {'url': url, 'exception': str(exception), 'status': status}
    publisher_id = notifier_api.publisher_id('api')
    notifier_api.notify(None, publisher_id, 'api.fault', notifier_api.ERROR, payload)
|
send an api fault notification.
|
train
| false
|
11,293
|
def is_callable(obj):
    if (not callable(obj)):
        raise ValueError('Value must be a callable')
    return True
|
return true if the object is callable; raise valueerror otherwise.
|
train
| false
|
11,295
|
def get_short_module_name(module_name, obj_name):
    parts = module_name.split('.')
    short_name = module_name
    for i in range((len(parts) - 1), 0, (-1)):
        short_name = '.'.join(parts[:i])
        try:
            exec ('from %s import %s' % (short_name, obj_name))
        except Exception:
            short_name = '.'.join(parts[:(i + 1)])
            break
    return short_name
|
get the shortest possible module name .
|
train
| true
|
11,296
|
def get_site_encoding():
    global SITE_ENCODING
    if (SITE_ENCODING is None):
        encoding = desktop.conf.DEFAULT_SITE_ENCODING.get()
        if (not validate_encoding(encoding)):
            default = desktop.conf.DEFAULT_SITE_ENCODING.config.default_value
            msg = ('Invalid HUE configuration value for %s: "%s". Using default "%s"' % (desktop.conf.DEFAULT_SITE_ENCODING.config.key, encoding, default))
            logging.error(msg)
            encoding = default
        SITE_ENCODING = encoding
    return SITE_ENCODING
|
get the default site encoding .
|
train
| false
|
11,297
|
def GetSitelinksFromFeed(client, feed):
    feed_mappings = GetFeedMapping(client, feed, PLACEHOLDER_TYPE_SITELINKS)
    feed_items = {}
    for feed_item in GetFeedItems(client, feed):
        site_link_from_feed = {}
        for attribute_value in feed_item['attributeValues']:
            if (attribute_value['feedAttributeId'] in feed_mappings):
                for field_id in feed_mappings[attribute_value['feedAttributeId']]:
                    if (field_id == SITE_LINK_FIELDS['TEXT']):
                        site_link_from_feed['text'] = attribute_value['stringValue']
                    elif (field_id == SITE_LINK_FIELDS['URL']):
                        site_link_from_feed['url'] = attribute_value['stringValue']
                    elif (field_id == SITE_LINK_FIELDS['FINAL_URLS']):
                        site_link_from_feed['finalUrls'] = attribute_value['stringValues']
                    elif (field_id == SITE_LINK_FIELDS['FINAL_MOBILE_URLS']):
                        site_link_from_feed['finalMobileUrls'] = attribute_value['stringValues']
                    elif (field_id == SITE_LINK_FIELDS['TRACKING_URL_TEMPLATE']):
                        site_link_from_feed['trackingUrlTemplate'] = attribute_value['stringValue']
                    elif (field_id == SITE_LINK_FIELDS['LINE2']):
                        site_link_from_feed['line2'] = attribute_value['stringValue']
                    elif (field_id == SITE_LINK_FIELDS['LINE3']):
                        site_link_from_feed['line3'] = attribute_value['stringValue']
                    else:
                        print ('No applicable Site Link Field found for Id: %s' % field_id)
        if ('scheduling' in feed_item):
            site_link_from_feed['scheduling'] = feed_item['scheduling']
        feed_items[feed_item['feedItemId']] = site_link_from_feed
    return feed_items
|
gets the sitelinks from a feed .
|
train
| true
|
11,298
|
def apply_boosts(searcher):
    return searcher.boost(question_title=4.0, question_content=3.0, question_answer_content=3.0, post_title=2.0, post_content=1.0, document_title=6.0, document_content=1.0, document_keywords=8.0, document_summary=2.0, document_title__match_phrase=10.0, document_content__match_phrase=8.0)
|
returns searcher with boosts applied .
|
train
| false
|
11,299
|
def Cdfs(cdfs, complement=False, transform=None, **options):
    for cdf in cdfs:
        Cdf(cdf, complement, transform, **options)
|
plots a sequence of cdfs .
|
train
| false
|
11,300
|
@task
def delete_old_documentspamattempt_data(days=30):
    older = (datetime.now() - timedelta(days=30))
    dsas = DocumentSpamAttempt.objects.filter(created__lt=older).exclude(data__isnull=True)
    dsas_reviewed = dsas.exclude(review=DocumentSpamAttempt.NEEDS_REVIEW)
    dsas_unreviewed = dsas.filter(review=DocumentSpamAttempt.NEEDS_REVIEW)
    dsas_reviewed.update(data=None)
    dsas_unreviewed.update(data=None, review=DocumentSpamAttempt.REVIEW_UNAVAILABLE)
|
delete old documentspamattempt data.
|
train
| false
|
11,301
|
def video_screen_size(filename):
    if ((filename in bad_files) or (not sickbeard.helpers.isMediaFile(filename))):
        return (None, None)
    for method in [_mkv_screen_size, _avi_screen_size]:
        screen_size = method(filename)
        if (screen_size != (None, None)):
            return screen_size
    bad_files.add(filename)
    return (None, None)
|
attempts to read the width and height of a video file .
|
train
| false
|
11,302
|
def make_error_view(error_status):
    def view(request, *args, **kwargs):
        handler_attr = (HANDLER_ATTR_FMT % error_status)
        for handler_spec in settings.SHUUP_ERROR_PAGE_HANDLERS_SPEC:
            handler = load(handler_spec)()
            if handler.can_handle_error(request, error_status):
                return handler.handle_error(request, error_status)
        fallback_handler = _URLCONF_ERROR_HANDLERS.get(handler_attr)
        if (fallback_handler and callable(fallback_handler)):
            return fallback_handler(request)
        else:
            return HttpResponse(status=error_status)
    return view
|
a factory of error views which tries to find a compatible error handler, falling back to the urlconf handler if no handler can do the job.
|
train
| false
|
11,303
|
@pytest.mark.network
def test_install_editable_uninstalls_existing(data, script, tmpdir):
    to_install = data.packages.join('pip-test-package-0.1.tar.gz')
    result = script.pip_install_local(to_install)
    assert ('Successfully installed pip-test-package' in result.stdout)
    result.assert_installed('piptestpackage', editable=False)
    result = script.pip('install', '-e', ('%s#egg=pip-test-package' % local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join('cache'))))
    result.assert_installed('pip-test-package', with_files=['.git'])
    assert ('Found existing installation: pip-test-package 0.1' in result.stdout)
    assert ('Uninstalling pip-test-package-' in result.stdout)
    assert ('Successfully uninstalled pip-test-package' in result.stdout)
|
test that installing an editable uninstalls a previously installed non-editable version .
|
train
| false
|
11,304
|
def PointCollection(mode='raw', *args, **kwargs):
    if (mode == 'raw'):
        return RawPointCollection(*args, **kwargs)
    return AggPointCollection(*args, **kwargs)
|
create a point collection in "raw" or "agg" mode.
|
train
| true
|
11,306
|
def complete_import(prefix, line, start, end, ctx):
    ltoks = line.split()
    ntoks = len(ltoks)
    if ((ntoks == 2) and (ltoks[0] == 'from')):
        return {'{} '.format(i) for i in complete_module(prefix)}
    if ((ntoks > 1) and (ltoks[0] == 'import') and (start == len('import '))):
        return complete_module(prefix)
    if ((ntoks > 2) and (ltoks[0] == 'from') and (ltoks[2] == 'import')):
        try:
            mod = importlib.import_module(ltoks[1])
        except ImportError:
            return set()
        out = {i[0] for i in inspect.getmembers(mod) if i[0].startswith(prefix)}
        return out
    return set()
|
completes module names and contents for "import" and "from ... import" statements.
|
train
| false
|
11,308
|
def print_options():
    print ''
|
displays usage information .
|
train
| false
|
11,309
|
def captured_stderr():
    return captured_output('stderr')
|
capture the output of sys.stderr.
|
train
| false
|
11,310
|
def _chop_end_codes(line):
    return re.sub('\\s\\s\\s\\s+[\\w]{4}.\\s+\\d*\\Z', '', line)
|
chops lines ending with 1csa 14 and the like .
|
train
| false
|
11,311
|
def http_auth_request(url, host, user, passwd, user_agent=USER_AGENT):
    if DEBUG:
        httplib.HTTPConnection.debuglevel = 1
    password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, host, user, passwd)
    auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
    opener = urllib2.build_opener(auth_handler)
    urllib2.install_opener(opener)
    return http_request(url, user_agent)
|
call an http server with authorization credentials using urllib2 .
|
train
| false
|
11,313
|
def test_install_python_bin():
    tmp_virtualenv = tempfile.mkdtemp()
    try:
        (home_dir, lib_dir, inc_dir, bin_dir) = virtualenv.path_locations(tmp_virtualenv)
        virtualenv.install_python(home_dir, lib_dir, inc_dir, bin_dir, False, False)
        if virtualenv.is_win:
            required_executables = ['python.exe', 'pythonw.exe']
        else:
            py_exe_no_version = 'python'
            py_exe_version_major = ('python%s' % sys.version_info[0])
            py_exe_version_major_minor = ('python%s.%s' % (sys.version_info[0], sys.version_info[1]))
            required_executables = [py_exe_no_version, py_exe_version_major, py_exe_version_major_minor]
        for pth in required_executables:
            assert os.path.exists(os.path.join(bin_dir, pth)), ('%s should exist in bin_dir' % pth)
    finally:
        shutil.rmtree(tmp_virtualenv)
|
should create the right python executables and links .
|
train
| false
|
11,315
|
def parseAuthorization(credentials):
    global AUTH_SCHEMES
    (auth_scheme, auth_params) = credentials.split(' ', 1)
    auth_scheme = auth_scheme.lower()
    parser = AUTH_SCHEMES[auth_scheme]
    params = parser(auth_params)
    if (params is None):
        return
    assert ('auth_scheme' not in params)
    params['auth_scheme'] = auth_scheme
    return params
|
parseauthorization converts the value of the authorization key in the http header to a map.
|
train
| false
|
11,318
|
@handle_response_format
@treeio_login_required
def index(request, response_format='html'):
    profile = request.user.profile
    query = _get_filter_query(profile, filters=request.GET)
    updates = UpdateRecord.objects.filter(query).distinct()
    if request.POST:
        record = UpdateRecord()
        record.record_type = 'share'
        form = UpdateRecordForm(request.POST, user=profile, instance=record)
        if form.is_valid():
            record = form.save()
            record.body = record.body.replace('\n', ' <br />')
            record.save()
            record.set_user_from_request(request)
            return HttpResponseRedirect(reverse('news_index'))
    else:
        form = UpdateRecordForm(user=profile)
    if (response_format == 'rss'):
        return ObjectFeed(title=_('All Activity'), link=request.path, description=_('Updates on activity in your Tree.io'), objects=updates)(request)
    context = _get_default_context(request)
    context.update({'form': form, 'updates': updates, 'profile': profile})
    return render_to_response('news/index', context, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"index",
"(",
"request",
",",
"response_format",
"=",
"'html'",
")",
":",
"profile",
"=",
"request",
".",
"user",
".",
"profile",
"query",
"=",
"_get_filter_query",
"(",
"profile",
",",
"filters",
"=",
"request",
".",
"GET",
")",
"updates",
"=",
"UpdateRecord",
".",
"objects",
".",
"filter",
"(",
"query",
")",
".",
"distinct",
"(",
")",
"if",
"request",
".",
"POST",
":",
"record",
"=",
"UpdateRecord",
"(",
")",
"record",
".",
"record_type",
"=",
"'share'",
"form",
"=",
"UpdateRecordForm",
"(",
"request",
".",
"POST",
",",
"user",
"=",
"profile",
",",
"instance",
"=",
"record",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"record",
"=",
"form",
".",
"save",
"(",
")",
"record",
".",
"body",
"=",
"record",
".",
"body",
".",
"replace",
"(",
"'\\n'",
",",
"' <br />'",
")",
"record",
".",
"save",
"(",
")",
"record",
".",
"set_user_from_request",
"(",
"request",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'news_index'",
")",
")",
"else",
":",
"form",
"=",
"UpdateRecordForm",
"(",
"user",
"=",
"profile",
")",
"if",
"(",
"response_format",
"==",
"'rss'",
")",
":",
"return",
"ObjectFeed",
"(",
"title",
"=",
"_",
"(",
"'All Activity'",
")",
",",
"link",
"=",
"request",
".",
"path",
",",
"description",
"=",
"_",
"(",
"'Updates on activity in your Tree.io'",
")",
",",
"objects",
"=",
"updates",
")",
"(",
"request",
")",
"context",
"=",
"_get_default_context",
"(",
"request",
")",
"context",
".",
"update",
"(",
"{",
"'form'",
":",
"form",
",",
"'updates'",
":",
"updates",
",",
"'profile'",
":",
"profile",
"}",
")",
"return",
"render_to_response",
"(",
"'news/index'",
",",
"context",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
index -> int like find but raises valueerror when the substring is not found .
|
train
| false
|
11,319
|
def get_dbname(sockfile):
if (sockfile in DEFAULT_SOCKFILES):
return 'default'
m = re.search('/mysql-(.+)/[^.]+\\.sock$', sockfile)
if (not m):
utils.err(("error: couldn't guess the name of the DB for " + sockfile))
return None
return m.group(1)
|
[
"def",
"get_dbname",
"(",
"sockfile",
")",
":",
"if",
"(",
"sockfile",
"in",
"DEFAULT_SOCKFILES",
")",
":",
"return",
"'default'",
"m",
"=",
"re",
".",
"search",
"(",
"'/mysql-(.+)/[^.]+\\\\.sock$'",
",",
"sockfile",
")",
"if",
"(",
"not",
"m",
")",
":",
"utils",
".",
"err",
"(",
"(",
"\"error: couldn't guess the name of the DB for \"",
"+",
"sockfile",
")",
")",
"return",
"None",
"return",
"m",
".",
"group",
"(",
"1",
")"
] |
returns the name of the db based on the path to the socket file .
|
train
| false
|
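A self-contained check of the regex above; the socket path and DB name here are made-up illustrations, not values from the dataset:

import re
# Per-instance socket paths like /var/run/mysql-<name>/mysqld.sock
# map to the DB name captured by the first group.
m = re.search(r'/mysql-(.+)/[^.]+\.sock$', '/var/run/mysql-orders/mysqld.sock')
print(m.group(1))  # -> orders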
11,320
|
def shelter_type():
output = s3_rest_controller()
return output
|
[
"def",
"shelter_type",
"(",
")",
":",
"output",
"=",
"s3_rest_controller",
"(",
")",
"return",
"output"
] |
restful crud controller list / add shelter types .
|
train
| false
|
11,321
|
def find_peaks_cwt(vector, widths, wavelet=None, max_distances=None, gap_thresh=None, min_length=None, min_snr=1, noise_perc=10):
widths = np.asarray(widths)
if (gap_thresh is None):
gap_thresh = np.ceil(widths[0])
if (max_distances is None):
max_distances = (widths / 4.0)
if (wavelet is None):
wavelet = ricker
cwt_dat = cwt(vector, wavelet, widths)
ridge_lines = _identify_ridge_lines(cwt_dat, max_distances, gap_thresh)
filtered = _filter_ridge_lines(cwt_dat, ridge_lines, min_length=min_length, min_snr=min_snr, noise_perc=noise_perc)
max_locs = np.asarray([x[1][0] for x in filtered])
max_locs.sort()
return max_locs
|
[
"def",
"find_peaks_cwt",
"(",
"vector",
",",
"widths",
",",
"wavelet",
"=",
"None",
",",
"max_distances",
"=",
"None",
",",
"gap_thresh",
"=",
"None",
",",
"min_length",
"=",
"None",
",",
"min_snr",
"=",
"1",
",",
"noise_perc",
"=",
"10",
")",
":",
"widths",
"=",
"np",
".",
"asarray",
"(",
"widths",
")",
"if",
"(",
"gap_thresh",
"is",
"None",
")",
":",
"gap_thresh",
"=",
"np",
".",
"ceil",
"(",
"widths",
"[",
"0",
"]",
")",
"if",
"(",
"max_distances",
"is",
"None",
")",
":",
"max_distances",
"=",
"(",
"widths",
"/",
"4.0",
")",
"if",
"(",
"wavelet",
"is",
"None",
")",
":",
"wavelet",
"=",
"ricker",
"cwt_dat",
"=",
"cwt",
"(",
"vector",
",",
"wavelet",
",",
"widths",
")",
"ridge_lines",
"=",
"_identify_ridge_lines",
"(",
"cwt_dat",
",",
"max_distances",
",",
"gap_thresh",
")",
"filtered",
"=",
"_filter_ridge_lines",
"(",
"cwt_dat",
",",
"ridge_lines",
",",
"min_length",
"=",
"min_length",
",",
"min_snr",
"=",
"min_snr",
",",
"noise_perc",
"=",
"noise_perc",
")",
"max_locs",
"=",
"np",
".",
"asarray",
"(",
"[",
"x",
"[",
"1",
"]",
"[",
"0",
"]",
"for",
"x",
"in",
"filtered",
"]",
")",
"max_locs",
".",
"sort",
"(",
")",
"return",
"max_locs"
] |
attempt to find the peaks in a 1-d array .
|
train
| false
|
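The routine above ships as a public SciPy function; a small hedged demo, assuming scipy and numpy are installed:

import numpy as np
from scipy.signal import find_peaks_cwt  # public entry point for the code above
x = np.linspace(0, 6 * np.pi, 600)
# Peaks of sin(x) over [0, 6*pi] fall near pi/2, 5*pi/2, 9*pi/2.
peak_indices = find_peaks_cwt(np.sin(x), widths=np.arange(5, 20))
print(peak_indices)  # indices near the maxima of sin(x)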
11,322
|
def extract_stack(f=None, limit=None):
if (f is None):
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while ((f is not None) and ((limit is None) or (n < limit))):
lineno = f.f_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append((filename, lineno, name, line))
f = f.f_back
n = (n + 1)
list.reverse()
return list
|
[
"def",
"extract_stack",
"(",
"f",
"=",
"None",
",",
"limit",
"=",
"None",
")",
":",
"if",
"(",
"f",
"is",
"None",
")",
":",
"try",
":",
"raise",
"ZeroDivisionError",
"except",
"ZeroDivisionError",
":",
"f",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
".",
"tb_frame",
".",
"f_back",
"if",
"(",
"limit",
"is",
"None",
")",
":",
"if",
"hasattr",
"(",
"sys",
",",
"'tracebacklimit'",
")",
":",
"limit",
"=",
"sys",
".",
"tracebacklimit",
"list",
"=",
"[",
"]",
"n",
"=",
"0",
"while",
"(",
"(",
"f",
"is",
"not",
"None",
")",
"and",
"(",
"(",
"limit",
"is",
"None",
")",
"or",
"(",
"n",
"<",
"limit",
")",
")",
")",
":",
"lineno",
"=",
"f",
".",
"f_lineno",
"co",
"=",
"f",
".",
"f_code",
"filename",
"=",
"co",
".",
"co_filename",
"name",
"=",
"co",
".",
"co_name",
"linecache",
".",
"checkcache",
"(",
"filename",
")",
"line",
"=",
"linecache",
".",
"getline",
"(",
"filename",
",",
"lineno",
",",
"f",
".",
"f_globals",
")",
"if",
"line",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"else",
":",
"line",
"=",
"None",
"list",
".",
"append",
"(",
"(",
"filename",
",",
"lineno",
",",
"name",
",",
"line",
")",
")",
"f",
"=",
"f",
".",
"f_back",
"n",
"=",
"(",
"n",
"+",
"1",
")",
"list",
".",
"reverse",
"(",
")",
"return",
"list"
] |
extract the raw traceback from the current stack frame .
|
train
| true
|
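The stdlib exposes the same behavior; a minimal sketch with traceback.extract_stack:

import traceback
# Summarize the current call stack, most recent frame last.
def inner():
    for frame in traceback.extract_stack(limit=5):
        print(frame.filename, frame.lineno, frame.name)
inner()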
11,324
|
def getSquareValuesFromPoint(pixelDictionary, point):
return getSquareValues(pixelDictionary, int(round(point.real)), int(round(point.imag)))
|
[
"def",
"getSquareValuesFromPoint",
"(",
"pixelDictionary",
",",
"point",
")",
":",
"return",
"getSquareValues",
"(",
"pixelDictionary",
",",
"int",
"(",
"round",
"(",
"point",
".",
"real",
")",
")",
",",
"int",
"(",
"round",
"(",
"point",
".",
"imag",
")",
")",
")"
] |
get a list of the values in a square around the point .
|
train
| false
|
11,325
|
def create_or_update_draft(exp_id, user_id, change_list, exp_version, current_datetime):
exp_user_data = user_models.ExplorationUserDataModel.get(user_id, exp_id)
if (exp_user_data and exp_user_data.draft_change_list and (exp_user_data.draft_change_list_last_updated > current_datetime)):
return
updated_exploration = apply_change_list(exp_id, change_list)
updated_exploration.validate(strict=False)
if (exp_user_data is None):
exp_user_data = user_models.ExplorationUserDataModel.create(user_id, exp_id)
exp_user_data.draft_change_list = change_list
exp_user_data.draft_change_list_last_updated = current_datetime
exp_user_data.draft_change_list_exp_version = exp_version
exp_user_data.put()
|
[
"def",
"create_or_update_draft",
"(",
"exp_id",
",",
"user_id",
",",
"change_list",
",",
"exp_version",
",",
"current_datetime",
")",
":",
"exp_user_data",
"=",
"user_models",
".",
"ExplorationUserDataModel",
".",
"get",
"(",
"user_id",
",",
"exp_id",
")",
"if",
"(",
"exp_user_data",
"and",
"exp_user_data",
".",
"draft_change_list",
"and",
"(",
"exp_user_data",
".",
"draft_change_list_last_updated",
">",
"current_datetime",
")",
")",
":",
"return",
"updated_exploration",
"=",
"apply_change_list",
"(",
"exp_id",
",",
"change_list",
")",
"updated_exploration",
".",
"validate",
"(",
"strict",
"=",
"False",
")",
"if",
"(",
"exp_user_data",
"is",
"None",
")",
":",
"exp_user_data",
"=",
"user_models",
".",
"ExplorationUserDataModel",
".",
"create",
"(",
"user_id",
",",
"exp_id",
")",
"exp_user_data",
".",
"draft_change_list",
"=",
"change_list",
"exp_user_data",
".",
"draft_change_list_last_updated",
"=",
"current_datetime",
"exp_user_data",
".",
"draft_change_list_exp_version",
"=",
"exp_version",
"exp_user_data",
".",
"put",
"(",
")"
] |
create a draft with the given change list .
|
train
| false
|
11,326
|
def validate_station(station):
if (station is None):
return
station = station.replace('.shtml', '')
if (not re.fullmatch('ID[A-Z]\\d\\d\\d\\d\\d\\.\\d\\d\\d\\d\\d', station)):
raise vol.error.Invalid('Malformed station ID')
return station
|
[
"def",
"validate_station",
"(",
"station",
")",
":",
"if",
"(",
"station",
"is",
"None",
")",
":",
"return",
"station",
"=",
"station",
".",
"replace",
"(",
"'.shtml'",
",",
"''",
")",
"if",
"(",
"not",
"re",
".",
"fullmatch",
"(",
"'ID[A-Z]\\\\d\\\\d\\\\d\\\\d\\\\d\\\\.\\\\d\\\\d\\\\d\\\\d\\\\d'",
",",
"station",
")",
")",
":",
"raise",
"vol",
".",
"error",
".",
"Invalid",
"(",
"'Malformed station ID'",
")",
"return",
"station"
] |
check that the station id is well-formed .
|
train
| false
|
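The ID shape enforced above, exercised directly; the sample IDs are illustrative:

import re
# ID + one uppercase letter + five digits + '.' + five digits.
pattern = r'ID[A-Z]\d\d\d\d\d\.\d\d\d\d\d'
print(bool(re.fullmatch(pattern, 'IDN60901.94767')))  # True
print(bool(re.fullmatch(pattern, 'IDN60901')))        # False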
11,327
|
def prompt_for_exit():
g.message = ((c.r + 'Press ctrl-c again to exit') + c.w)
g.content = content.generate_songlist_display()
screen.update()
try:
userinput = input(((c.r + ' > ') + c.w))
except (KeyboardInterrupt, EOFError):
commands.misc.quits(showlogo=False)
return userinput
|
[
"def",
"prompt_for_exit",
"(",
")",
":",
"g",
".",
"message",
"=",
"(",
"(",
"c",
".",
"r",
"+",
"'Press ctrl-c again to exit'",
")",
"+",
"c",
".",
"w",
")",
"g",
".",
"content",
"=",
"content",
".",
"generate_songlist_display",
"(",
")",
"screen",
".",
"update",
"(",
")",
"try",
":",
"userinput",
"=",
"input",
"(",
"(",
"(",
"c",
".",
"r",
"+",
"' > '",
")",
"+",
"c",
".",
"w",
")",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"EOFError",
")",
":",
"commands",
".",
"misc",
".",
"quits",
"(",
"showlogo",
"=",
"False",
")",
"return",
"userinput"
] |
ask for exit confirmation .
|
train
| false
|
11,330
|
def parse_ssh_path(repo):
match = re.search('^ssh://([^/]+)(/.*)$', repo)
if match:
return match.groups()
else:
raise error.PackageUploadError(('Incorrect SSH path in settings: %s' % repo))
|
[
"def",
"parse_ssh_path",
"(",
"repo",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"'^ssh://([^/]+)(/.*)$'",
",",
"repo",
")",
"if",
"match",
":",
"return",
"match",
".",
"groups",
"(",
")",
"else",
":",
"raise",
"error",
".",
"PackageUploadError",
"(",
"(",
"'Incorrect SSH path in settings: %s'",
"%",
"repo",
")",
")"
] |
parse an ssh url :type repo: string .
|
train
| false
|
11,331
|
def remove_type(type_):
declaration = get_type(type_)
del TYPE_MAP[type_]
return declaration
|
[
"def",
"remove_type",
"(",
"type_",
")",
":",
"declaration",
"=",
"get_type",
"(",
"type_",
")",
"del",
"TYPE_MAP",
"[",
"type_",
"]",
"return",
"declaration"
] |
removes the custom type declaration .
|
train
| false
|
11,333
|
def expand_dims(a, axis):
shape = a.shape
if (axis < 0):
axis = ((axis + len(shape)) + 1)
return a.reshape(((shape[:axis] + (1,)) + shape[axis:]))
|
[
"def",
"expand_dims",
"(",
"a",
",",
"axis",
")",
":",
"shape",
"=",
"a",
".",
"shape",
"if",
"(",
"axis",
"<",
"0",
")",
":",
"axis",
"=",
"(",
"(",
"axis",
"+",
"len",
"(",
"shape",
")",
")",
"+",
"1",
")",
"return",
"a",
".",
"reshape",
"(",
"(",
"(",
"shape",
"[",
":",
"axis",
"]",
"+",
"(",
"1",
",",
")",
")",
"+",
"shape",
"[",
"axis",
":",
"]",
")",
")"
] |
adds a 1-sized dimension at index "dim" .
|
train
| false
|
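The reshape trick above, applied by hand with NumPy to show the axis normalization:

import numpy as np
# axis=-1 normalizes to len(shape), so a trailing length-1 dimension is added.
a = np.arange(6).reshape(2, 3)
shape, axis = a.shape, -1
if axis < 0:
    axis = axis + len(shape) + 1
print(a.reshape(shape[:axis] + (1,) + shape[axis:]).shape)  # (2, 3, 1)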
11,334
|
@require_chanmsg
@require_privilege(OP, u'You are not a channel operator.')
@commands(u'ban')
@priority(u'high')
def ban(bot, trigger):
if (bot.privileges[trigger.sender][bot.nick] < HALFOP):
return bot.reply(u"I'm not a channel operator!")
text = trigger.group().split()
argc = len(text)
if (argc < 2):
return
opt = Identifier(text[1])
banmask = opt
channel = trigger.sender
if (not opt.is_nick()):
if (argc < 3):
return
channel = opt
banmask = text[2]
banmask = configureHostMask(banmask)
if (banmask == u''):
return
bot.write([u'MODE', channel, u'+b', banmask])
|
[
"@",
"require_chanmsg",
"@",
"require_privilege",
"(",
"OP",
",",
"u'You are not a channel operator.'",
")",
"@",
"commands",
"(",
"u'ban'",
")",
"@",
"priority",
"(",
"u'high'",
")",
"def",
"ban",
"(",
"bot",
",",
"trigger",
")",
":",
"if",
"(",
"bot",
".",
"privileges",
"[",
"trigger",
".",
"sender",
"]",
"[",
"bot",
".",
"nick",
"]",
"<",
"HALFOP",
")",
":",
"return",
"bot",
".",
"reply",
"(",
"u\"I'm not a channel operator!\"",
")",
"text",
"=",
"trigger",
".",
"group",
"(",
")",
".",
"split",
"(",
")",
"argc",
"=",
"len",
"(",
"text",
")",
"if",
"(",
"argc",
"<",
"2",
")",
":",
"return",
"opt",
"=",
"Identifier",
"(",
"text",
"[",
"1",
"]",
")",
"banmask",
"=",
"opt",
"channel",
"=",
"trigger",
".",
"sender",
"if",
"(",
"not",
"opt",
".",
"is_nick",
"(",
")",
")",
":",
"if",
"(",
"argc",
"<",
"3",
")",
":",
"return",
"channel",
"=",
"opt",
"banmask",
"=",
"text",
"[",
"2",
"]",
"banmask",
"=",
"configureHostMask",
"(",
"banmask",
")",
"if",
"(",
"banmask",
"==",
"u''",
")",
":",
"return",
"bot",
".",
"write",
"(",
"[",
"u'MODE'",
",",
"channel",
",",
"u'+b'",
",",
"banmask",
"]",
")"
] |
bans a twitter account from using the aoa tool .
|
train
| false
|
11,335
|
def set_close_exec(fd):
raise NotImplementedError()
|
[
"def",
"set_close_exec",
"(",
"fd",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
sets the close-on-exec bit for a file descriptor .
|
train
| false
|
11,337
|
def head_tail(line):
generator = (t.strip() for t in line.split(None, 1))
head = next(generator).strip()
tail = ''
try:
tail = next(generator).strip()
except StopIteration:
pass
return (head, tail)
|
[
"def",
"head_tail",
"(",
"line",
")",
":",
"generator",
"=",
"(",
"t",
".",
"strip",
"(",
")",
"for",
"t",
"in",
"line",
".",
"split",
"(",
"None",
",",
"1",
")",
")",
"head",
"=",
"next",
"(",
"generator",
")",
".",
"strip",
"(",
")",
"tail",
"=",
"''",
"try",
":",
"tail",
"=",
"next",
"(",
"generator",
")",
".",
"strip",
"(",
")",
"except",
"StopIteration",
":",
"pass",
"return",
"(",
"head",
",",
"tail",
")"
] |
returns the first word in line and the rest of line or none if the line is too short .
|
train
| false
|
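The first-word split that head_tail relies on, shown directly on a sample line:

# split(None, 1) splits on the first run of whitespace and skips leading blanks.
line = '  GET   /index.html HTTP/1.1  '
parts = line.split(None, 1)
head, tail = parts[0].strip(), (parts[1].strip() if len(parts) > 1 else '')
print((head, tail))  # ('GET', '/index.html HTTP/1.1')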
11,339
|
def negative_edge_cycle(G, weight='weight'):
newnode = generate_unique_node()
G.add_edges_from([(newnode, n) for n in G])
try:
bellman_ford_predecessor_and_distance(G, newnode, weight)
except nx.NetworkXUnbounded:
return True
finally:
G.remove_node(newnode)
return False
|
[
"def",
"negative_edge_cycle",
"(",
"G",
",",
"weight",
"=",
"'weight'",
")",
":",
"newnode",
"=",
"generate_unique_node",
"(",
")",
"G",
".",
"add_edges_from",
"(",
"[",
"(",
"newnode",
",",
"n",
")",
"for",
"n",
"in",
"G",
"]",
")",
"try",
":",
"bellman_ford_predecessor_and_distance",
"(",
"G",
",",
"newnode",
",",
"weight",
")",
"except",
"nx",
".",
"NetworkXUnbounded",
":",
"return",
"True",
"finally",
":",
"G",
".",
"remove_node",
"(",
"newnode",
")",
"return",
"False"
] |
return true if there exists a negative edge cycle anywhere in g .
|
train
| false
|
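The same check is available through the public NetworkX API; a tiny demo graph whose cycle sums to -2:

import networkx as nx
# Cycle 0 -> 1 -> 2 -> 0 has total weight 2 - 3 - 1 = -2.
G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 2), (1, 2, -3), (2, 0, -1)])
print(nx.negative_edge_cycle(G))  # True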
11,340
|
def setup_work_direc():
work_dir = tempfile.mkdtemp('work')
backup_dir = os.path.join(work_dir, 'backup')
return mock.MagicMock(work_dir=work_dir, backup_dir=backup_dir, temp_checkpoint_dir=os.path.join(work_dir, 'temp'), in_progress_dir=os.path.join(backup_dir, 'in_progress_dir'))
|
[
"def",
"setup_work_direc",
"(",
")",
":",
"work_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"'work'",
")",
"backup_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"work_dir",
",",
"'backup'",
")",
"return",
"mock",
".",
"MagicMock",
"(",
"work_dir",
"=",
"work_dir",
",",
"backup_dir",
"=",
"backup_dir",
",",
"temp_checkpoint_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"work_dir",
",",
"'temp'",
")",
",",
"in_progress_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"backup_dir",
",",
"'in_progress_dir'",
")",
")"
] |
setup directories .
|
train
| false
|
11,341
|
def softplus(x):
return theano.tensor.nnet.softplus(x)
|
[
"def",
"softplus",
"(",
"x",
")",
":",
"return",
"theano",
".",
"tensor",
".",
"nnet",
".",
"softplus",
"(",
"x",
")"
] |
softplus of a tensor .
|
train
| false
|
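In plain math terms, softplus(x) = log(1 + exp(x)); a one-line stdlib check (the snippet defers to Theano's implementation):

import math
print(math.log1p(math.exp(0.0)))  # 0.6931... == softplus(0)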
11,342
|
def getLogMessage(commitSHA):
output = check_output(['git', 'log', '--format=%B', '-n', '1', commitSHA])
return output.strip()
|
[
"def",
"getLogMessage",
"(",
"commitSHA",
")",
":",
"output",
"=",
"check_output",
"(",
"[",
"'git'",
",",
"'log'",
",",
"'--format=%B'",
",",
"'-n'",
",",
"'1'",
",",
"commitSHA",
"]",
")",
"return",
"output",
".",
"strip",
"(",
")"
] |
get the log message for a given commit hash .
|
train
| false
|
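The same invocation, assuming it runs inside a git checkout; on Python 3 the output is bytes and needs decoding:

from subprocess import check_output
msg = check_output(['git', 'log', '--format=%B', '-n', '1', 'HEAD'])
print(msg.strip().decode())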
11,343
|
def default_logging_config():
remove_null_handler()
logging.basicConfig(level=logging.INFO, stream=sys.stderr, format='%(asctime)s %(levelname)s: %(message)s')
|
[
"def",
"default_logging_config",
"(",
")",
":",
"remove_null_handler",
"(",
")",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"logging",
".",
"INFO",
",",
"stream",
"=",
"sys",
".",
"stderr",
",",
"format",
"=",
"'%(asctime)s %(levelname)s: %(message)s'",
")"
] |
set up the default dulwich loggers .
|
train
| false
|
11,344
|
@composite
def related_deployments_strategy(draw, number_of_deployments):
node_uuid_pool = draw(node_uuid_pool_strategy())
deployments = set()
while True:
deployments.add(draw(deployment_strategy(node_uuid_pool=node_uuid_pool)))
if (len(deployments) == number_of_deployments):
return tuple(deployments)
|
[
"@",
"composite",
"def",
"related_deployments_strategy",
"(",
"draw",
",",
"number_of_deployments",
")",
":",
"node_uuid_pool",
"=",
"draw",
"(",
"node_uuid_pool_strategy",
"(",
")",
")",
"deployments",
"=",
"set",
"(",
")",
"while",
"True",
":",
"deployments",
".",
"add",
"(",
"draw",
"(",
"deployment_strategy",
"(",
"node_uuid_pool",
"=",
"node_uuid_pool",
")",
")",
")",
"if",
"(",
"len",
"(",
"deployments",
")",
"==",
"number_of_deployments",
")",
":",
"return",
"tuple",
"(",
"deployments",
")"
] |
a strategy to generate more than 1 unique deployments that are related .
|
train
| false
|
11,345
|
def calculate_tree_hash(body):
chunks = []
required_chunk_size = (1024 * 1024)
sha256 = hashlib.sha256
for chunk in iter((lambda : body.read(required_chunk_size)), ''):
chunks.append(sha256(chunk).digest())
if (not chunks):
return sha256('').hexdigest()
while (len(chunks) > 1):
new_chunks = []
for (first, second) in _in_pairs(chunks):
if (second is not None):
new_chunks.append(sha256((first + second)).digest())
else:
new_chunks.append(first)
chunks = new_chunks
return binascii.hexlify(chunks[0]).decode('ascii')
|
[
"def",
"calculate_tree_hash",
"(",
"body",
")",
":",
"chunks",
"=",
"[",
"]",
"required_chunk_size",
"=",
"(",
"1024",
"*",
"1024",
")",
"sha256",
"=",
"hashlib",
".",
"sha256",
"for",
"chunk",
"in",
"iter",
"(",
"(",
"lambda",
":",
"body",
".",
"read",
"(",
"required_chunk_size",
")",
")",
",",
"''",
")",
":",
"chunks",
".",
"append",
"(",
"sha256",
"(",
"chunk",
")",
".",
"digest",
"(",
")",
")",
"if",
"(",
"not",
"chunks",
")",
":",
"return",
"sha256",
"(",
"''",
")",
".",
"hexdigest",
"(",
")",
"while",
"(",
"len",
"(",
"chunks",
")",
">",
"1",
")",
":",
"new_chunks",
"=",
"[",
"]",
"for",
"(",
"first",
",",
"second",
")",
"in",
"_in_pairs",
"(",
"chunks",
")",
":",
"if",
"(",
"second",
"is",
"not",
"None",
")",
":",
"new_chunks",
".",
"append",
"(",
"sha256",
"(",
"(",
"first",
"+",
"second",
")",
")",
".",
"digest",
"(",
")",
")",
"else",
":",
"new_chunks",
".",
"append",
"(",
"first",
")",
"chunks",
"=",
"new_chunks",
"return",
"binascii",
".",
"hexlify",
"(",
"chunks",
"[",
"0",
"]",
")",
".",
"decode",
"(",
"'ascii'",
")"
] |
calculate a tree hash checksum .
|
train
| false
|
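A toy-scale sketch of the pairing logic above, using 1-byte chunks instead of 1 MiB; leaf digests are paired and re-hashed until a single root remains, with an odd leftover carried up unchanged:

import hashlib
import io
body = io.BytesIO(b'abcd')
chunks = [hashlib.sha256(c).digest() for c in iter(lambda: body.read(1), b'')]
while len(chunks) > 1:
    pairs = [chunks[i:i + 2] for i in range(0, len(chunks), 2)]
    chunks = [hashlib.sha256(p[0] + p[1]).digest() if len(p) == 2 else p[0]
              for p in pairs]
print(chunks[0].hex())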
11,347
|
def _get_free_lun(client, host, multiattach_enabled, mappings):
if (not _is_host_full(client, host)):
unused_luns = _get_unused_lun_ids(mappings)
if unused_luns:
chosen_lun = random.sample(unused_luns, 1)
return chosen_lun[0]
elif multiattach_enabled:
msg = _('No unused LUN IDs are available on the host; multiattach is enabled which requires that all LUN IDs to be unique across the entire host group.')
raise exception.NetAppDriverException(msg)
used_lun_counts = _get_used_lun_id_counter(mappings)
for (lun_id, __) in reversed(used_lun_counts.most_common()):
if _is_lun_id_available_on_host(client, host, lun_id):
return lun_id
msg = _('No free LUN IDs left. Maximum number of volumes that can be attached to host (%s) has been exceeded.')
raise exception.NetAppDriverException((msg % utils.MAX_LUNS_PER_HOST))
|
[
"def",
"_get_free_lun",
"(",
"client",
",",
"host",
",",
"multiattach_enabled",
",",
"mappings",
")",
":",
"if",
"(",
"not",
"_is_host_full",
"(",
"client",
",",
"host",
")",
")",
":",
"unused_luns",
"=",
"_get_unused_lun_ids",
"(",
"mappings",
")",
"if",
"unused_luns",
":",
"chosen_lun",
"=",
"random",
".",
"sample",
"(",
"unused_luns",
",",
"1",
")",
"return",
"chosen_lun",
"[",
"0",
"]",
"elif",
"multiattach_enabled",
":",
"msg",
"=",
"_",
"(",
"'No unused LUN IDs are available on the host; multiattach is enabled which requires that all LUN IDs to be unique across the entire host group.'",
")",
"raise",
"exception",
".",
"NetAppDriverException",
"(",
"msg",
")",
"used_lun_counts",
"=",
"_get_used_lun_id_counter",
"(",
"mappings",
")",
"for",
"(",
"lun_id",
",",
"__",
")",
"in",
"reversed",
"(",
"used_lun_counts",
".",
"most_common",
"(",
")",
")",
":",
"if",
"_is_lun_id_available_on_host",
"(",
"client",
",",
"host",
",",
"lun_id",
")",
":",
"return",
"lun_id",
"msg",
"=",
"_",
"(",
"'No free LUN IDs left. Maximum number of volumes that can be attached to host (%s) has been exceeded.'",
")",
"raise",
"exception",
".",
"NetAppDriverException",
"(",
"(",
"msg",
"%",
"utils",
".",
"MAX_LUNS_PER_HOST",
")",
")"
] |
returns least used lun id available on the given host .
|
train
| false
|
11,348
|
def fixLimits():
debug('*** Setting resource limits\n')
try:
rlimitTestAndSet(RLIMIT_NPROC, 8192)
rlimitTestAndSet(RLIMIT_NOFILE, 16384)
sysctlTestAndSet('fs.file-max', 10000)
sysctlTestAndSet('net.core.wmem_max', 16777216)
sysctlTestAndSet('net.core.rmem_max', 16777216)
sysctlTestAndSet('net.ipv4.tcp_rmem', '10240 87380 16777216')
sysctlTestAndSet('net.ipv4.tcp_wmem', '10240 87380 16777216')
sysctlTestAndSet('net.core.netdev_max_backlog', 5000)
sysctlTestAndSet('net.ipv4.neigh.default.gc_thresh1', 4096)
sysctlTestAndSet('net.ipv4.neigh.default.gc_thresh2', 8192)
sysctlTestAndSet('net.ipv4.neigh.default.gc_thresh3', 16384)
sysctlTestAndSet('net.ipv4.route.max_size', 32768)
sysctlTestAndSet('kernel.pty.max', 20000)
except Exception:
warn("*** Error setting resource limits. Mininet's performance may be affected.\n")
|
[
"def",
"fixLimits",
"(",
")",
":",
"debug",
"(",
"'*** Setting resource limits\\n'",
")",
"try",
":",
"rlimitTestAndSet",
"(",
"RLIMIT_NPROC",
",",
"8192",
")",
"rlimitTestAndSet",
"(",
"RLIMIT_NOFILE",
",",
"16384",
")",
"sysctlTestAndSet",
"(",
"'fs.file-max'",
",",
"10000",
")",
"sysctlTestAndSet",
"(",
"'net.core.wmem_max'",
",",
"16777216",
")",
"sysctlTestAndSet",
"(",
"'net.core.rmem_max'",
",",
"16777216",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.tcp_rmem'",
",",
"'10240 87380 16777216'",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.tcp_wmem'",
",",
"'10240 87380 16777216'",
")",
"sysctlTestAndSet",
"(",
"'net.core.netdev_max_backlog'",
",",
"5000",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.neigh.default.gc_thresh1'",
",",
"4096",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.neigh.default.gc_thresh2'",
",",
"8192",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.neigh.default.gc_thresh3'",
",",
"16384",
")",
"sysctlTestAndSet",
"(",
"'net.ipv4.route.max_size'",
",",
"32768",
")",
"sysctlTestAndSet",
"(",
"'kernel.pty.max'",
",",
"20000",
")",
"except",
"Exception",
":",
"warn",
"(",
"\"*** Error setting resource limits. Mininet's performance may be affected.\\n\"",
")"
] |
fix ridiculously small resource limits .
|
train
| false
|
11,349
|
@check_local_site_access
def user_file_attachment(request, file_attachment_uuid, username, local_site=None):
file_attachment = get_object_or_404(FileAttachment, uuid=file_attachment_uuid, user__username=username, local_site=local_site, file__isnull=False)
return redirect(file_attachment)
|
[
"@",
"check_local_site_access",
"def",
"user_file_attachment",
"(",
"request",
",",
"file_attachment_uuid",
",",
"username",
",",
"local_site",
"=",
"None",
")",
":",
"file_attachment",
"=",
"get_object_or_404",
"(",
"FileAttachment",
",",
"uuid",
"=",
"file_attachment_uuid",
",",
"user__username",
"=",
"username",
",",
"local_site",
"=",
"local_site",
",",
"file__isnull",
"=",
"False",
")",
"return",
"redirect",
"(",
"file_attachment",
")"
] |
redirect to the file attachments url given its uuid .
|
train
| false
|
11,350
|
@must_be_valid_project
@must_be_contributor_or_public
def node_identifiers_get(node, **kwargs):
if (not node.is_public):
raise HTTPError(http.BAD_REQUEST)
return {'doi': node.get_identifier_value('doi'), 'ark': node.get_identifier_value('ark')}
|
[
"@",
"must_be_valid_project",
"@",
"must_be_contributor_or_public",
"def",
"node_identifiers_get",
"(",
"node",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"node",
".",
"is_public",
")",
":",
"raise",
"HTTPError",
"(",
"http",
".",
"BAD_REQUEST",
")",
"return",
"{",
"'doi'",
":",
"node",
".",
"get_identifier_value",
"(",
"'doi'",
")",
",",
"'ark'",
":",
"node",
".",
"get_identifier_value",
"(",
"'ark'",
")",
"}"
] |
retrieve identifiers for a node .
|
train
| false
|
11,351
|
def ellip_normal(h2, k2, n, p):
with np.errstate(all='ignore'):
return _ellip_normal_vec(h2, k2, n, p)
|
[
"def",
"ellip_normal",
"(",
"h2",
",",
"k2",
",",
"n",
",",
"p",
")",
":",
"with",
"np",
".",
"errstate",
"(",
"all",
"=",
"'ignore'",
")",
":",
"return",
"_ellip_normal_vec",
"(",
"h2",
",",
"k2",
",",
"n",
",",
"p",
")"
] |
ellipsoidal harmonic normalization constants gamma^p_n the normalization constant is defined as .
|
train
| false
|
11,352
|
def top_contributors_aoa(start=None, end=None, locale=None, count=10, page=1):
query = ReplyMetricsMappingType.search().facet('creator_id', filtered=True, size=BIG_NUMBER)
locale = (locale.split('-')[0] if locale else None)
query = _apply_filters(query, start, end, locale)
return _get_creator_counts(query, count, page)
|
[
"def",
"top_contributors_aoa",
"(",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"locale",
"=",
"None",
",",
"count",
"=",
"10",
",",
"page",
"=",
"1",
")",
":",
"query",
"=",
"ReplyMetricsMappingType",
".",
"search",
"(",
")",
".",
"facet",
"(",
"'creator_id'",
",",
"filtered",
"=",
"True",
",",
"size",
"=",
"BIG_NUMBER",
")",
"locale",
"=",
"(",
"locale",
".",
"split",
"(",
"'-'",
")",
"[",
"0",
"]",
"if",
"locale",
"else",
"None",
")",
"query",
"=",
"_apply_filters",
"(",
"query",
",",
"start",
",",
"end",
",",
"locale",
")",
"return",
"_get_creator_counts",
"(",
"query",
",",
"count",
",",
"page",
")"
] |
get the top army of awesome contributors .
|
train
| false
|
11,353
|
def p_selection_statement_1(t):
pass
|
[
"def",
"p_selection_statement_1",
"(",
"t",
")",
":",
"pass"
] |
selection_statement : if lparen expression rparen statement .
|
train
| false
|
11,357
|
def get_payload_struct(context, builder, set_type, ptr):
payload_type = types.SetPayload(set_type)
ptrty = context.get_data_type(payload_type).as_pointer()
payload = builder.bitcast(ptr, ptrty)
return context.make_data_helper(builder, payload_type, ref=payload)
|
[
"def",
"get_payload_struct",
"(",
"context",
",",
"builder",
",",
"set_type",
",",
"ptr",
")",
":",
"payload_type",
"=",
"types",
".",
"SetPayload",
"(",
"set_type",
")",
"ptrty",
"=",
"context",
".",
"get_data_type",
"(",
"payload_type",
")",
".",
"as_pointer",
"(",
")",
"payload",
"=",
"builder",
".",
"bitcast",
"(",
"ptr",
",",
"ptrty",
")",
"return",
"context",
".",
"make_data_helper",
"(",
"builder",
",",
"payload_type",
",",
"ref",
"=",
"payload",
")"
] |
given a set value and type .
|
train
| false
|
11,358
|
def loadImageSeries(filelist=None):
if ((filelist is None) or (len(filelist) < 1)):
return
imglist = []
for img in filelist:
if (not os.path.exists(img)):
print(('unable to find %s' % img))
continue
try:
im = Image.open(img).convert2byte()
except:
if (not isSpiderImage(img)):
print((img + ' is not a Spider image file'))
continue
im.info['filename'] = img
imglist.append(im)
return imglist
|
[
"def",
"loadImageSeries",
"(",
"filelist",
"=",
"None",
")",
":",
"if",
"(",
"(",
"filelist",
"is",
"None",
")",
"or",
"(",
"len",
"(",
"filelist",
")",
"<",
"1",
")",
")",
":",
"return",
"imglist",
"=",
"[",
"]",
"for",
"img",
"in",
"filelist",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"img",
")",
")",
":",
"print",
"(",
"(",
"'unable to find %s'",
"%",
"img",
")",
")",
"continue",
"try",
":",
"im",
"=",
"Image",
".",
"open",
"(",
"img",
")",
".",
"convert2byte",
"(",
")",
"except",
":",
"if",
"(",
"not",
"isSpiderImage",
"(",
"img",
")",
")",
":",
"print",
"(",
"(",
"img",
"+",
"' is not a Spider image file'",
")",
")",
"continue",
"im",
".",
"info",
"[",
"'filename'",
"]",
"=",
"img",
"imglist",
".",
"append",
"(",
"im",
")",
"return",
"imglist"
] |
create a list of image .
|
train
| false
|
11,359
|
@Profiler.profile
def test_orm_query_cols_only(n):
session = Session(bind=engine)
for id_ in random.sample(ids, n):
session.query(Customer.id, Customer.name, Customer.description).filter((Customer.id == id_)).one()
|
[
"@",
"Profiler",
".",
"profile",
"def",
"test_orm_query_cols_only",
"(",
"n",
")",
":",
"session",
"=",
"Session",
"(",
"bind",
"=",
"engine",
")",
"for",
"id_",
"in",
"random",
".",
"sample",
"(",
"ids",
",",
"n",
")",
":",
"session",
".",
"query",
"(",
"Customer",
".",
"id",
",",
"Customer",
".",
"name",
",",
"Customer",
".",
"description",
")",
".",
"filter",
"(",
"(",
"Customer",
".",
"id",
"==",
"id_",
")",
")",
".",
"one",
"(",
")"
] |
test an orm query of only the entity columns .
|
train
| false
|
11,360
|
def to_ctypes(ty):
assert isinstance(ty, types.Type), ty
if (ty is types.none):
return None
def _convert_internal(ty):
if isinstance(ty, types.CPointer):
return ctypes.POINTER(_convert_internal(ty.dtype))
else:
return _TO_CTYPES.get(ty)
ctypeobj = _convert_internal(ty)
if (ctypeobj is None):
raise TypeError(("Cannot convert Numba type '%s' to ctypes type" % (ty,)))
return ctypeobj
|
[
"def",
"to_ctypes",
"(",
"ty",
")",
":",
"assert",
"isinstance",
"(",
"ty",
",",
"types",
".",
"Type",
")",
",",
"ty",
"if",
"(",
"ty",
"is",
"types",
".",
"none",
")",
":",
"return",
"None",
"def",
"_convert_internal",
"(",
"ty",
")",
":",
"if",
"isinstance",
"(",
"ty",
",",
"types",
".",
"CPointer",
")",
":",
"return",
"ctypes",
".",
"POINTER",
"(",
"_convert_internal",
"(",
"ty",
".",
"dtype",
")",
")",
"else",
":",
"return",
"_TO_CTYPES",
".",
"get",
"(",
"ty",
")",
"ctypeobj",
"=",
"_convert_internal",
"(",
"ty",
")",
"if",
"(",
"ctypeobj",
"is",
"None",
")",
":",
"raise",
"TypeError",
"(",
"(",
"\"Cannot convert Numba type '%s' to ctypes type\"",
"%",
"(",
"ty",
",",
")",
")",
")",
"return",
"ctypeobj"
] |
convert the given numba type to a ctypes type .
|
train
| false
|
11,361
|
def certify_callback(event, context):
lh = LambdaHandler()
return lh.update_certificate()
|
[
"def",
"certify_callback",
"(",
"event",
",",
"context",
")",
":",
"lh",
"=",
"LambdaHandler",
"(",
")",
"return",
"lh",
".",
"update_certificate",
"(",
")"
] |
load our lh settings and update our cert .
|
train
| false
|
11,362
|
def load_pack_index_file(path, f):
(contents, size) = _load_file_contents(f)
if (contents[:4] == '\xfftOc'):
version = struct.unpack('>L', contents[4:8])[0]
if (version == 2):
return PackIndex2(path, file=f, contents=contents, size=size)
else:
raise KeyError(('Unknown pack index format %d' % version))
else:
return PackIndex1(path, file=f, contents=contents, size=size)
|
[
"def",
"load_pack_index_file",
"(",
"path",
",",
"f",
")",
":",
"(",
"contents",
",",
"size",
")",
"=",
"_load_file_contents",
"(",
"f",
")",
"if",
"(",
"contents",
"[",
":",
"4",
"]",
"==",
"'\\xfftOc'",
")",
":",
"version",
"=",
"struct",
".",
"unpack",
"(",
"'>L'",
",",
"contents",
"[",
"4",
":",
"8",
"]",
")",
"[",
"0",
"]",
"if",
"(",
"version",
"==",
"2",
")",
":",
"return",
"PackIndex2",
"(",
"path",
",",
"file",
"=",
"f",
",",
"contents",
"=",
"contents",
",",
"size",
"=",
"size",
")",
"else",
":",
"raise",
"KeyError",
"(",
"(",
"'Unknown pack index format %d'",
"%",
"version",
")",
")",
"else",
":",
"return",
"PackIndex1",
"(",
"path",
",",
"file",
"=",
"f",
",",
"contents",
"=",
"contents",
",",
"size",
"=",
"size",
")"
] |
load an index file from a file-like object .
|
train
| false
|
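The version probe used above, reproduced with Python 3 bytes literals:

import struct
# v2+ pack indexes begin with a 4-byte magic, then a big-endian uint32 version.
header = b'\xfftOc' + struct.pack('>L', 2)
print(header[:4] == b'\xfftOc', struct.unpack('>L', header[4:8])[0])  # True 2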
11,364
|
def test_fixes():
def run():
header = get_pkg_data_contents(u'data/nonstandard_units.hdr', encoding=u'binary')
try:
w = wcs.WCS(header, translate_units=u'dhs')
except wcs.InvalidTransformError:
pass
else:
assert False, u'Expected InvalidTransformError'
with catch_warnings(wcs.FITSFixedWarning) as w:
run()
assert (len(w) == 2)
for item in w:
if (u'unitfix' in str(item.message)):
assert (u'Hz' in str(item.message))
assert (u'M/S' in str(item.message))
assert (u'm/s' in str(item.message))
|
[
"def",
"test_fixes",
"(",
")",
":",
"def",
"run",
"(",
")",
":",
"header",
"=",
"get_pkg_data_contents",
"(",
"u'data/nonstandard_units.hdr'",
",",
"encoding",
"=",
"u'binary'",
")",
"try",
":",
"w",
"=",
"wcs",
".",
"WCS",
"(",
"header",
",",
"translate_units",
"=",
"u'dhs'",
")",
"except",
"wcs",
".",
"InvalidTransformError",
":",
"pass",
"else",
":",
"assert",
"False",
",",
"u'Expected InvalidTransformError'",
"with",
"catch_warnings",
"(",
"wcs",
".",
"FITSFixedWarning",
")",
"as",
"w",
":",
"run",
"(",
")",
"assert",
"(",
"len",
"(",
"w",
")",
"==",
"2",
")",
"for",
"item",
"in",
"w",
":",
"if",
"(",
"u'unitfix'",
"in",
"str",
"(",
"item",
".",
"message",
")",
")",
":",
"assert",
"(",
"u'Hz'",
"in",
"str",
"(",
"item",
".",
"message",
")",
")",
"assert",
"(",
"u'M/S'",
"in",
"str",
"(",
"item",
".",
"message",
")",
")",
"assert",
"(",
"u'm/s'",
"in",
"str",
"(",
"item",
".",
"message",
")",
")"
] |
from github issue #36 .
|
train
| false
|
11,366
|
def write_root_rels(workbook):
root = Element('Relationships', xmlns=PKG_REL_NS)
relation_tag = ('{%s}Relationship' % PKG_REL_NS)
rel = Relationship(type='officeDocument', target=ARC_WORKBOOK, id='rId1')
root.append(rel.to_tree())
rel = Relationship('', target=ARC_CORE, id='rId2')
rel.type = ('%s/metadata/core-properties' % PKG_REL_NS)
root.append(rel.to_tree())
rel = Relationship('extended-properties', target=ARC_APP, id='rId3')
root.append(rel.to_tree())
if (workbook.vba_archive is not None):
arc = fromstring(workbook.vba_archive.read(ARC_ROOT_RELS))
rels = arc.findall(relation_tag)
rId = None
for rel in rels:
if (rel.get('Target') == ARC_CUSTOM_UI):
rId = rel.get('Id')
break
if (rId is not None):
vba = Relationship('', target=ARC_CUSTOM_UI, id=rId)
vba.type = CUSTOMUI_NS
root.append(vba.to_tree())
return tostring(root)
|
[
"def",
"write_root_rels",
"(",
"workbook",
")",
":",
"root",
"=",
"Element",
"(",
"'Relationships'",
",",
"xmlns",
"=",
"PKG_REL_NS",
")",
"relation_tag",
"=",
"(",
"'{%s}Relationship'",
"%",
"PKG_REL_NS",
")",
"rel",
"=",
"Relationship",
"(",
"type",
"=",
"'officeDocument'",
",",
"target",
"=",
"ARC_WORKBOOK",
",",
"id",
"=",
"'rId1'",
")",
"root",
".",
"append",
"(",
"rel",
".",
"to_tree",
"(",
")",
")",
"rel",
"=",
"Relationship",
"(",
"''",
",",
"target",
"=",
"ARC_CORE",
",",
"id",
"=",
"'rId2'",
")",
"rel",
".",
"type",
"=",
"(",
"'%s/metadata/core-properties'",
"%",
"PKG_REL_NS",
")",
"root",
".",
"append",
"(",
"rel",
".",
"to_tree",
"(",
")",
")",
"rel",
"=",
"Relationship",
"(",
"'extended-properties'",
",",
"target",
"=",
"ARC_APP",
",",
"id",
"=",
"'rId3'",
")",
"root",
".",
"append",
"(",
"rel",
".",
"to_tree",
"(",
")",
")",
"if",
"(",
"workbook",
".",
"vba_archive",
"is",
"not",
"None",
")",
":",
"arc",
"=",
"fromstring",
"(",
"workbook",
".",
"vba_archive",
".",
"read",
"(",
"ARC_ROOT_RELS",
")",
")",
"rels",
"=",
"arc",
".",
"findall",
"(",
"relation_tag",
")",
"rId",
"=",
"None",
"for",
"rel",
"in",
"rels",
":",
"if",
"(",
"rel",
".",
"get",
"(",
"'Target'",
")",
"==",
"ARC_CUSTOM_UI",
")",
":",
"rId",
"=",
"rel",
".",
"get",
"(",
"'Id'",
")",
"break",
"if",
"(",
"rId",
"is",
"not",
"None",
")",
":",
"vba",
"=",
"Relationship",
"(",
"''",
",",
"target",
"=",
"ARC_CUSTOM_UI",
",",
"id",
"=",
"rId",
")",
"vba",
".",
"type",
"=",
"CUSTOMUI_NS",
"root",
".",
"append",
"(",
"vba",
".",
"to_tree",
"(",
")",
")",
"return",
"tostring",
"(",
"root",
")"
] |
write the relationships xml .
|
train
| false
|
11,369
|
def load_default_config(ipython_dir=None):
if (ipython_dir is None):
ipython_dir = get_ipython_dir()
profile_dir = os.path.join(ipython_dir, 'profile_default')
config = Config()
for cf in Application._load_config_files('ipython_config', path=profile_dir):
config.update(cf)
return config
|
[
"def",
"load_default_config",
"(",
"ipython_dir",
"=",
"None",
")",
":",
"if",
"(",
"ipython_dir",
"is",
"None",
")",
":",
"ipython_dir",
"=",
"get_ipython_dir",
"(",
")",
"profile_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"ipython_dir",
",",
"'profile_default'",
")",
"config",
"=",
"Config",
"(",
")",
"for",
"cf",
"in",
"Application",
".",
"_load_config_files",
"(",
"'ipython_config'",
",",
"path",
"=",
"profile_dir",
")",
":",
"config",
".",
"update",
"(",
"cf",
")",
"return",
"config"
] |
load the default config file from the default ipython_dir .
|
train
| false
|
11,370
|
def _MakeSyncCall(service, call, request, response, config=None):
conn = _GetConnection()
if isinstance(request, datastore_pb.Query):
conn._set_request_read_policy(request, config)
conn._set_request_transaction(request)
rpc = conn.make_rpc_call(config, call, request, response)
conn.check_rpc_success(rpc)
return response
|
[
"def",
"_MakeSyncCall",
"(",
"service",
",",
"call",
",",
"request",
",",
"response",
",",
"config",
"=",
"None",
")",
":",
"conn",
"=",
"_GetConnection",
"(",
")",
"if",
"isinstance",
"(",
"request",
",",
"datastore_pb",
".",
"Query",
")",
":",
"conn",
".",
"_set_request_read_policy",
"(",
"request",
",",
"config",
")",
"conn",
".",
"_set_request_transaction",
"(",
"request",
")",
"rpc",
"=",
"conn",
".",
"make_rpc_call",
"(",
"config",
",",
"call",
",",
"request",
",",
"response",
")",
"conn",
".",
"check_rpc_success",
"(",
"rpc",
")",
"return",
"response"
] |
the apiproxy entry point for a synchronous api call .
|
train
| false
|
11,371
|
def digest_integer(m):
from hashlib import sha1
return string_to_int(sha1(int_to_string(m)).digest())
|
[
"def",
"digest_integer",
"(",
"m",
")",
":",
"from",
"hashlib",
"import",
"sha1",
"return",
"string_to_int",
"(",
"sha1",
"(",
"int_to_string",
"(",
"m",
")",
")",
".",
"digest",
"(",
")",
")"
] |
convert an integer into a string of bytes .
|
train
| false
|
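A stdlib-only sketch of the same idea (the snippet relies on ecdsa's int/string helpers): hash the integer's big-endian bytes, then read the digest back as an integer:

import hashlib
m = 123456789
data = m.to_bytes((m.bit_length() + 7) // 8, 'big')
print(int.from_bytes(hashlib.sha1(data).digest(), 'big'))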
11,372
|
def get_volume_metadata(volume):
volume_metadata = volume.get('volume_metadata', {})
return {item['key']: item['value'] for item in volume_metadata}
|
[
"def",
"get_volume_metadata",
"(",
"volume",
")",
":",
"volume_metadata",
"=",
"volume",
".",
"get",
"(",
"'volume_metadata'",
",",
"{",
"}",
")",
"return",
"{",
"item",
"[",
"'key'",
"]",
":",
"item",
"[",
"'value'",
"]",
"for",
"item",
"in",
"volume_metadata",
"}"
] |
return a dictionary of the metadata of the specified volume .
|
train
| false
|
11,373
|
def localTranslation():
if getattr(threadLocal, 'currentTranslation', None):
return threadLocal.currentTranslation
else:
return currentTranslation
|
[
"def",
"localTranslation",
"(",
")",
":",
"if",
"getattr",
"(",
"threadLocal",
",",
"'currentTranslation'",
",",
"None",
")",
":",
"return",
"threadLocal",
".",
"currentTranslation",
"else",
":",
"return",
"currentTranslation"
] |
return the translation local to this thread .
|
train
| false
|
11,374
|
def send_notif_for_after_purchase_organizer(user, invoice_id, order_url, event_name, buyer_email):
send_notification(user=user, action=NOTIF_TICKET_PURCHASED_ORGANIZER, title=NOTIFS[NOTIF_TICKET_PURCHASED_ORGANIZER]['title'].format(invoice_id=invoice_id, event_name=event_name, buyer_email=buyer_email), message=NOTIFS[NOTIF_TICKET_PURCHASED_ORGANIZER]['message'].format(order_url=order_url))
|
[
"def",
"send_notif_for_after_purchase_organizer",
"(",
"user",
",",
"invoice_id",
",",
"order_url",
",",
"event_name",
",",
"buyer_email",
")",
":",
"send_notification",
"(",
"user",
"=",
"user",
",",
"action",
"=",
"NOTIF_TICKET_PURCHASED_ORGANIZER",
",",
"title",
"=",
"NOTIFS",
"[",
"NOTIF_TICKET_PURCHASED_ORGANIZER",
"]",
"[",
"'title'",
"]",
".",
"format",
"(",
"invoice_id",
"=",
"invoice_id",
",",
"event_name",
"=",
"event_name",
",",
"buyer_email",
"=",
"buyer_email",
")",
",",
"message",
"=",
"NOTIFS",
"[",
"NOTIF_TICKET_PURCHASED_ORGANIZER",
"]",
"[",
"'message'",
"]",
".",
"format",
"(",
"order_url",
"=",
"order_url",
")",
")"
] |
send notification with order invoice link after purchase .
|
train
| false
|
11,375
|
def path_separator():
return PATH_SEPARATOR[_os.name]
|
[
"def",
"path_separator",
"(",
")",
":",
"return",
"PATH_SEPARATOR",
"[",
"_os",
".",
"name",
"]"
] |
always use / as path separator for consistency .
|
train
| false
|
11,380
|
def fill(text, width=70, **kwargs):
w = TextWrapper(width=width, **kwargs)
return w.fill(text)
|
[
"def",
"fill",
"(",
"text",
",",
"width",
"=",
"70",
",",
"**",
"kwargs",
")",
":",
"w",
"=",
"TextWrapper",
"(",
"width",
"=",
"width",
",",
"**",
"kwargs",
")",
"return",
"w",
".",
"fill",
"(",
"text",
")"
] |
make one string from sequence of strings .
|
train
| true
|
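The snippet mirrors the stdlib helper; the same call made with textwrap directly:

from textwrap import TextWrapper
print(TextWrapper(width=20).fill('the quick brown fox jumps over the lazy dog'))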
11,383
|
def _locate_roles_and_methods(cls):
roles = {}
methods = {}
for supercls in cls.__mro__:
for (name, method) in vars(supercls).items():
if (not util.callable(method)):
continue
if hasattr(method, '_sa_instrument_role'):
role = method._sa_instrument_role
assert (role in ('appender', 'remover', 'iterator', 'linker', 'converter'))
roles.setdefault(role, name)
(before, after) = (None, None)
if hasattr(method, '_sa_instrument_before'):
(op, argument) = method._sa_instrument_before
assert (op in ('fire_append_event', 'fire_remove_event'))
before = (op, argument)
if hasattr(method, '_sa_instrument_after'):
op = method._sa_instrument_after
assert (op in ('fire_append_event', 'fire_remove_event'))
after = op
if before:
methods[name] = (before + (after,))
elif after:
methods[name] = (None, None, after)
return (roles, methods)
|
[
"def",
"_locate_roles_and_methods",
"(",
"cls",
")",
":",
"roles",
"=",
"{",
"}",
"methods",
"=",
"{",
"}",
"for",
"supercls",
"in",
"cls",
".",
"__mro__",
":",
"for",
"(",
"name",
",",
"method",
")",
"in",
"vars",
"(",
"supercls",
")",
".",
"items",
"(",
")",
":",
"if",
"(",
"not",
"util",
".",
"callable",
"(",
"method",
")",
")",
":",
"continue",
"if",
"hasattr",
"(",
"method",
",",
"'_sa_instrument_role'",
")",
":",
"role",
"=",
"method",
".",
"_sa_instrument_role",
"assert",
"(",
"role",
"in",
"(",
"'appender'",
",",
"'remover'",
",",
"'iterator'",
",",
"'linker'",
",",
"'converter'",
")",
")",
"roles",
".",
"setdefault",
"(",
"role",
",",
"name",
")",
"(",
"before",
",",
"after",
")",
"=",
"(",
"None",
",",
"None",
")",
"if",
"hasattr",
"(",
"method",
",",
"'_sa_instrument_before'",
")",
":",
"(",
"op",
",",
"argument",
")",
"=",
"method",
".",
"_sa_instrument_before",
"assert",
"(",
"op",
"in",
"(",
"'fire_append_event'",
",",
"'fire_remove_event'",
")",
")",
"before",
"=",
"(",
"op",
",",
"argument",
")",
"if",
"hasattr",
"(",
"method",
",",
"'_sa_instrument_after'",
")",
":",
"op",
"=",
"method",
".",
"_sa_instrument_after",
"assert",
"(",
"op",
"in",
"(",
"'fire_append_event'",
",",
"'fire_remove_event'",
")",
")",
"after",
"=",
"op",
"if",
"before",
":",
"methods",
"[",
"name",
"]",
"=",
"(",
"before",
"+",
"(",
"after",
",",
")",
")",
"elif",
"after",
":",
"methods",
"[",
"name",
"]",
"=",
"(",
"None",
",",
"None",
",",
"after",
")",
"return",
"(",
"roles",
",",
"methods",
")"
] |
search for _sa_instrument_role-decorated methods in method resolution order .
|
train
| false
|
11,384
|
def set_resource(resource):
resource.poc = get_random_user()
resource.metadata_author = get_random_user()
assign_random_category(resource)
assign_regions(resource)
|
[
"def",
"set_resource",
"(",
"resource",
")",
":",
"resource",
".",
"poc",
"=",
"get_random_user",
"(",
")",
"resource",
".",
"metadata_author",
"=",
"get_random_user",
"(",
")",
"assign_random_category",
"(",
"resource",
")",
"assign_regions",
"(",
"resource",
")"
] |
assign poc .
|
train
| false
|
11,386
|
@pytest.mark.parametrize('value', ['foo\\bar', 'fo\xc3\xb6\r\nb\xc3\xa4r', 'fo\xc3\xb6\\r\\nb\xc3\xa4r', 'fo\xc3\xb6\r\n\\r\\nb\xc3\xa4r', 'nfo\xc3\xb6\nb\xc3\xa4r', 'nfo\xc3\xb6\\nb\xc3\xa4r', 'fo\xc3\xb6\n\\nb\xc3\xa4r', ['foo\\bar'], ['fo\xc3\xb6\r\nb\xc3\xa4r'], ['fo\xc3\xb6\\r\\nb\xc3\xa4r'], ['fo\xc3\xb6\r\n\\r\\nb\xc3\xa4r', 'b\xc3\xa4r\r\n\\r\\nb\xc3\xa4z'], ['nfo\xc3\xb6\nb\xc3\xa4r'], ['nfo\xc3\xb6\\nb\xc3\xa4r'], ['fo\xc3\xb6\n\\nb\xc3\xa4r', 'b\xc3\xa4r\n\\nb\xc3\xa4z']])
def test_multistringwidget_decompress_multistrings(value):
widget = MultiStringWidget()
expected_value = ([value] if isinstance(value, basestring) else value)
assert (widget.decompress(multistring.multistring(value)) == expected_value)
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'value'",
",",
"[",
"'foo\\\\bar'",
",",
"'fo\\xc3\\xb6\\r\\nb\\xc3\\xa4r'",
",",
"'fo\\xc3\\xb6\\\\r\\\\nb\\xc3\\xa4r'",
",",
"'fo\\xc3\\xb6\\r\\n\\\\r\\\\nb\\xc3\\xa4r'",
",",
"'nfo\\xc3\\xb6\\nb\\xc3\\xa4r'",
",",
"'nfo\\xc3\\xb6\\\\nb\\xc3\\xa4r'",
",",
"'fo\\xc3\\xb6\\n\\\\nb\\xc3\\xa4r'",
",",
"[",
"'foo\\\\bar'",
"]",
",",
"[",
"'fo\\xc3\\xb6\\r\\nb\\xc3\\xa4r'",
"]",
",",
"[",
"'fo\\xc3\\xb6\\\\r\\\\nb\\xc3\\xa4r'",
"]",
",",
"[",
"'fo\\xc3\\xb6\\r\\n\\\\r\\\\nb\\xc3\\xa4r'",
",",
"'b\\xc3\\xa4r\\r\\n\\\\r\\\\nb\\xc3\\xa4z'",
"]",
",",
"[",
"'nfo\\xc3\\xb6\\nb\\xc3\\xa4r'",
"]",
",",
"[",
"'nfo\\xc3\\xb6\\\\nb\\xc3\\xa4r'",
"]",
",",
"[",
"'fo\\xc3\\xb6\\n\\\\nb\\xc3\\xa4r'",
",",
"'b\\xc3\\xa4r\\n\\\\nb\\xc3\\xa4z'",
"]",
"]",
")",
"def",
"test_multistringwidget_decompress_multistrings",
"(",
"value",
")",
":",
"widget",
"=",
"MultiStringWidget",
"(",
")",
"expected_value",
"=",
"(",
"[",
"value",
"]",
"if",
"isinstance",
"(",
"value",
",",
"basestring",
")",
"else",
"value",
")",
"assert",
"(",
"widget",
".",
"decompress",
"(",
"multistring",
".",
"multistring",
"(",
"value",
")",
")",
"==",
"expected_value",
")"
] |
tests units multistringwidget decompresses string values .
|
train
| false
|
11,387
|
def client_read(path, **kwargs):
parts = path.split('/')
config_parts = ['check_names', 'init_configs', 'instances']
(image, config_part) = (parts[(-2)], parts[(-1)])
if ('all' in kwargs):
return {}
else:
return TestServiceDiscovery.mock_tpls.get(image)[0][config_parts.index(config_part)]
|
[
"def",
"client_read",
"(",
"path",
",",
"**",
"kwargs",
")",
":",
"parts",
"=",
"path",
".",
"split",
"(",
"'/'",
")",
"config_parts",
"=",
"[",
"'check_names'",
",",
"'init_configs'",
",",
"'instances'",
"]",
"(",
"image",
",",
"config_part",
")",
"=",
"(",
"parts",
"[",
"(",
"-",
"2",
")",
"]",
",",
"parts",
"[",
"(",
"-",
"1",
")",
"]",
")",
"if",
"(",
"'all'",
"in",
"kwargs",
")",
":",
"return",
"{",
"}",
"else",
":",
"return",
"TestServiceDiscovery",
".",
"mock_tpls",
".",
"get",
"(",
"image",
")",
"[",
"0",
"]",
"[",
"config_parts",
".",
"index",
"(",
"config_part",
")",
"]"
] |
return a mocked string that would normally be read from a config store .
|
train
| false
|
11,388
|
def get_auto_confd_path(osname=None):
return os.path.join(get_confd_path(osname), AUTO_CONFIG_DIR)
|
[
"def",
"get_auto_confd_path",
"(",
"osname",
"=",
"None",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"get_confd_path",
"(",
"osname",
")",
",",
"AUTO_CONFIG_DIR",
")"
] |
used for service discovery which only works for unix .
|
train
| false
|
11,389
|
def guess_even_sampling_depth(counts_per_sample, num_deviations=2.25):
counts_per_sample.sort()
(median_abs_dev, median_count) = median_absolute_deviation(counts_per_sample)
min_threshold = (median_count - (num_deviations * median_abs_dev))
for e in counts_per_sample:
if (e >= min_threshold):
return e
raise ValueError((('No acceptable even sampling depth identified. ' + "It shouldn't be possible to get here, but just in case here's the ") + 'counts per sample: %s '.join(map(str, counts_per_sample))))
|
[
"def",
"guess_even_sampling_depth",
"(",
"counts_per_sample",
",",
"num_deviations",
"=",
"2.25",
")",
":",
"counts_per_sample",
".",
"sort",
"(",
")",
"(",
"median_abs_dev",
",",
"median_count",
")",
"=",
"median_absolute_deviation",
"(",
"counts_per_sample",
")",
"min_threshold",
"=",
"(",
"median_count",
"-",
"(",
"num_deviations",
"*",
"median_abs_dev",
")",
")",
"for",
"e",
"in",
"counts_per_sample",
":",
"if",
"(",
"e",
">=",
"min_threshold",
")",
":",
"return",
"e",
"raise",
"ValueError",
"(",
"(",
"(",
"'No acceptable even sampling depth identified. '",
"+",
"\"It shouldn't be possible to get here, but just in case here's the \"",
")",
"+",
"'counts per sample: %s '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"counts_per_sample",
")",
")",
")",
")"
] |
guess a depth for even sampling this is currently computed as the smallest seqs per sample count >= the median seqs per sample count - .
|
train
| false
|
11,390
|
def format_tb(tb, limit=None):
return format_list(extract_tb(tb, limit))
|
[
"def",
"format_tb",
"(",
"tb",
",",
"limit",
"=",
"None",
")",
":",
"return",
"format_list",
"(",
"extract_tb",
"(",
"tb",
",",
"limit",
")",
")"
] |
a shorthand for format_list(extract_stack) .
|
train
| false
|
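The stdlib function being wrapped, applied to a freshly caught exception:

import sys
import traceback
try:
    1 / 0
except ZeroDivisionError:
    print(''.join(traceback.format_tb(sys.exc_info()[2])))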