id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
37,181 | def new(rsa_key):
return PKCS115_SigScheme(rsa_key)
| [
"def",
"new",
"(",
"rsa_key",
")",
":",
"return",
"PKCS115_SigScheme",
"(",
"rsa_key",
")"
] | return a fresh instance of the hash object . | train | false |
37,184 | def _check_for_nonzero_return_code(reason):
m = _BOOTSTRAP_NONZERO_RETURN_CODE_RE.match(reason)
if m:
return _extract_action_num_and_node_id(m)
else:
return None
| [
"def",
"_check_for_nonzero_return_code",
"(",
"reason",
")",
":",
"m",
"=",
"_BOOTSTRAP_NONZERO_RETURN_CODE_RE",
".",
"match",
"(",
"reason",
")",
"if",
"m",
":",
"return",
"_extract_action_num_and_node_id",
"(",
"m",
")",
"else",
":",
"return",
"None"
] | given a reason for cluster termination . | train | false |
37,185 | def getWiddershinsByLength(begin, end, length):
endMinusBegin = (end - begin)
endMinusBeginLength = abs(endMinusBegin)
if (endMinusBeginLength <= 0.0):
return None
endMinusBegin *= (length / endMinusBeginLength)
return complex((- endMinusBegin.imag), endMinusBegin.real)
| [
"def",
"getWiddershinsByLength",
"(",
"begin",
",",
"end",
",",
"length",
")",
":",
"endMinusBegin",
"=",
"(",
"end",
"-",
"begin",
")",
"endMinusBeginLength",
"=",
"abs",
"(",
"endMinusBegin",
")",
"if",
"(",
"endMinusBeginLength",
"<=",
"0.0",
")",
":",
"return",
"None",
"endMinusBegin",
"*=",
"(",
"length",
"/",
"endMinusBeginLength",
")",
"return",
"complex",
"(",
"(",
"-",
"endMinusBegin",
".",
"imag",
")",
",",
"endMinusBegin",
".",
"real",
")"
] | get the widdershins by length . | train | false |
37,186 | def inputhook_wx1(context):
try:
app = wx.GetApp()
if (app is not None):
assert wx.Thread_IsMain()
evtloop = wx.EventLoop()
ea = wx.EventLoopActivator(evtloop)
while evtloop.Pending():
evtloop.Dispatch()
app.ProcessIdle()
del ea
except KeyboardInterrupt:
pass
return 0
| [
"def",
"inputhook_wx1",
"(",
"context",
")",
":",
"try",
":",
"app",
"=",
"wx",
".",
"GetApp",
"(",
")",
"if",
"(",
"app",
"is",
"not",
"None",
")",
":",
"assert",
"wx",
".",
"Thread_IsMain",
"(",
")",
"evtloop",
"=",
"wx",
".",
"EventLoop",
"(",
")",
"ea",
"=",
"wx",
".",
"EventLoopActivator",
"(",
"evtloop",
")",
"while",
"evtloop",
".",
"Pending",
"(",
")",
":",
"evtloop",
".",
"Dispatch",
"(",
")",
"app",
".",
"ProcessIdle",
"(",
")",
"del",
"ea",
"except",
"KeyboardInterrupt",
":",
"pass",
"return",
"0"
] | run the wx event loop by processing pending events only . | train | true |
37,187 | def index_get(index, hosts=None, profile=None):
es = _get_instance(hosts, profile)
try:
if index_exists(index):
ret = es.indices.get(index=index)
return ret
except elasticsearch.exceptions.NotFoundError:
return None
return None
| [
"def",
"index_get",
"(",
"index",
",",
"hosts",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"es",
"=",
"_get_instance",
"(",
"hosts",
",",
"profile",
")",
"try",
":",
"if",
"index_exists",
"(",
"index",
")",
":",
"ret",
"=",
"es",
".",
"indices",
".",
"get",
"(",
"index",
"=",
"index",
")",
"return",
"ret",
"except",
"elasticsearch",
".",
"exceptions",
".",
"NotFoundError",
":",
"return",
"None",
"return",
"None"
] | check for the existence of an index and if it exists . | train | false |
37,188 | def _parse_line(line=''):
parts = line.split()
key = parts.pop(0)
value = ' '.join(parts)
return (key, value)
| [
"def",
"_parse_line",
"(",
"line",
"=",
"''",
")",
":",
"parts",
"=",
"line",
".",
"split",
"(",
")",
"key",
"=",
"parts",
".",
"pop",
"(",
"0",
")",
"value",
"=",
"' '",
".",
"join",
"(",
"parts",
")",
"return",
"(",
"key",
",",
"value",
")"
] | parse a line in the input file into . | train | true |
37,189 | def return_future(f):
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
(callback, args, kwargs) = replacer.replace((lambda value=_NO_RESULT: future.set_result(value)), args, kwargs)
def handle_error(typ, value, tb):
future.set_exc_info((typ, value, tb))
return True
exc_info = None
with ExceptionStackContext(handle_error):
try:
result = f(*args, **kwargs)
if (result is not None):
raise ReturnValueIgnoredError('@return_future should not be used with functions that return values')
except:
exc_info = sys.exc_info()
raise
if (exc_info is not None):
future.result()
if (callback is not None):
def run_callback(future):
result = future.result()
if (result is _NO_RESULT):
callback()
else:
callback(future.result())
future.add_done_callback(wrap(run_callback))
return future
return wrapper
| [
"def",
"return_future",
"(",
"f",
")",
":",
"replacer",
"=",
"ArgReplacer",
"(",
"f",
",",
"'callback'",
")",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"future",
"=",
"TracebackFuture",
"(",
")",
"(",
"callback",
",",
"args",
",",
"kwargs",
")",
"=",
"replacer",
".",
"replace",
"(",
"(",
"lambda",
"value",
"=",
"_NO_RESULT",
":",
"future",
".",
"set_result",
"(",
"value",
")",
")",
",",
"args",
",",
"kwargs",
")",
"def",
"handle_error",
"(",
"typ",
",",
"value",
",",
"tb",
")",
":",
"future",
".",
"set_exc_info",
"(",
"(",
"typ",
",",
"value",
",",
"tb",
")",
")",
"return",
"True",
"exc_info",
"=",
"None",
"with",
"ExceptionStackContext",
"(",
"handle_error",
")",
":",
"try",
":",
"result",
"=",
"f",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"if",
"(",
"result",
"is",
"not",
"None",
")",
":",
"raise",
"ReturnValueIgnoredError",
"(",
"'@return_future should not be used with functions that return values'",
")",
"except",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"if",
"(",
"exc_info",
"is",
"not",
"None",
")",
":",
"future",
".",
"result",
"(",
")",
"if",
"(",
"callback",
"is",
"not",
"None",
")",
":",
"def",
"run_callback",
"(",
"future",
")",
":",
"result",
"=",
"future",
".",
"result",
"(",
")",
"if",
"(",
"result",
"is",
"_NO_RESULT",
")",
":",
"callback",
"(",
")",
"else",
":",
"callback",
"(",
"future",
".",
"result",
"(",
")",
")",
"future",
".",
"add_done_callback",
"(",
"wrap",
"(",
"run_callback",
")",
")",
"return",
"future",
"return",
"wrapper"
] | decorator to make a function that returns via callback return a future . | train | true |
37,190 | def get_title(soup):
if soup.title:
return soup.title.string
if soup.h1:
return soup.h1.string
return u''
| [
"def",
"get_title",
"(",
"soup",
")",
":",
"if",
"soup",
".",
"title",
":",
"return",
"soup",
".",
"title",
".",
"string",
"if",
"soup",
".",
"h1",
":",
"return",
"soup",
".",
"h1",
".",
"string",
"return",
"u''"
] | fetch the contents of url and try to extract the pages title . | train | true |
37,191 | def _IsFileOrDirWithFile(path):
return (os.path.isfile(path) or (os.path.isdir(path) and os.path.isfile(os.path.join(path, 'include.yaml'))))
| [
"def",
"_IsFileOrDirWithFile",
"(",
"path",
")",
":",
"return",
"(",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
"or",
"(",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'include.yaml'",
")",
")",
")",
")"
] | determine if a path is a file or a directory with an appropriate file . | train | false |
37,192 | def safe_sqr(X, copy=True):
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'], ensure_2d=False)
if issparse(X):
if copy:
X = X.copy()
X.data **= 2
elif copy:
X = (X ** 2)
else:
X **= 2
return X
| [
"def",
"safe_sqr",
"(",
"X",
",",
"copy",
"=",
"True",
")",
":",
"X",
"=",
"check_array",
"(",
"X",
",",
"accept_sparse",
"=",
"[",
"'csr'",
",",
"'csc'",
",",
"'coo'",
"]",
",",
"ensure_2d",
"=",
"False",
")",
"if",
"issparse",
"(",
"X",
")",
":",
"if",
"copy",
":",
"X",
"=",
"X",
".",
"copy",
"(",
")",
"X",
".",
"data",
"**=",
"2",
"elif",
"copy",
":",
"X",
"=",
"(",
"X",
"**",
"2",
")",
"else",
":",
"X",
"**=",
"2",
"return",
"X"
] | element wise squaring of array-likes and sparse matrices . | train | false |
37,193 | def is_email(identifier):
try:
validate_email(identifier)
except ValidationError:
return False
return True
| [
"def",
"is_email",
"(",
"identifier",
")",
":",
"try",
":",
"validate_email",
"(",
"identifier",
")",
"except",
"ValidationError",
":",
"return",
"False",
"return",
"True"
] | checks if an identifier string is a valid email . | train | false |
37,194 | def truncate_words(content, length=10, suffix='...'):
split = content.split()
if (len(split) <= length):
return ' '.join(split[:length])
else:
return (' '.join(split[:length]) + suffix)
| [
"def",
"truncate_words",
"(",
"content",
",",
"length",
"=",
"10",
",",
"suffix",
"=",
"'...'",
")",
":",
"split",
"=",
"content",
".",
"split",
"(",
")",
"if",
"(",
"len",
"(",
"split",
")",
"<=",
"length",
")",
":",
"return",
"' '",
".",
"join",
"(",
"split",
"[",
":",
"length",
"]",
")",
"else",
":",
"return",
"(",
"' '",
".",
"join",
"(",
"split",
"[",
":",
"length",
"]",
")",
"+",
"suffix",
")"
] | truncates a string after a certain number of words . | train | false |
37,195 | def _dipole_gof(uu, sing, vv, B, B2):
ncomp = (3 if ((sing[2] / sing[0]) > 0.2) else 2)
one = np.dot(vv[:ncomp], B)
Bm2 = np.sum((one * one))
gof = (Bm2 / B2)
return (gof, one)
| [
"def",
"_dipole_gof",
"(",
"uu",
",",
"sing",
",",
"vv",
",",
"B",
",",
"B2",
")",
":",
"ncomp",
"=",
"(",
"3",
"if",
"(",
"(",
"sing",
"[",
"2",
"]",
"/",
"sing",
"[",
"0",
"]",
")",
">",
"0.2",
")",
"else",
"2",
")",
"one",
"=",
"np",
".",
"dot",
"(",
"vv",
"[",
":",
"ncomp",
"]",
",",
"B",
")",
"Bm2",
"=",
"np",
".",
"sum",
"(",
"(",
"one",
"*",
"one",
")",
")",
"gof",
"=",
"(",
"Bm2",
"/",
"B2",
")",
"return",
"(",
"gof",
",",
"one",
")"
] | calculate the goodness of fit from the forward svd . | train | false |
37,196 | def iter_suite_tests(suite):
for item in suite._tests:
if isinstance(item, unittest.TestCase):
(yield item)
elif isinstance(item, unittest.TestSuite):
for r in iter_suite_tests(item):
(yield r)
else:
raise Exception(('unknown object %r inside test suite %r' % (item, suite)))
| [
"def",
"iter_suite_tests",
"(",
"suite",
")",
":",
"for",
"item",
"in",
"suite",
".",
"_tests",
":",
"if",
"isinstance",
"(",
"item",
",",
"unittest",
".",
"TestCase",
")",
":",
"(",
"yield",
"item",
")",
"elif",
"isinstance",
"(",
"item",
",",
"unittest",
".",
"TestSuite",
")",
":",
"for",
"r",
"in",
"iter_suite_tests",
"(",
"item",
")",
":",
"(",
"yield",
"r",
")",
"else",
":",
"raise",
"Exception",
"(",
"(",
"'unknown object %r inside test suite %r'",
"%",
"(",
"item",
",",
"suite",
")",
")",
")"
] | return all tests in a suite . | train | false |
37,197 | def _gen_md5_filehash(fname, *args):
_hash = hashlib.md5()
with salt.utils.fopen(fname, 'rb') as f:
for chunk in iter((lambda : f.read(4096)), ''):
_hash.update(chunk)
for extra_arg in args:
_hash.update(str(extra_arg))
return _hash.hexdigest()
| [
"def",
"_gen_md5_filehash",
"(",
"fname",
",",
"*",
"args",
")",
":",
"_hash",
"=",
"hashlib",
".",
"md5",
"(",
")",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"fname",
",",
"'rb'",
")",
"as",
"f",
":",
"for",
"chunk",
"in",
"iter",
"(",
"(",
"lambda",
":",
"f",
".",
"read",
"(",
"4096",
")",
")",
",",
"''",
")",
":",
"_hash",
".",
"update",
"(",
"chunk",
")",
"for",
"extra_arg",
"in",
"args",
":",
"_hash",
".",
"update",
"(",
"str",
"(",
"extra_arg",
")",
")",
"return",
"_hash",
".",
"hexdigest",
"(",
")"
] | helper function to generate a md5 hash of the swagger definition file any extra argument passed to the function is converted to a string and participates in the hash calculation . | train | true |
37,198 | def lambda_sum_smallest(X, k):
X = Expression.cast_to_const(X)
return (- lambda_sum_largest((- X), k))
| [
"def",
"lambda_sum_smallest",
"(",
"X",
",",
"k",
")",
":",
"X",
"=",
"Expression",
".",
"cast_to_const",
"(",
"X",
")",
"return",
"(",
"-",
"lambda_sum_largest",
"(",
"(",
"-",
"X",
")",
",",
"k",
")",
")"
] | sum of the largest k eigenvalues . | train | false |
37,200 | def make_secure_channel(credentials, user_agent, host, extra_options=None):
target = ('%s:%d' % (host, http_client.HTTPS_PORT))
http_request = google_auth_httplib2.Request(http=httplib2.Http())
user_agent_option = ('grpc.primary_user_agent', user_agent)
if (extra_options is not None):
options = ((user_agent_option,) + extra_options)
else:
options = (user_agent_option,)
return google.auth.transport.grpc.secure_authorized_channel(credentials, http_request, target, options=options)
| [
"def",
"make_secure_channel",
"(",
"credentials",
",",
"user_agent",
",",
"host",
",",
"extra_options",
"=",
"None",
")",
":",
"target",
"=",
"(",
"'%s:%d'",
"%",
"(",
"host",
",",
"http_client",
".",
"HTTPS_PORT",
")",
")",
"http_request",
"=",
"google_auth_httplib2",
".",
"Request",
"(",
"http",
"=",
"httplib2",
".",
"Http",
"(",
")",
")",
"user_agent_option",
"=",
"(",
"'grpc.primary_user_agent'",
",",
"user_agent",
")",
"if",
"(",
"extra_options",
"is",
"not",
"None",
")",
":",
"options",
"=",
"(",
"(",
"user_agent_option",
",",
")",
"+",
"extra_options",
")",
"else",
":",
"options",
"=",
"(",
"user_agent_option",
",",
")",
"return",
"google",
".",
"auth",
".",
"transport",
".",
"grpc",
".",
"secure_authorized_channel",
"(",
"credentials",
",",
"http_request",
",",
"target",
",",
"options",
"=",
"options",
")"
] | makes a secure channel for an rpc service . | train | false |
37,202 | def unbound_method_to_callable(func_or_cls):
if (isinstance(func_or_cls, types.MethodType) and (not func_or_cls.__self__)):
return func_or_cls.__func__
else:
return func_or_cls
| [
"def",
"unbound_method_to_callable",
"(",
"func_or_cls",
")",
":",
"if",
"(",
"isinstance",
"(",
"func_or_cls",
",",
"types",
".",
"MethodType",
")",
"and",
"(",
"not",
"func_or_cls",
".",
"__self__",
")",
")",
":",
"return",
"func_or_cls",
".",
"__func__",
"else",
":",
"return",
"func_or_cls"
] | adjust the incoming callable such that a self argument is not required . | train | false |
37,204 | def get_headers(fname, sep, count=60, is_multi_byte=False):
headers = []
for (idx, line) in enumerate(open(fname)):
line = line.rstrip('\n\r')
if is_multi_byte:
line = unicodify(line, 'utf-8')
sep = sep.encode('utf-8')
headers.append(line.split(sep))
if (idx == count):
break
return headers
| [
"def",
"get_headers",
"(",
"fname",
",",
"sep",
",",
"count",
"=",
"60",
",",
"is_multi_byte",
"=",
"False",
")",
":",
"headers",
"=",
"[",
"]",
"for",
"(",
"idx",
",",
"line",
")",
"in",
"enumerate",
"(",
"open",
"(",
"fname",
")",
")",
":",
"line",
"=",
"line",
".",
"rstrip",
"(",
"'\\n\\r'",
")",
"if",
"is_multi_byte",
":",
"line",
"=",
"unicodify",
"(",
"line",
",",
"'utf-8'",
")",
"sep",
"=",
"sep",
".",
"encode",
"(",
"'utf-8'",
")",
"headers",
".",
"append",
"(",
"line",
".",
"split",
"(",
"sep",
")",
")",
"if",
"(",
"idx",
"==",
"count",
")",
":",
"break",
"return",
"headers"
] | using session credentials/config . | train | false |
37,205 | def list_aliases():
ret = dict(((alias, target) for (alias, target, comment) in __parse_aliases() if alias))
return ret
| [
"def",
"list_aliases",
"(",
")",
":",
"ret",
"=",
"dict",
"(",
"(",
"(",
"alias",
",",
"target",
")",
"for",
"(",
"alias",
",",
"target",
",",
"comment",
")",
"in",
"__parse_aliases",
"(",
")",
"if",
"alias",
")",
")",
"return",
"ret"
] | return the aliases found in the aliases file in this format:: {alias: target} cli example: . | train | true |
37,207 | @Profiler.profile
def test_orm_full_objects_list(n):
sess = Session(engine)
objects = list(sess.query(Customer).limit(n))
| [
"@",
"Profiler",
".",
"profile",
"def",
"test_orm_full_objects_list",
"(",
"n",
")",
":",
"sess",
"=",
"Session",
"(",
"engine",
")",
"objects",
"=",
"list",
"(",
"sess",
".",
"query",
"(",
"Customer",
")",
".",
"limit",
"(",
"n",
")",
")"
] | load fully tracked orm objects into one big list() . | train | false |
37,208 | def is_special_slice(key):
if isinstance(key, tuple):
if (len(key) > 2):
raise IndexError('Invalid index/slice.')
key_elems = [key[0], key[1]]
else:
key_elems = [key]
for elem in key_elems:
if (not (isinstance(elem, (numbers.Number, slice)) or np.isscalar(elem))):
return True
return False
| [
"def",
"is_special_slice",
"(",
"key",
")",
":",
"if",
"isinstance",
"(",
"key",
",",
"tuple",
")",
":",
"if",
"(",
"len",
"(",
"key",
")",
">",
"2",
")",
":",
"raise",
"IndexError",
"(",
"'Invalid index/slice.'",
")",
"key_elems",
"=",
"[",
"key",
"[",
"0",
"]",
",",
"key",
"[",
"1",
"]",
"]",
"else",
":",
"key_elems",
"=",
"[",
"key",
"]",
"for",
"elem",
"in",
"key_elems",
":",
"if",
"(",
"not",
"(",
"isinstance",
"(",
"elem",
",",
"(",
"numbers",
".",
"Number",
",",
"slice",
")",
")",
"or",
"np",
".",
"isscalar",
"(",
"elem",
")",
")",
")",
":",
"return",
"True",
"return",
"False"
] | does the key contain a list . | train | false |
37,209 | def merge_roles(mop):
new_list = []
for m in mop:
if (m in new_list):
keep_this = new_list[new_list.index(m)]
if (not isinstance(keep_this.currentRole, list)):
keep_this.currentRole = [keep_this.currentRole]
keep_this.currentRole.append(m.currentRole)
else:
new_list.append(m)
return new_list
| [
"def",
"merge_roles",
"(",
"mop",
")",
":",
"new_list",
"=",
"[",
"]",
"for",
"m",
"in",
"mop",
":",
"if",
"(",
"m",
"in",
"new_list",
")",
":",
"keep_this",
"=",
"new_list",
"[",
"new_list",
".",
"index",
"(",
"m",
")",
"]",
"if",
"(",
"not",
"isinstance",
"(",
"keep_this",
".",
"currentRole",
",",
"list",
")",
")",
":",
"keep_this",
".",
"currentRole",
"=",
"[",
"keep_this",
".",
"currentRole",
"]",
"keep_this",
".",
"currentRole",
".",
"append",
"(",
"m",
".",
"currentRole",
")",
"else",
":",
"new_list",
".",
"append",
"(",
"m",
")",
"return",
"new_list"
] | merge multiple roles . | train | false |
37,210 | def parse_and_validate_push_spatial(parser, push_spatial):
try:
push_spatial = eval(push_spatial)
except (SyntaxError, NameError) as _:
err = ('Tried to parse the following push_spatial value\n%s\nas a Python expression, but it failed. push_spatial should be a valid Python expression.' % push_spatial)
parser.error(err)
if (push_spatial == None):
push_spatial = (0, 0)
elif (isinstance(push_spatial, tuple) and (len(push_spatial) == 2)):
pass
else:
err = ('push_spatial should be None or a valid tuple of indices of length 2, but it is: %s' % push_spatial)
parser.error(err)
return push_spatial
| [
"def",
"parse_and_validate_push_spatial",
"(",
"parser",
",",
"push_spatial",
")",
":",
"try",
":",
"push_spatial",
"=",
"eval",
"(",
"push_spatial",
")",
"except",
"(",
"SyntaxError",
",",
"NameError",
")",
"as",
"_",
":",
"err",
"=",
"(",
"'Tried to parse the following push_spatial value\\n%s\\nas a Python expression, but it failed. push_spatial should be a valid Python expression.'",
"%",
"push_spatial",
")",
"parser",
".",
"error",
"(",
"err",
")",
"if",
"(",
"push_spatial",
"==",
"None",
")",
":",
"push_spatial",
"=",
"(",
"0",
",",
"0",
")",
"elif",
"(",
"isinstance",
"(",
"push_spatial",
",",
"tuple",
")",
"and",
"(",
"len",
"(",
"push_spatial",
")",
"==",
"2",
")",
")",
":",
"pass",
"else",
":",
"err",
"=",
"(",
"'push_spatial should be None or a valid tuple of indices of length 2, but it is: %s'",
"%",
"push_spatial",
")",
"parser",
".",
"error",
"(",
"err",
")",
"return",
"push_spatial"
] | returns tuple of length 2 . | train | false |
37,211 | def textinfo_from_filename(path):
return TextInfo.init_from_filename(path)
| [
"def",
"textinfo_from_filename",
"(",
"path",
")",
":",
"return",
"TextInfo",
".",
"init_from_filename",
"(",
"path",
")"
] | determine test info for the given path **using the filename only** . | train | false |
37,212 | def assert_url_equal(url, other, compare_host=False):
parsed = urlparse(unicode(url))
parsed_other = urlparse(unicode(other))
assert (parsed.path == parsed_other.path)
assert (parse_qs(parsed.query) == parse_qs(parsed_other.query))
if compare_host:
assert (parsed.netloc == parsed_other.netloc)
| [
"def",
"assert_url_equal",
"(",
"url",
",",
"other",
",",
"compare_host",
"=",
"False",
")",
":",
"parsed",
"=",
"urlparse",
"(",
"unicode",
"(",
"url",
")",
")",
"parsed_other",
"=",
"urlparse",
"(",
"unicode",
"(",
"other",
")",
")",
"assert",
"(",
"parsed",
".",
"path",
"==",
"parsed_other",
".",
"path",
")",
"assert",
"(",
"parse_qs",
"(",
"parsed",
".",
"query",
")",
"==",
"parse_qs",
"(",
"parsed_other",
".",
"query",
")",
")",
"if",
"compare_host",
":",
"assert",
"(",
"parsed",
".",
"netloc",
"==",
"parsed_other",
".",
"netloc",
")"
] | compare url paths and query strings . | train | false |
37,216 | def exec_environment(pyfile='', request=None, response=None, session=None):
if (request is None):
request = Request({})
if (response is None):
response = Response()
if (session is None):
session = Session()
if (request.folder is None):
mo = re.match('(|.*/)applications/(?P<appname>[^/]+)', pyfile)
if mo:
appname = mo.group('appname')
request.folder = os.path.join('applications', appname)
else:
request.folder = ''
env = build_environment(request, response, session, store_current=False)
if pyfile:
pycfile = (pyfile + 'c')
if os.path.isfile(pycfile):
exec read_pyc(pycfile) in env
else:
execfile(pyfile, env)
return Storage(env)
| [
"def",
"exec_environment",
"(",
"pyfile",
"=",
"''",
",",
"request",
"=",
"None",
",",
"response",
"=",
"None",
",",
"session",
"=",
"None",
")",
":",
"if",
"(",
"request",
"is",
"None",
")",
":",
"request",
"=",
"Request",
"(",
"{",
"}",
")",
"if",
"(",
"response",
"is",
"None",
")",
":",
"response",
"=",
"Response",
"(",
")",
"if",
"(",
"session",
"is",
"None",
")",
":",
"session",
"=",
"Session",
"(",
")",
"if",
"(",
"request",
".",
"folder",
"is",
"None",
")",
":",
"mo",
"=",
"re",
".",
"match",
"(",
"'(|.*/)applications/(?P<appname>[^/]+)'",
",",
"pyfile",
")",
"if",
"mo",
":",
"appname",
"=",
"mo",
".",
"group",
"(",
"'appname'",
")",
"request",
".",
"folder",
"=",
"os",
".",
"path",
".",
"join",
"(",
"'applications'",
",",
"appname",
")",
"else",
":",
"request",
".",
"folder",
"=",
"''",
"env",
"=",
"build_environment",
"(",
"request",
",",
"response",
",",
"session",
",",
"store_current",
"=",
"False",
")",
"if",
"pyfile",
":",
"pycfile",
"=",
"(",
"pyfile",
"+",
"'c'",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"pycfile",
")",
":",
"exec",
"read_pyc",
"(",
"pycfile",
")",
"in",
"env",
"else",
":",
"execfile",
"(",
"pyfile",
",",
"env",
")",
"return",
"Storage",
"(",
"env",
")"
] | environment builder and module loader . | train | false |
37,217 | def is_method(function):
return isinstance(function, MethodType)
| [
"def",
"is_method",
"(",
"function",
")",
":",
"return",
"isinstance",
"(",
"function",
",",
"MethodType",
")"
] | returns true if the passed in function is identified as a method . | train | false |
37,218 | @time_cache('call_signatures_validity')
def cache_call_signatures(evaluator, bracket_leaf, code_lines, user_pos):
index = (user_pos[0] - 1)
before_cursor = code_lines[index][:user_pos[1]]
other_lines = code_lines[bracket_leaf.start_pos[0]:index]
whole = '\n'.join((other_lines + [before_cursor]))
before_bracket = re.match('.*\\(', whole, re.DOTALL)
module_path = bracket_leaf.get_parent_until().path
if (module_path is None):
(yield None)
else:
(yield (module_path, before_bracket, bracket_leaf.start_pos))
(yield evaluate_goto_definition(evaluator, bracket_leaf.get_previous_leaf()))
| [
"@",
"time_cache",
"(",
"'call_signatures_validity'",
")",
"def",
"cache_call_signatures",
"(",
"evaluator",
",",
"bracket_leaf",
",",
"code_lines",
",",
"user_pos",
")",
":",
"index",
"=",
"(",
"user_pos",
"[",
"0",
"]",
"-",
"1",
")",
"before_cursor",
"=",
"code_lines",
"[",
"index",
"]",
"[",
":",
"user_pos",
"[",
"1",
"]",
"]",
"other_lines",
"=",
"code_lines",
"[",
"bracket_leaf",
".",
"start_pos",
"[",
"0",
"]",
":",
"index",
"]",
"whole",
"=",
"'\\n'",
".",
"join",
"(",
"(",
"other_lines",
"+",
"[",
"before_cursor",
"]",
")",
")",
"before_bracket",
"=",
"re",
".",
"match",
"(",
"'.*\\\\('",
",",
"whole",
",",
"re",
".",
"DOTALL",
")",
"module_path",
"=",
"bracket_leaf",
".",
"get_parent_until",
"(",
")",
".",
"path",
"if",
"(",
"module_path",
"is",
"None",
")",
":",
"(",
"yield",
"None",
")",
"else",
":",
"(",
"yield",
"(",
"module_path",
",",
"before_bracket",
",",
"bracket_leaf",
".",
"start_pos",
")",
")",
"(",
"yield",
"evaluate_goto_definition",
"(",
"evaluator",
",",
"bracket_leaf",
".",
"get_previous_leaf",
"(",
")",
")",
")"
] | this function calculates the cache key . | train | false |
37,219 | def make_package(name=None):
if (name is None):
name = 'test_package'
pkg = {'name': name, 'title': 'My Test Package', 'author': 'test author', 'author_email': 'test_author@test_author.com', 'maintainer': 'test maintainer', 'maintainer_email': 'test_maintainer@test_maintainer.com', 'notes': 'some test notes', 'url': 'www.example.com'}
res1 = {'url': 'http://www.example-resource.info', 'description': 'an example resource description', 'format': 'HTML', 'name': 'an example resource'}
res2 = {'url': 'http://www.example-resource2.info', 'description': 'another example resource description', 'format': 'PDF', 'name': 'another example resource'}
pkg['resources'] = [res1, res2]
tag1 = {'name': 'a_test_tag'}
tag2 = {'name': 'another_test_tag'}
pkg['tags'] = [tag1, tag2]
pkg['groups'] = [{'name': 'roger'}]
return pkg
| [
"def",
"make_package",
"(",
"name",
"=",
"None",
")",
":",
"if",
"(",
"name",
"is",
"None",
")",
":",
"name",
"=",
"'test_package'",
"pkg",
"=",
"{",
"'name'",
":",
"name",
",",
"'title'",
":",
"'My Test Package'",
",",
"'author'",
":",
"'test author'",
",",
"'author_email'",
":",
"'test_author@test_author.com'",
",",
"'maintainer'",
":",
"'test maintainer'",
",",
"'maintainer_email'",
":",
"'test_maintainer@test_maintainer.com'",
",",
"'notes'",
":",
"'some test notes'",
",",
"'url'",
":",
"'www.example.com'",
"}",
"res1",
"=",
"{",
"'url'",
":",
"'http://www.example-resource.info'",
",",
"'description'",
":",
"'an example resource description'",
",",
"'format'",
":",
"'HTML'",
",",
"'name'",
":",
"'an example resource'",
"}",
"res2",
"=",
"{",
"'url'",
":",
"'http://www.example-resource2.info'",
",",
"'description'",
":",
"'another example resource description'",
",",
"'format'",
":",
"'PDF'",
",",
"'name'",
":",
"'another example resource'",
"}",
"pkg",
"[",
"'resources'",
"]",
"=",
"[",
"res1",
",",
"res2",
"]",
"tag1",
"=",
"{",
"'name'",
":",
"'a_test_tag'",
"}",
"tag2",
"=",
"{",
"'name'",
":",
"'another_test_tag'",
"}",
"pkg",
"[",
"'tags'",
"]",
"=",
"[",
"tag1",
",",
"tag2",
"]",
"pkg",
"[",
"'groups'",
"]",
"=",
"[",
"{",
"'name'",
":",
"'roger'",
"}",
"]",
"return",
"pkg"
] | return a test package in dictionary form . | train | false |
37,220 | def collect_node_config_js(addons):
js_modules = []
for addon in addons:
js_path = paths.resolve_addon_path(addon.config, 'node-cfg.js')
if js_path:
js_modules.append(js_path)
return js_modules
| [
"def",
"collect_node_config_js",
"(",
"addons",
")",
":",
"js_modules",
"=",
"[",
"]",
"for",
"addon",
"in",
"addons",
":",
"js_path",
"=",
"paths",
".",
"resolve_addon_path",
"(",
"addon",
".",
"config",
",",
"'node-cfg.js'",
")",
"if",
"js_path",
":",
"js_modules",
".",
"append",
"(",
"js_path",
")",
"return",
"js_modules"
] | collect webpack bundles for each of the addons node-cfg . | train | false |
37,221 | def distro_release_attr(attribute):
return _distro.distro_release_attr(attribute)
| [
"def",
"distro_release_attr",
"(",
"attribute",
")",
":",
"return",
"_distro",
".",
"distro_release_attr",
"(",
"attribute",
")"
] | return a single named information item from the distro release file data source of the current linux distribution . | train | false |
37,222 | def get_project_count():
bugs = mysite.search.models.Bug.all_bugs.all()
return bugs.values(u'project').distinct().count()
| [
"def",
"get_project_count",
"(",
")",
":",
"bugs",
"=",
"mysite",
".",
"search",
".",
"models",
".",
"Bug",
".",
"all_bugs",
".",
"all",
"(",
")",
"return",
"bugs",
".",
"values",
"(",
"u'project'",
")",
".",
"distinct",
"(",
")",
".",
"count",
"(",
")"
] | retrieve the number of projects currently indexed . | train | false |
37,223 | def gce_from_configuration(cluster_id, project=None, zone=None, credentials=None):
if (project is None):
project = get_machine_project()
if (zone is None):
zone = get_machine_zone()
gce_credentials = gce_credentials_from_config(credentials)
compute = discovery.build('compute', 'v1', credentials=gce_credentials)
return GCEBlockDeviceAPI(_operations=GCEOperations(_compute=compute, _project=unicode(project), _zone=unicode(zone)), _cluster_id=unicode(cluster_id))
| [
"def",
"gce_from_configuration",
"(",
"cluster_id",
",",
"project",
"=",
"None",
",",
"zone",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"if",
"(",
"project",
"is",
"None",
")",
":",
"project",
"=",
"get_machine_project",
"(",
")",
"if",
"(",
"zone",
"is",
"None",
")",
":",
"zone",
"=",
"get_machine_zone",
"(",
")",
"gce_credentials",
"=",
"gce_credentials_from_config",
"(",
"credentials",
")",
"compute",
"=",
"discovery",
".",
"build",
"(",
"'compute'",
",",
"'v1'",
",",
"credentials",
"=",
"gce_credentials",
")",
"return",
"GCEBlockDeviceAPI",
"(",
"_operations",
"=",
"GCEOperations",
"(",
"_compute",
"=",
"compute",
",",
"_project",
"=",
"unicode",
"(",
"project",
")",
",",
"_zone",
"=",
"unicode",
"(",
"zone",
")",
")",
",",
"_cluster_id",
"=",
"unicode",
"(",
"cluster_id",
")",
")"
] | build a gceblockdeviceapi instance using data from configuration . | train | false |
37,224 | @blueprint.route('/resources')
def list_all_resources():
return _list_resources(project=acl.get_limited_to_project(flask.request.headers))
| [
"@",
"blueprint",
".",
"route",
"(",
"'/resources'",
")",
"def",
"list_all_resources",
"(",
")",
":",
"return",
"_list_resources",
"(",
"project",
"=",
"acl",
".",
"get_limited_to_project",
"(",
"flask",
".",
"request",
".",
"headers",
")",
")"
] | return a list of all known resources . | train | false |
37,225 | def primefactors(n, limit=None, verbose=False):
n = int(n)
factors = sorted(factorint(n, limit=limit, verbose=verbose).keys())
s = [f for f in factors[:(-1):None] if (f not in [(-1), 0, 1])]
if (factors and isprime(factors[(-1)])):
s += [factors[(-1)]]
return s
| [
"def",
"primefactors",
"(",
"n",
",",
"limit",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"n",
"=",
"int",
"(",
"n",
")",
"factors",
"=",
"sorted",
"(",
"factorint",
"(",
"n",
",",
"limit",
"=",
"limit",
",",
"verbose",
"=",
"verbose",
")",
".",
"keys",
"(",
")",
")",
"s",
"=",
"[",
"f",
"for",
"f",
"in",
"factors",
"[",
":",
"(",
"-",
"1",
")",
":",
"None",
"]",
"if",
"(",
"f",
"not",
"in",
"[",
"(",
"-",
"1",
")",
",",
"0",
",",
"1",
"]",
")",
"]",
"if",
"(",
"factors",
"and",
"isprime",
"(",
"factors",
"[",
"(",
"-",
"1",
")",
"]",
")",
")",
":",
"s",
"+=",
"[",
"factors",
"[",
"(",
"-",
"1",
")",
"]",
"]",
"return",
"s"
] | return a sorted list of ns prime factors . | train | false |
37,226 | def _dehex(s):
import re
s = re.sub('[^a-fA-F\\d]', '', s)
return s.decode('hex')
| [
"def",
"_dehex",
"(",
"s",
")",
":",
"import",
"re",
"s",
"=",
"re",
".",
"sub",
"(",
"'[^a-fA-F\\\\d]'",
",",
"''",
",",
"s",
")",
"return",
"s",
".",
"decode",
"(",
"'hex'",
")"
] | liberally convert from hex string to binary string . | train | false |
37,228 | def get_secgroup_id(kwargs=None, call=None):
if (call == 'action'):
raise SaltCloudSystemExit('The get_secgroup_id function must be called with -f or --function.')
if (kwargs is None):
kwargs = {}
name = kwargs.get('name', None)
if (name is None):
raise SaltCloudSystemExit("The get_secgroup_id function requires a 'name'.")
try:
ret = list_security_groups()[name]['id']
except KeyError:
raise SaltCloudSystemExit("The security group '{0}' could not be found.".format(name))
return ret
| [
"def",
"get_secgroup_id",
"(",
"kwargs",
"=",
"None",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"==",
"'action'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The get_secgroup_id function must be called with -f or --function.'",
")",
"if",
"(",
"kwargs",
"is",
"None",
")",
":",
"kwargs",
"=",
"{",
"}",
"name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"None",
")",
"if",
"(",
"name",
"is",
"None",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"\"The get_secgroup_id function requires a 'name'.\"",
")",
"try",
":",
"ret",
"=",
"list_security_groups",
"(",
")",
"[",
"name",
"]",
"[",
"'id'",
"]",
"except",
"KeyError",
":",
"raise",
"SaltCloudSystemExit",
"(",
"\"The security group '{0}' could not be found.\"",
".",
"format",
"(",
"name",
")",
")",
"return",
"ret"
] | returns a security groups id from the given security group name . | train | true |
37,230 | def ensure_ca_bundle_dir():
global ca_bundle_dir
if (not ca_bundle_dir):
ca_bundle_dir = os.path.join(sublime.packages_path(), 'User')
if (not os.path.exists(ca_bundle_dir)):
os.mkdir(ca_bundle_dir)
| [
"def",
"ensure_ca_bundle_dir",
"(",
")",
":",
"global",
"ca_bundle_dir",
"if",
"(",
"not",
"ca_bundle_dir",
")",
":",
"ca_bundle_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"sublime",
".",
"packages_path",
"(",
")",
",",
"'User'",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"ca_bundle_dir",
")",
")",
":",
"os",
".",
"mkdir",
"(",
"ca_bundle_dir",
")"
] | make sure we have a placed to save the merged-ca-bundle and system-ca-bundle . | train | false |
37,232 | def _ListSecrets(io_loop):
for f in _GetSecretsManager().ListSecrets():
print (' %s' % f)
io_loop.stop()
| [
"def",
"_ListSecrets",
"(",
"io_loop",
")",
":",
"for",
"f",
"in",
"_GetSecretsManager",
"(",
")",
".",
"ListSecrets",
"(",
")",
":",
"print",
"(",
"' %s'",
"%",
"f",
")",
"io_loop",
".",
"stop",
"(",
")"
] | lists all secrets . | train | false |
37,234 | def load_database(gb_filename_or_handle):
TESTDB = create_database()
db_name = 'biosql-test'
server = BioSeqDatabase.open_database(driver=DBDRIVER, user=DBUSER, passwd=DBPASSWD, host=DBHOST, db=TESTDB)
db = server.new_database(db_name)
iterator = SeqIO.parse(gb_filename_or_handle, 'gb')
count = db.load(iterator)
server.commit()
server.close()
return count
| [
"def",
"load_database",
"(",
"gb_filename_or_handle",
")",
":",
"TESTDB",
"=",
"create_database",
"(",
")",
"db_name",
"=",
"'biosql-test'",
"server",
"=",
"BioSeqDatabase",
".",
"open_database",
"(",
"driver",
"=",
"DBDRIVER",
",",
"user",
"=",
"DBUSER",
",",
"passwd",
"=",
"DBPASSWD",
",",
"host",
"=",
"DBHOST",
",",
"db",
"=",
"TESTDB",
")",
"db",
"=",
"server",
".",
"new_database",
"(",
"db_name",
")",
"iterator",
"=",
"SeqIO",
".",
"parse",
"(",
"gb_filename_or_handle",
",",
"'gb'",
")",
"count",
"=",
"db",
".",
"load",
"(",
"iterator",
")",
"server",
".",
"commit",
"(",
")",
"server",
".",
"close",
"(",
")",
"return",
"count"
] | load a genbank file into a new biosql database . | train | false |
37,235 | def gencastshapes():
for n in range(32):
(yield [n])
ndim = randrange(4, 6)
minshape = (1 if (randrange(100) > 80) else 2)
(yield [randrange(minshape, 5) for _ in range(ndim)])
ndim = randrange(2, 4)
minshape = (1 if (randrange(100) > 80) else 2)
(yield [randrange(minshape, 5) for _ in range(ndim)])
| [
"def",
"gencastshapes",
"(",
")",
":",
"for",
"n",
"in",
"range",
"(",
"32",
")",
":",
"(",
"yield",
"[",
"n",
"]",
")",
"ndim",
"=",
"randrange",
"(",
"4",
",",
"6",
")",
"minshape",
"=",
"(",
"1",
"if",
"(",
"randrange",
"(",
"100",
")",
">",
"80",
")",
"else",
"2",
")",
"(",
"yield",
"[",
"randrange",
"(",
"minshape",
",",
"5",
")",
"for",
"_",
"in",
"range",
"(",
"ndim",
")",
"]",
")",
"ndim",
"=",
"randrange",
"(",
"2",
",",
"4",
")",
"minshape",
"=",
"(",
"1",
"if",
"(",
"randrange",
"(",
"100",
")",
">",
"80",
")",
"else",
"2",
")",
"(",
"yield",
"[",
"randrange",
"(",
"minshape",
",",
"5",
")",
"for",
"_",
"in",
"range",
"(",
"ndim",
")",
"]",
")"
] | generate shapes to test casting . | train | false |
37,236 | def test_nearmiss_fit_single_class():
ratio = 'auto'
nm3 = NearMiss(ratio=ratio, random_state=RND_SEED, version=VERSION_NEARMISS)
y_single_class = np.zeros((X.shape[0],))
assert_warns(UserWarning, nm3.fit, X, y_single_class)
| [
"def",
"test_nearmiss_fit_single_class",
"(",
")",
":",
"ratio",
"=",
"'auto'",
"nm3",
"=",
"NearMiss",
"(",
"ratio",
"=",
"ratio",
",",
"random_state",
"=",
"RND_SEED",
",",
"version",
"=",
"VERSION_NEARMISS",
")",
"y_single_class",
"=",
"np",
".",
"zeros",
"(",
"(",
"X",
".",
"shape",
"[",
"0",
"]",
",",
")",
")",
"assert_warns",
"(",
"UserWarning",
",",
"nm3",
".",
"fit",
",",
"X",
",",
"y_single_class",
")"
] | test either if an error when there is a single class . | train | false |
37,238 | @patch('static_replace.staticfiles_storage', autospec=True)
@patch('static_replace.modulestore', autospec=True)
@override_settings(STATIC_URL='https://example.com/static/')
def test_static_url_with_xblock_resource_on_cdn(mock_modulestore, mock_storage):
mock_storage.exists.return_value = False
mock_modulestore.return_value = Mock(MongoModuleStore)
pre_text = 'EMBED src ="https://example.com/static/xblock/resources/tehehe.xblock/public/images/woo.png"'
post_text = pre_text
assert_equals(post_text, replace_static_urls(pre_text, DATA_DIRECTORY, COURSE_KEY))
| [
"@",
"patch",
"(",
"'static_replace.staticfiles_storage'",
",",
"autospec",
"=",
"True",
")",
"@",
"patch",
"(",
"'static_replace.modulestore'",
",",
"autospec",
"=",
"True",
")",
"@",
"override_settings",
"(",
"STATIC_URL",
"=",
"'https://example.com/static/'",
")",
"def",
"test_static_url_with_xblock_resource_on_cdn",
"(",
"mock_modulestore",
",",
"mock_storage",
")",
":",
"mock_storage",
".",
"exists",
".",
"return_value",
"=",
"False",
"mock_modulestore",
".",
"return_value",
"=",
"Mock",
"(",
"MongoModuleStore",
")",
"pre_text",
"=",
"'EMBED src =\"https://example.com/static/xblock/resources/tehehe.xblock/public/images/woo.png\"'",
"post_text",
"=",
"pre_text",
"assert_equals",
"(",
"post_text",
",",
"replace_static_urls",
"(",
"pre_text",
",",
"DATA_DIRECTORY",
",",
"COURSE_KEY",
")",
")"
] | make sure that for urls with xblock resource url . | train | false |
37,239 | def series_prepare_matrix(series_id, series, logo, lang_dict, justified=False):
template = s3db.survey_getTemplateFromSeries(series_id)
template_id = template.id
section_list = s3db.survey_getAllSectionsForSeries(series_id)
survey_T = s3db.survey_T
title = ('%s (%s)' % (series.name, template.name))
title = survey_T(title, lang_dict)
layout = []
survey_getQstnLayoutRules = s3db.survey_getQstnLayoutRules
for section in section_list:
section_name = survey_T(section['name'], lang_dict)
rules = survey_getQstnLayoutRules(template_id, section['section_id'])
layout_rules = [section_name, rules]
layout.append(layout_rules)
widget_list = s3db.survey_getAllWidgetsForTemplate(template_id)
layout_blocks = s3db.survey_LayoutBlocks()
preliminary_matrix = s3db.survey_getMatrix(title, logo, layout, widget_list, False, lang_dict, showSectionLabels=False, layoutBlocks=layout_blocks)
if (not justified):
return preliminary_matrix
layout_blocks.align()
layout_blocks = s3db.survey_LayoutBlocks()
(matrix1, matrix2) = s3db.survey_getMatrix(title, logo, layout, widget_list, True, lang_dict, showSectionLabels=False)
return (matrix1, matrix2)
| [
"def",
"series_prepare_matrix",
"(",
"series_id",
",",
"series",
",",
"logo",
",",
"lang_dict",
",",
"justified",
"=",
"False",
")",
":",
"template",
"=",
"s3db",
".",
"survey_getTemplateFromSeries",
"(",
"series_id",
")",
"template_id",
"=",
"template",
".",
"id",
"section_list",
"=",
"s3db",
".",
"survey_getAllSectionsForSeries",
"(",
"series_id",
")",
"survey_T",
"=",
"s3db",
".",
"survey_T",
"title",
"=",
"(",
"'%s (%s)'",
"%",
"(",
"series",
".",
"name",
",",
"template",
".",
"name",
")",
")",
"title",
"=",
"survey_T",
"(",
"title",
",",
"lang_dict",
")",
"layout",
"=",
"[",
"]",
"survey_getQstnLayoutRules",
"=",
"s3db",
".",
"survey_getQstnLayoutRules",
"for",
"section",
"in",
"section_list",
":",
"section_name",
"=",
"survey_T",
"(",
"section",
"[",
"'name'",
"]",
",",
"lang_dict",
")",
"rules",
"=",
"survey_getQstnLayoutRules",
"(",
"template_id",
",",
"section",
"[",
"'section_id'",
"]",
")",
"layout_rules",
"=",
"[",
"section_name",
",",
"rules",
"]",
"layout",
".",
"append",
"(",
"layout_rules",
")",
"widget_list",
"=",
"s3db",
".",
"survey_getAllWidgetsForTemplate",
"(",
"template_id",
")",
"layout_blocks",
"=",
"s3db",
".",
"survey_LayoutBlocks",
"(",
")",
"preliminary_matrix",
"=",
"s3db",
".",
"survey_getMatrix",
"(",
"title",
",",
"logo",
",",
"layout",
",",
"widget_list",
",",
"False",
",",
"lang_dict",
",",
"showSectionLabels",
"=",
"False",
",",
"layoutBlocks",
"=",
"layout_blocks",
")",
"if",
"(",
"not",
"justified",
")",
":",
"return",
"preliminary_matrix",
"layout_blocks",
".",
"align",
"(",
")",
"layout_blocks",
"=",
"s3db",
".",
"survey_LayoutBlocks",
"(",
")",
"(",
"matrix1",
",",
"matrix2",
")",
"=",
"s3db",
".",
"survey_getMatrix",
"(",
"title",
",",
"logo",
",",
"layout",
",",
"widget_list",
",",
"True",
",",
"lang_dict",
",",
"showSectionLabels",
"=",
"False",
")",
"return",
"(",
"matrix1",
",",
"matrix2",
")"
] | helper function for series_export_formatted() . | train | false |
37,240 | def reset_sequence(model):
sql = connection.ops.sequence_reset_sql(no_style(), [model])
for cmd in sql:
connection.cursor().execute(cmd)
| [
"def",
"reset_sequence",
"(",
"model",
")",
":",
"sql",
"=",
"connection",
".",
"ops",
".",
"sequence_reset_sql",
"(",
"no_style",
"(",
")",
",",
"[",
"model",
"]",
")",
"for",
"cmd",
"in",
"sql",
":",
"connection",
".",
"cursor",
"(",
")",
".",
"execute",
"(",
"cmd",
")"
] | reset the id sequence for a model . | train | true |
37,241 | def get_variables_to_restore():
return tf.get_collection(VARIABLES_TO_RESTORE)[:]
| [
"def",
"get_variables_to_restore",
"(",
")",
":",
"return",
"tf",
".",
"get_collection",
"(",
"VARIABLES_TO_RESTORE",
")",
"[",
":",
"]"
] | gets the list of variables to restore . | train | false |
37,242 | def _downgrade_other(t_images, t_image_members, t_image_properties, dialect):
foreign_keys = _get_foreign_keys(t_images, t_image_members, t_image_properties, dialect)
for fk in foreign_keys:
fk.drop()
t_images.c.id.alter(sqlalchemy.Integer(), primary_key=True)
t_image_members.c.image_id.alter(sqlalchemy.Integer())
t_image_properties.c.image_id.alter(sqlalchemy.Integer())
_update_all_uuids_to_ids(t_images, t_image_members, t_image_properties)
for fk in foreign_keys:
fk.create()
| [
"def",
"_downgrade_other",
"(",
"t_images",
",",
"t_image_members",
",",
"t_image_properties",
",",
"dialect",
")",
":",
"foreign_keys",
"=",
"_get_foreign_keys",
"(",
"t_images",
",",
"t_image_members",
",",
"t_image_properties",
",",
"dialect",
")",
"for",
"fk",
"in",
"foreign_keys",
":",
"fk",
".",
"drop",
"(",
")",
"t_images",
".",
"c",
".",
"id",
".",
"alter",
"(",
"sqlalchemy",
".",
"Integer",
"(",
")",
",",
"primary_key",
"=",
"True",
")",
"t_image_members",
".",
"c",
".",
"image_id",
".",
"alter",
"(",
"sqlalchemy",
".",
"Integer",
"(",
")",
")",
"t_image_properties",
".",
"c",
".",
"image_id",
".",
"alter",
"(",
"sqlalchemy",
".",
"Integer",
"(",
")",
")",
"_update_all_uuids_to_ids",
"(",
"t_images",
",",
"t_image_members",
",",
"t_image_properties",
")",
"for",
"fk",
"in",
"foreign_keys",
":",
"fk",
".",
"create",
"(",
")"
] | downgrade 012 -> 011 with logic for non-sqlite databases . | train | false |
37,245 | def create_region(name):
region = cache.create_region()
region.name = name
return region
| [
"def",
"create_region",
"(",
"name",
")",
":",
"region",
"=",
"cache",
".",
"create_region",
"(",
")",
"region",
".",
"name",
"=",
"name",
"return",
"region"
] | create a dopile region . | train | false |
37,246 | def test_challenge(signature):
_check_challenge(signature)
print 'Check complete'
return 'OK!'
| [
"def",
"test_challenge",
"(",
"signature",
")",
":",
"_check_challenge",
"(",
"signature",
")",
"print",
"'Check complete'",
"return",
"'OK!'"
] | test function to check if rsa is working . | train | false |
37,247 | def infer_end(self, context=None):
(yield self)
| [
"def",
"infer_end",
"(",
"self",
",",
"context",
"=",
"None",
")",
":",
"(",
"yield",
"self",
")"
] | inferences end for node such as module . | train | false |
37,248 | @allow_cross_site_request
@non_atomic_requests
def render_csv(request, addon, stats, fields, title=None, show_disclaimer=None):
ts = time.strftime('%c %z')
context = {'addon': addon, 'timestamp': ts, 'title': title, 'show_disclaimer': show_disclaimer}
response = render(request, 'stats/csv_header.txt', context)
writer = UnicodeCSVDictWriter(response, fields, restval=0, extrasaction='ignore')
writer.writeheader()
writer.writerows(stats)
fudge_headers(response, stats)
response['Content-Type'] = 'text/csv; charset=utf-8'
return response
| [
"@",
"allow_cross_site_request",
"@",
"non_atomic_requests",
"def",
"render_csv",
"(",
"request",
",",
"addon",
",",
"stats",
",",
"fields",
",",
"title",
"=",
"None",
",",
"show_disclaimer",
"=",
"None",
")",
":",
"ts",
"=",
"time",
".",
"strftime",
"(",
"'%c %z'",
")",
"context",
"=",
"{",
"'addon'",
":",
"addon",
",",
"'timestamp'",
":",
"ts",
",",
"'title'",
":",
"title",
",",
"'show_disclaimer'",
":",
"show_disclaimer",
"}",
"response",
"=",
"render",
"(",
"request",
",",
"'stats/csv_header.txt'",
",",
"context",
")",
"writer",
"=",
"UnicodeCSVDictWriter",
"(",
"response",
",",
"fields",
",",
"restval",
"=",
"0",
",",
"extrasaction",
"=",
"'ignore'",
")",
"writer",
".",
"writeheader",
"(",
")",
"writer",
".",
"writerows",
"(",
"stats",
")",
"fudge_headers",
"(",
"response",
",",
"stats",
")",
"response",
"[",
"'Content-Type'",
"]",
"=",
"'text/csv; charset=utf-8'",
"return",
"response"
] | render a stats series in csv . | train | false |
37,249 | def calc_angle(v1, v2, v3):
v1 = (v1 - v2)
v3 = (v3 - v2)
return v1.angle(v3)
| [
"def",
"calc_angle",
"(",
"v1",
",",
"v2",
",",
"v3",
")",
":",
"v1",
"=",
"(",
"v1",
"-",
"v2",
")",
"v3",
"=",
"(",
"v3",
"-",
"v2",
")",
"return",
"v1",
".",
"angle",
"(",
"v3",
")"
] | calculate the angle between 3 vectors representing 3 connected points . | train | false |
37,251 | def build_number(best=False):
return _distro.build_number(best)
| [
"def",
"build_number",
"(",
"best",
"=",
"False",
")",
":",
"return",
"_distro",
".",
"build_number",
"(",
"best",
")"
] | return the build number of the current linux distribution . | train | false |
37,252 | def _iteritems(d):
return (d.iteritems() if hasattr(d, 'iteritems') else d.items())
| [
"def",
"_iteritems",
"(",
"d",
")",
":",
"return",
"(",
"d",
".",
"iteritems",
"(",
")",
"if",
"hasattr",
"(",
"d",
",",
"'iteritems'",
")",
"else",
"d",
".",
"items",
"(",
")",
")"
] | like d . | train | false |
37,253 | def apply_nms(all_boxes, thresh):
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in xrange(num_images)] for _ in xrange(num_classes)]
for cls_ind in xrange(num_classes):
for im_ind in xrange(num_images):
dets = all_boxes[cls_ind][im_ind]
if (dets == []):
continue
keep = nms(dets, thresh)
if (len(keep) == 0):
continue
nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
return nms_boxes
| [
"def",
"apply_nms",
"(",
"all_boxes",
",",
"thresh",
")",
":",
"num_classes",
"=",
"len",
"(",
"all_boxes",
")",
"num_images",
"=",
"len",
"(",
"all_boxes",
"[",
"0",
"]",
")",
"nms_boxes",
"=",
"[",
"[",
"[",
"]",
"for",
"_",
"in",
"xrange",
"(",
"num_images",
")",
"]",
"for",
"_",
"in",
"xrange",
"(",
"num_classes",
")",
"]",
"for",
"cls_ind",
"in",
"xrange",
"(",
"num_classes",
")",
":",
"for",
"im_ind",
"in",
"xrange",
"(",
"num_images",
")",
":",
"dets",
"=",
"all_boxes",
"[",
"cls_ind",
"]",
"[",
"im_ind",
"]",
"if",
"(",
"dets",
"==",
"[",
"]",
")",
":",
"continue",
"keep",
"=",
"nms",
"(",
"dets",
",",
"thresh",
")",
"if",
"(",
"len",
"(",
"keep",
")",
"==",
"0",
")",
":",
"continue",
"nms_boxes",
"[",
"cls_ind",
"]",
"[",
"im_ind",
"]",
"=",
"dets",
"[",
"keep",
",",
":",
"]",
".",
"copy",
"(",
")",
"return",
"nms_boxes"
] | apply non-maximum suppression to all predicted boxes output by the test_net method . | train | false |
37,255 | def test_download_model(qapp, qtmodeltester, config_stub, cookiejar_and_cache):
config_stub.data = {'general': {'private-browsing': False}}
manager = qtnetworkdownloads.DownloadManager(win_id=0)
model = downloads.DownloadModel(manager)
qtmodeltester.check(model)
| [
"def",
"test_download_model",
"(",
"qapp",
",",
"qtmodeltester",
",",
"config_stub",
",",
"cookiejar_and_cache",
")",
":",
"config_stub",
".",
"data",
"=",
"{",
"'general'",
":",
"{",
"'private-browsing'",
":",
"False",
"}",
"}",
"manager",
"=",
"qtnetworkdownloads",
".",
"DownloadManager",
"(",
"win_id",
"=",
"0",
")",
"model",
"=",
"downloads",
".",
"DownloadModel",
"(",
"manager",
")",
"qtmodeltester",
".",
"check",
"(",
"model",
")"
] | simple check for download model internals . | train | false |
37,257 | def _get_credit_course_requirement_xblocks(course_key):
requirements = []
for category in CREDIT_REQUIREMENT_XBLOCK_CATEGORIES:
requirements.extend([{'namespace': block.get_credit_requirement_namespace(), 'name': block.get_credit_requirement_name(), 'display_name': block.get_credit_requirement_display_name(), 'start_date': block.start, 'criteria': {}} for block in _get_xblocks(course_key, category) if _is_credit_requirement(block)])
return requirements
| [
"def",
"_get_credit_course_requirement_xblocks",
"(",
"course_key",
")",
":",
"requirements",
"=",
"[",
"]",
"for",
"category",
"in",
"CREDIT_REQUIREMENT_XBLOCK_CATEGORIES",
":",
"requirements",
".",
"extend",
"(",
"[",
"{",
"'namespace'",
":",
"block",
".",
"get_credit_requirement_namespace",
"(",
")",
",",
"'name'",
":",
"block",
".",
"get_credit_requirement_name",
"(",
")",
",",
"'display_name'",
":",
"block",
".",
"get_credit_requirement_display_name",
"(",
")",
",",
"'start_date'",
":",
"block",
".",
"start",
",",
"'criteria'",
":",
"{",
"}",
"}",
"for",
"block",
"in",
"_get_xblocks",
"(",
"course_key",
",",
"category",
")",
"if",
"_is_credit_requirement",
"(",
"block",
")",
"]",
")",
"return",
"requirements"
] | generate a course structure dictionary for the specified course . | train | false |
37,258 | def piped_in():
with sys.stdin as stdin:
if (not stdin.isatty()):
return stdin.read()
else:
return None
| [
"def",
"piped_in",
"(",
")",
":",
"with",
"sys",
".",
"stdin",
"as",
"stdin",
":",
"if",
"(",
"not",
"stdin",
".",
"isatty",
"(",
")",
")",
":",
"return",
"stdin",
".",
"read",
"(",
")",
"else",
":",
"return",
"None"
] | returns piped input via stdin . | train | false |
37,260 | def pPca(data, dim):
num = data.shape[1]
data = asmatrix(makeCentered(data))
W = asmatrix(standard_normal((num, dim)))
W_ = W[:]
while True:
E = ((inv((W.T * W)) * W.T) * data.T)
(W, W_) = (((data.T * E.T) * inv((E * E.T))), W)
if (abs((W - W_)).max() < 0.001):
break
return W.T
| [
"def",
"pPca",
"(",
"data",
",",
"dim",
")",
":",
"num",
"=",
"data",
".",
"shape",
"[",
"1",
"]",
"data",
"=",
"asmatrix",
"(",
"makeCentered",
"(",
"data",
")",
")",
"W",
"=",
"asmatrix",
"(",
"standard_normal",
"(",
"(",
"num",
",",
"dim",
")",
")",
")",
"W_",
"=",
"W",
"[",
":",
"]",
"while",
"True",
":",
"E",
"=",
"(",
"(",
"inv",
"(",
"(",
"W",
".",
"T",
"*",
"W",
")",
")",
"*",
"W",
".",
"T",
")",
"*",
"data",
".",
"T",
")",
"(",
"W",
",",
"W_",
")",
"=",
"(",
"(",
"(",
"data",
".",
"T",
"*",
"E",
".",
"T",
")",
"*",
"inv",
"(",
"(",
"E",
"*",
"E",
".",
"T",
")",
")",
")",
",",
"W",
")",
"if",
"(",
"abs",
"(",
"(",
"W",
"-",
"W_",
")",
")",
".",
"max",
"(",
")",
"<",
"0.001",
")",
":",
"break",
"return",
"W",
".",
"T"
] | return a matrix which contains the first dim dimensions principal components of data . | train | false |
37,262 | def run_shell(use_plain=False):
from django.db.models.loading import get_models
loaded_models = get_models()
try:
if use_plain:
raise ImportError
import IPython
shell = IPython.Shell.IPShell(argv=[])
shell.mainloop()
except ImportError:
import code
try:
import readline
except ImportError:
pass
else:
import rlcompleter
readline.parse_and_bind('tab:complete')
code.interact()
| [
"def",
"run_shell",
"(",
"use_plain",
"=",
"False",
")",
":",
"from",
"django",
".",
"db",
".",
"models",
".",
"loading",
"import",
"get_models",
"loaded_models",
"=",
"get_models",
"(",
")",
"try",
":",
"if",
"use_plain",
":",
"raise",
"ImportError",
"import",
"IPython",
"shell",
"=",
"IPython",
".",
"Shell",
".",
"IPShell",
"(",
"argv",
"=",
"[",
"]",
")",
"shell",
".",
"mainloop",
"(",
")",
"except",
"ImportError",
":",
"import",
"code",
"try",
":",
"import",
"readline",
"except",
"ImportError",
":",
"pass",
"else",
":",
"import",
"rlcompleter",
"readline",
".",
"parse_and_bind",
"(",
"'tab:complete'",
")",
"code",
".",
"interact",
"(",
")"
] | runs a python interactive interpreter . | train | false |
37,263 | def Condition(*args, **kwargs):
return _Condition(*args, **kwargs)
| [
"def",
"Condition",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"_Condition",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] | factory function that returns a new condition variable object . | train | false |
37,264 | def tax_subtract(price, tax_percentage=21):
if (price is None):
return None
result = (price / ((100 + tax_percentage) / D(100)))
return result.quantize(D('0.01'), ROUND_HALF_UP)
| [
"def",
"tax_subtract",
"(",
"price",
",",
"tax_percentage",
"=",
"21",
")",
":",
"if",
"(",
"price",
"is",
"None",
")",
":",
"return",
"None",
"result",
"=",
"(",
"price",
"/",
"(",
"(",
"100",
"+",
"tax_percentage",
")",
"/",
"D",
"(",
"100",
")",
")",
")",
"return",
"result",
".",
"quantize",
"(",
"D",
"(",
"'0.01'",
")",
",",
"ROUND_HALF_UP",
")"
] | subtract the given tax_percentage from the given price . | train | false |
37,266 | def get_dhcp_opts(context, network_ref, fixedips):
gateway = network_ref['gateway']
if (network_ref['multi_host'] and (not (network_ref['share_address'] or CONF.share_dhcp_address))):
gateway = network_ref['dhcp_server']
hosts = []
if CONF.use_single_default_gateway:
for fixedip in fixedips:
if fixedip.allocated:
vif_id = fixedip.virtual_interface_id
if fixedip.default_route:
hosts.append(_host_dhcp_opts(vif_id, gateway))
else:
hosts.append(_host_dhcp_opts(vif_id))
else:
hosts.append(_host_dhcp_opts(None, gateway))
return '\n'.join(hosts)
| [
"def",
"get_dhcp_opts",
"(",
"context",
",",
"network_ref",
",",
"fixedips",
")",
":",
"gateway",
"=",
"network_ref",
"[",
"'gateway'",
"]",
"if",
"(",
"network_ref",
"[",
"'multi_host'",
"]",
"and",
"(",
"not",
"(",
"network_ref",
"[",
"'share_address'",
"]",
"or",
"CONF",
".",
"share_dhcp_address",
")",
")",
")",
":",
"gateway",
"=",
"network_ref",
"[",
"'dhcp_server'",
"]",
"hosts",
"=",
"[",
"]",
"if",
"CONF",
".",
"use_single_default_gateway",
":",
"for",
"fixedip",
"in",
"fixedips",
":",
"if",
"fixedip",
".",
"allocated",
":",
"vif_id",
"=",
"fixedip",
".",
"virtual_interface_id",
"if",
"fixedip",
".",
"default_route",
":",
"hosts",
".",
"append",
"(",
"_host_dhcp_opts",
"(",
"vif_id",
",",
"gateway",
")",
")",
"else",
":",
"hosts",
".",
"append",
"(",
"_host_dhcp_opts",
"(",
"vif_id",
")",
")",
"else",
":",
"hosts",
".",
"append",
"(",
"_host_dhcp_opts",
"(",
"None",
",",
"gateway",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"hosts",
")"
] | get networks hosts config in dhcp-opts format . | train | false |
37,267 | def _copy_r_to_cat(r, cat):
for key in r._attributes:
new_key = (u'validate_' + key)
cat[new_key] = r._attributes[key]
| [
"def",
"_copy_r_to_cat",
"(",
"r",
",",
"cat",
")",
":",
"for",
"key",
"in",
"r",
".",
"_attributes",
":",
"new_key",
"=",
"(",
"u'validate_'",
"+",
"key",
")",
"cat",
"[",
"new_key",
"]",
"=",
"r",
".",
"_attributes",
"[",
"key",
"]"
] | copy validation result attributes to given vo catalog . | train | false |
37,268 | @pytest.fixture
def nobody():
from django.contrib.auth import get_user_model
return get_user_model().objects.get_nobody_user()
| [
"@",
"pytest",
".",
"fixture",
"def",
"nobody",
"(",
")",
":",
"from",
"django",
".",
"contrib",
".",
"auth",
"import",
"get_user_model",
"return",
"get_user_model",
"(",
")",
".",
"objects",
".",
"get_nobody_user",
"(",
")"
] | require the default anonymous user . | train | false |
37,270 | def to_pgraster(rast):
if ((rast is None) or (rast == '')):
return
rasterheader = (1, 0, len(rast.bands), rast.scale.x, rast.scale.y, rast.origin.x, rast.origin.y, rast.skew.x, rast.skew.y, rast.srs.srid, rast.width, rast.height)
result = pack(POSTGIS_HEADER_STRUCTURE, rasterheader)
for band in rast.bands:
structure = ('B' + GDAL_TO_STRUCT[band.datatype()])
pixeltype = GDAL_TO_POSTGIS[band.datatype()]
if (band.nodata_value is not None):
pixeltype += 64
bandheader = pack(structure, (pixeltype, (band.nodata_value or 0)))
band_data_hex = binascii.hexlify(band.data(as_memoryview=True)).upper()
result += (bandheader + band_data_hex)
return result.decode()
| [
"def",
"to_pgraster",
"(",
"rast",
")",
":",
"if",
"(",
"(",
"rast",
"is",
"None",
")",
"or",
"(",
"rast",
"==",
"''",
")",
")",
":",
"return",
"rasterheader",
"=",
"(",
"1",
",",
"0",
",",
"len",
"(",
"rast",
".",
"bands",
")",
",",
"rast",
".",
"scale",
".",
"x",
",",
"rast",
".",
"scale",
".",
"y",
",",
"rast",
".",
"origin",
".",
"x",
",",
"rast",
".",
"origin",
".",
"y",
",",
"rast",
".",
"skew",
".",
"x",
",",
"rast",
".",
"skew",
".",
"y",
",",
"rast",
".",
"srs",
".",
"srid",
",",
"rast",
".",
"width",
",",
"rast",
".",
"height",
")",
"result",
"=",
"pack",
"(",
"POSTGIS_HEADER_STRUCTURE",
",",
"rasterheader",
")",
"for",
"band",
"in",
"rast",
".",
"bands",
":",
"structure",
"=",
"(",
"'B'",
"+",
"GDAL_TO_STRUCT",
"[",
"band",
".",
"datatype",
"(",
")",
"]",
")",
"pixeltype",
"=",
"GDAL_TO_POSTGIS",
"[",
"band",
".",
"datatype",
"(",
")",
"]",
"if",
"(",
"band",
".",
"nodata_value",
"is",
"not",
"None",
")",
":",
"pixeltype",
"+=",
"64",
"bandheader",
"=",
"pack",
"(",
"structure",
",",
"(",
"pixeltype",
",",
"(",
"band",
".",
"nodata_value",
"or",
"0",
")",
")",
")",
"band_data_hex",
"=",
"binascii",
".",
"hexlify",
"(",
"band",
".",
"data",
"(",
"as_memoryview",
"=",
"True",
")",
")",
".",
"upper",
"(",
")",
"result",
"+=",
"(",
"bandheader",
"+",
"band_data_hex",
")",
"return",
"result",
".",
"decode",
"(",
")"
] | convert a gdalraster into postgis raster format . | train | false |
37,271 | def get_all_dict(module):
if hasattr(module, '__all__'):
all_dict = copy.deepcopy(module.__all__)
else:
all_dict = copy.deepcopy(dir(module))
all_dict = [name for name in all_dict if (not name.startswith('_'))]
for name in ['absolute_import', 'division', 'print_function']:
try:
all_dict.remove(name)
except ValueError:
pass
all_dict = [name for name in all_dict if (not inspect.ismodule(getattr(module, name, None)))]
deprecated = []
not_deprecated = []
for name in all_dict:
f = getattr(module, name, None)
if (callable(f) and is_deprecated(f)):
deprecated.append(name)
else:
not_deprecated.append(name)
others = set(dir(module)).difference(set(deprecated)).difference(set(not_deprecated))
return (not_deprecated, deprecated, others)
| [
"def",
"get_all_dict",
"(",
"module",
")",
":",
"if",
"hasattr",
"(",
"module",
",",
"'__all__'",
")",
":",
"all_dict",
"=",
"copy",
".",
"deepcopy",
"(",
"module",
".",
"__all__",
")",
"else",
":",
"all_dict",
"=",
"copy",
".",
"deepcopy",
"(",
"dir",
"(",
"module",
")",
")",
"all_dict",
"=",
"[",
"name",
"for",
"name",
"in",
"all_dict",
"if",
"(",
"not",
"name",
".",
"startswith",
"(",
"'_'",
")",
")",
"]",
"for",
"name",
"in",
"[",
"'absolute_import'",
",",
"'division'",
",",
"'print_function'",
"]",
":",
"try",
":",
"all_dict",
".",
"remove",
"(",
"name",
")",
"except",
"ValueError",
":",
"pass",
"all_dict",
"=",
"[",
"name",
"for",
"name",
"in",
"all_dict",
"if",
"(",
"not",
"inspect",
".",
"ismodule",
"(",
"getattr",
"(",
"module",
",",
"name",
",",
"None",
")",
")",
")",
"]",
"deprecated",
"=",
"[",
"]",
"not_deprecated",
"=",
"[",
"]",
"for",
"name",
"in",
"all_dict",
":",
"f",
"=",
"getattr",
"(",
"module",
",",
"name",
",",
"None",
")",
"if",
"(",
"callable",
"(",
"f",
")",
"and",
"is_deprecated",
"(",
"f",
")",
")",
":",
"deprecated",
".",
"append",
"(",
"name",
")",
"else",
":",
"not_deprecated",
".",
"append",
"(",
"name",
")",
"others",
"=",
"set",
"(",
"dir",
"(",
"module",
")",
")",
".",
"difference",
"(",
"set",
"(",
"deprecated",
")",
")",
".",
"difference",
"(",
"set",
"(",
"not_deprecated",
")",
")",
"return",
"(",
"not_deprecated",
",",
"deprecated",
",",
"others",
")"
] | return a copy of the __all__ dict with irrelevant items removed . | train | true |
37,272 | def has_program(cmd, args=['--version']):
full_cmd = _convert_args(([cmd] + args))
try:
with open(os.devnull, 'wb') as devnull:
subprocess.check_call(full_cmd, stderr=devnull, stdout=devnull, stdin=devnull)
except OSError:
return False
except subprocess.CalledProcessError:
return False
else:
return True
| [
"def",
"has_program",
"(",
"cmd",
",",
"args",
"=",
"[",
"'--version'",
"]",
")",
":",
"full_cmd",
"=",
"_convert_args",
"(",
"(",
"[",
"cmd",
"]",
"+",
"args",
")",
")",
"try",
":",
"with",
"open",
"(",
"os",
".",
"devnull",
",",
"'wb'",
")",
"as",
"devnull",
":",
"subprocess",
".",
"check_call",
"(",
"full_cmd",
",",
"stderr",
"=",
"devnull",
",",
"stdout",
"=",
"devnull",
",",
"stdin",
"=",
"devnull",
")",
"except",
"OSError",
":",
"return",
"False",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"return",
"False",
"else",
":",
"return",
"True"
] | returns true if cmd can be executed . | train | false |
37,274 | @lower_builtin('getitem', types.Buffer, types.Integer)
@lower_builtin('getitem', types.Buffer, types.SliceType)
def getitem_arraynd_intp(context, builder, sig, args):
(aryty, idxty) = sig.args
(ary, idx) = args
assert (aryty.ndim >= 1)
ary = make_array(aryty)(context, builder, ary)
res = _getitem_array_generic(context, builder, sig.return_type, aryty, ary, (idxty,), (idx,))
return impl_ret_borrowed(context, builder, sig.return_type, res)
| [
"@",
"lower_builtin",
"(",
"'getitem'",
",",
"types",
".",
"Buffer",
",",
"types",
".",
"Integer",
")",
"@",
"lower_builtin",
"(",
"'getitem'",
",",
"types",
".",
"Buffer",
",",
"types",
".",
"SliceType",
")",
"def",
"getitem_arraynd_intp",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"(",
"aryty",
",",
"idxty",
")",
"=",
"sig",
".",
"args",
"(",
"ary",
",",
"idx",
")",
"=",
"args",
"assert",
"(",
"aryty",
".",
"ndim",
">=",
"1",
")",
"ary",
"=",
"make_array",
"(",
"aryty",
")",
"(",
"context",
",",
"builder",
",",
"ary",
")",
"res",
"=",
"_getitem_array_generic",
"(",
"context",
",",
"builder",
",",
"sig",
".",
"return_type",
",",
"aryty",
",",
"ary",
",",
"(",
"idxty",
",",
")",
",",
"(",
"idx",
",",
")",
")",
"return",
"impl_ret_borrowed",
"(",
"context",
",",
"builder",
",",
"sig",
".",
"return_type",
",",
"res",
")"
] | basic indexing with an integer or a slice . | train | false |
37,275 | @pytest.fixture(scope='session')
def submissions():
from pootle_statistics.models import Submission
select_related = ('unit', 'quality_check', 'submitter', 'suggestion')
return {s.id: s for s in Submission.objects.select_related(*select_related).iterator()}
| [
"@",
"pytest",
".",
"fixture",
"(",
"scope",
"=",
"'session'",
")",
"def",
"submissions",
"(",
")",
":",
"from",
"pootle_statistics",
".",
"models",
"import",
"Submission",
"select_related",
"=",
"(",
"'unit'",
",",
"'quality_check'",
",",
"'submitter'",
",",
"'suggestion'",
")",
"return",
"{",
"s",
".",
"id",
":",
"s",
"for",
"s",
"in",
"Submission",
".",
"objects",
".",
"select_related",
"(",
"*",
"select_related",
")",
".",
"iterator",
"(",
")",
"}"
] | a dictionary of submission . | train | false |
37,276 | def addGroove(derivation, negatives):
copyShallow = derivation.elementNode.getCopyShallow()
extrude.setElementNodeToEndStart(copyShallow, Vector3((- derivation.demilength)), Vector3(derivation.demilength))
extrudeDerivation = extrude.ExtrudeDerivation(copyShallow)
bottom = (derivation.demiheight - (0.5 * derivation.grooveWidth))
outside = derivation.demiwidth
top = derivation.demiheight
leftGroove = [complex((- outside), bottom), complex((- derivation.innerDemiwidth), derivation.demiheight), complex((- outside), top)]
rightGroove = [complex(outside, top), complex(derivation.innerDemiwidth, derivation.demiheight), complex(outside, bottom)]
groovesComplex = [leftGroove, rightGroove]
groovesVector3 = euclidean.getVector3Paths(groovesComplex)
extrude.addPositives(extrudeDerivation, groovesVector3, negatives)
| [
"def",
"addGroove",
"(",
"derivation",
",",
"negatives",
")",
":",
"copyShallow",
"=",
"derivation",
".",
"elementNode",
".",
"getCopyShallow",
"(",
")",
"extrude",
".",
"setElementNodeToEndStart",
"(",
"copyShallow",
",",
"Vector3",
"(",
"(",
"-",
"derivation",
".",
"demilength",
")",
")",
",",
"Vector3",
"(",
"derivation",
".",
"demilength",
")",
")",
"extrudeDerivation",
"=",
"extrude",
".",
"ExtrudeDerivation",
"(",
"copyShallow",
")",
"bottom",
"=",
"(",
"derivation",
".",
"demiheight",
"-",
"(",
"0.5",
"*",
"derivation",
".",
"grooveWidth",
")",
")",
"outside",
"=",
"derivation",
".",
"demiwidth",
"top",
"=",
"derivation",
".",
"demiheight",
"leftGroove",
"=",
"[",
"complex",
"(",
"(",
"-",
"outside",
")",
",",
"bottom",
")",
",",
"complex",
"(",
"(",
"-",
"derivation",
".",
"innerDemiwidth",
")",
",",
"derivation",
".",
"demiheight",
")",
",",
"complex",
"(",
"(",
"-",
"outside",
")",
",",
"top",
")",
"]",
"rightGroove",
"=",
"[",
"complex",
"(",
"outside",
",",
"top",
")",
",",
"complex",
"(",
"derivation",
".",
"innerDemiwidth",
",",
"derivation",
".",
"demiheight",
")",
",",
"complex",
"(",
"outside",
",",
"bottom",
")",
"]",
"groovesComplex",
"=",
"[",
"leftGroove",
",",
"rightGroove",
"]",
"groovesVector3",
"=",
"euclidean",
".",
"getVector3Paths",
"(",
"groovesComplex",
")",
"extrude",
".",
"addPositives",
"(",
"extrudeDerivation",
",",
"groovesVector3",
",",
"negatives",
")"
] | add groove on each side of cage . | train | false |
37,278 | def init(mpstate):
return SerialModule(mpstate)
| [
"def",
"init",
"(",
"mpstate",
")",
":",
"return",
"SerialModule",
"(",
"mpstate",
")"
] | connect mappings to the database . | train | false |
37,279 | def _run_subsuite(args):
(runner_class, subsuite_index, subsuite, failfast) = args
runner = runner_class(failfast=failfast)
result = runner.run(subsuite)
return (subsuite_index, result.events)
| [
"def",
"_run_subsuite",
"(",
"args",
")",
":",
"(",
"runner_class",
",",
"subsuite_index",
",",
"subsuite",
",",
"failfast",
")",
"=",
"args",
"runner",
"=",
"runner_class",
"(",
"failfast",
"=",
"failfast",
")",
"result",
"=",
"runner",
".",
"run",
"(",
"subsuite",
")",
"return",
"(",
"subsuite_index",
",",
"result",
".",
"events",
")"
] | run a suite of tests with a remotetestrunner and return a remotetestresult . | train | false |
37,281 | def getNewRepository():
return ExportRepository()
| [
"def",
"getNewRepository",
"(",
")",
":",
"return",
"ExportRepository",
"(",
")"
] | get new repository . | train | false |
37,282 | def get_array_memory_extents(context, builder, arrty, arr, shapes, strides, data):
(lower, upper) = offset_bounds_from_strides(context, builder, arrty, arr, shapes, strides)
return compute_memory_extents(context, builder, lower, upper, data)
| [
"def",
"get_array_memory_extents",
"(",
"context",
",",
"builder",
",",
"arrty",
",",
"arr",
",",
"shapes",
",",
"strides",
",",
"data",
")",
":",
"(",
"lower",
",",
"upper",
")",
"=",
"offset_bounds_from_strides",
"(",
"context",
",",
"builder",
",",
"arrty",
",",
"arr",
",",
"shapes",
",",
"strides",
")",
"return",
"compute_memory_extents",
"(",
"context",
",",
"builder",
",",
"lower",
",",
"upper",
",",
"data",
")"
] | compute a half-open range [start . | train | false |
37,284 | def get_dev_prefix_for_disk_bus(disk_bus):
if CONF.libvirt_disk_prefix:
return CONF.libvirt_disk_prefix
if (disk_bus == 'ide'):
return 'hd'
elif (disk_bus == 'virtio'):
return 'vd'
elif (disk_bus == 'xen'):
return 'sd'
elif (disk_bus == 'scsi'):
return 'sd'
elif (disk_bus == 'usb'):
return 'sd'
elif (disk_bus == 'uml'):
return 'ubd'
elif (disk_bus == 'lxc'):
return None
else:
raise exception.NovaException((_('Unable to determine disk prefix for %s') % disk_bus))
| [
"def",
"get_dev_prefix_for_disk_bus",
"(",
"disk_bus",
")",
":",
"if",
"CONF",
".",
"libvirt_disk_prefix",
":",
"return",
"CONF",
".",
"libvirt_disk_prefix",
"if",
"(",
"disk_bus",
"==",
"'ide'",
")",
":",
"return",
"'hd'",
"elif",
"(",
"disk_bus",
"==",
"'virtio'",
")",
":",
"return",
"'vd'",
"elif",
"(",
"disk_bus",
"==",
"'xen'",
")",
":",
"return",
"'sd'",
"elif",
"(",
"disk_bus",
"==",
"'scsi'",
")",
":",
"return",
"'sd'",
"elif",
"(",
"disk_bus",
"==",
"'usb'",
")",
":",
"return",
"'sd'",
"elif",
"(",
"disk_bus",
"==",
"'uml'",
")",
":",
"return",
"'ubd'",
"elif",
"(",
"disk_bus",
"==",
"'lxc'",
")",
":",
"return",
"None",
"else",
":",
"raise",
"exception",
".",
"NovaException",
"(",
"(",
"_",
"(",
"'Unable to determine disk prefix for %s'",
")",
"%",
"disk_bus",
")",
")"
] | determine the dev prefix for a disk bus . | train | false |
37,285 | @require_context
def virtual_interface_get_by_uuid(context, vif_uuid):
vif_ref = _virtual_interface_query(context).filter_by(uuid=vif_uuid).first()
return vif_ref
| [
"@",
"require_context",
"def",
"virtual_interface_get_by_uuid",
"(",
"context",
",",
"vif_uuid",
")",
":",
"vif_ref",
"=",
"_virtual_interface_query",
"(",
"context",
")",
".",
"filter_by",
"(",
"uuid",
"=",
"vif_uuid",
")",
".",
"first",
"(",
")",
"return",
"vif_ref"
] | gets a virtual interface from the table filtering on vif uuid . | train | false |
37,286 | def in6_ismaddr(str):
return in6_isincluded(str, 'ff00::', 8)
| [
"def",
"in6_ismaddr",
"(",
"str",
")",
":",
"return",
"in6_isincluded",
"(",
"str",
",",
"'ff00::'",
",",
"8",
")"
] | returns true if provided address in printable format belongs to allocated multicast address space . | train | false |
37,287 | def log_rate_limits(target, resp, level=20):
rate_limits = resp['rateLimits']
resetsAt = dt_util.parse_datetime(rate_limits['resetsAt'])
resetsAtTime = (resetsAt - datetime.now(timezone.utc))
rate_limit_msg = 'iOS push notification rate limits for %s: %d sent, %d allowed, %d errors, resets in %s'
_LOGGER.log(level, rate_limit_msg, ios.device_name_for_push_id(target), rate_limits['successful'], rate_limits['maximum'], rate_limits['errors'], str(resetsAtTime).split('.')[0])
| [
"def",
"log_rate_limits",
"(",
"target",
",",
"resp",
",",
"level",
"=",
"20",
")",
":",
"rate_limits",
"=",
"resp",
"[",
"'rateLimits'",
"]",
"resetsAt",
"=",
"dt_util",
".",
"parse_datetime",
"(",
"rate_limits",
"[",
"'resetsAt'",
"]",
")",
"resetsAtTime",
"=",
"(",
"resetsAt",
"-",
"datetime",
".",
"now",
"(",
"timezone",
".",
"utc",
")",
")",
"rate_limit_msg",
"=",
"'iOS push notification rate limits for %s: %d sent, %d allowed, %d errors, resets in %s'",
"_LOGGER",
".",
"log",
"(",
"level",
",",
"rate_limit_msg",
",",
"ios",
".",
"device_name_for_push_id",
"(",
"target",
")",
",",
"rate_limits",
"[",
"'successful'",
"]",
",",
"rate_limits",
"[",
"'maximum'",
"]",
",",
"rate_limits",
"[",
"'errors'",
"]",
",",
"str",
"(",
"resetsAtTime",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")"
] | output rate limit log line at given level . | train | false |
37,288 | def searchForNeededEpisodes():
results = []
for curShow in sickrage.srCore.SHOWLIST:
if curShow.paused:
continue
episodes = wantedEpisodes(curShow, date.fromordinal(1))
result = searchProviders(curShow, episodes, cacheOnly=True)
if result:
results += result
return results
| [
"def",
"searchForNeededEpisodes",
"(",
")",
":",
"results",
"=",
"[",
"]",
"for",
"curShow",
"in",
"sickrage",
".",
"srCore",
".",
"SHOWLIST",
":",
"if",
"curShow",
".",
"paused",
":",
"continue",
"episodes",
"=",
"wantedEpisodes",
"(",
"curShow",
",",
"date",
".",
"fromordinal",
"(",
"1",
")",
")",
"result",
"=",
"searchProviders",
"(",
"curShow",
",",
"episodes",
",",
"cacheOnly",
"=",
"True",
")",
"if",
"result",
":",
"results",
"+=",
"result",
"return",
"results"
] | check providers for details on wanted episodes :return: episodes we have a search hit for . | train | false |
37,290 | @pytest.fixture
def url_widget(qtbot, monkeypatch, config_stub):
config_stub.data = {'colors': {'statusbar.url.bg': 'white', 'statusbar.url.fg': 'black', 'statusbar.url.fg.success': 'yellow', 'statusbar.url.fg.success.https': 'green', 'statusbar.url.fg.error': 'red', 'statusbar.url.fg.warn': 'orange', 'statusbar.url.fg.hover': 'blue'}, 'fonts': {}}
monkeypatch.setattr('qutebrowser.mainwindow.statusbar.url.style.config', config_stub)
widget = url.UrlText()
qtbot.add_widget(widget)
assert (not widget.isVisible())
return widget
| [
"@",
"pytest",
".",
"fixture",
"def",
"url_widget",
"(",
"qtbot",
",",
"monkeypatch",
",",
"config_stub",
")",
":",
"config_stub",
".",
"data",
"=",
"{",
"'colors'",
":",
"{",
"'statusbar.url.bg'",
":",
"'white'",
",",
"'statusbar.url.fg'",
":",
"'black'",
",",
"'statusbar.url.fg.success'",
":",
"'yellow'",
",",
"'statusbar.url.fg.success.https'",
":",
"'green'",
",",
"'statusbar.url.fg.error'",
":",
"'red'",
",",
"'statusbar.url.fg.warn'",
":",
"'orange'",
",",
"'statusbar.url.fg.hover'",
":",
"'blue'",
"}",
",",
"'fonts'",
":",
"{",
"}",
"}",
"monkeypatch",
".",
"setattr",
"(",
"'qutebrowser.mainwindow.statusbar.url.style.config'",
",",
"config_stub",
")",
"widget",
"=",
"url",
".",
"UrlText",
"(",
")",
"qtbot",
".",
"add_widget",
"(",
"widget",
")",
"assert",
"(",
"not",
"widget",
".",
"isVisible",
"(",
")",
")",
"return",
"widget"
] | fixture providing a url widget . | train | false |
37,291 | def get_server_certs(iam, name=None):
results = dict()
try:
if name:
server_certs = [iam.get_server_certificate(ServerCertificateName=name)['ServerCertificate']]
else:
server_certs = iam.list_server_certificates()['ServerCertificateMetadataList']
for server_cert in server_certs:
if (not name):
server_cert = iam.get_server_certificate(ServerCertificateName=server_cert['ServerCertificateName'])['ServerCertificate']
cert_md = server_cert['ServerCertificateMetadata']
results[cert_md['ServerCertificateName']] = {'certificate_body': server_cert['CertificateBody'], 'server_certificate_id': cert_md['ServerCertificateId'], 'server_certificate_name': cert_md['ServerCertificateName'], 'arn': cert_md['Arn'], 'path': cert_md['Path'], 'expiration': cert_md['Expiration'].isoformat(), 'upload_date': cert_md['UploadDate'].isoformat()}
except botocore.exceptions.ClientError:
pass
return results
| [
"def",
"get_server_certs",
"(",
"iam",
",",
"name",
"=",
"None",
")",
":",
"results",
"=",
"dict",
"(",
")",
"try",
":",
"if",
"name",
":",
"server_certs",
"=",
"[",
"iam",
".",
"get_server_certificate",
"(",
"ServerCertificateName",
"=",
"name",
")",
"[",
"'ServerCertificate'",
"]",
"]",
"else",
":",
"server_certs",
"=",
"iam",
".",
"list_server_certificates",
"(",
")",
"[",
"'ServerCertificateMetadataList'",
"]",
"for",
"server_cert",
"in",
"server_certs",
":",
"if",
"(",
"not",
"name",
")",
":",
"server_cert",
"=",
"iam",
".",
"get_server_certificate",
"(",
"ServerCertificateName",
"=",
"server_cert",
"[",
"'ServerCertificateName'",
"]",
")",
"[",
"'ServerCertificate'",
"]",
"cert_md",
"=",
"server_cert",
"[",
"'ServerCertificateMetadata'",
"]",
"results",
"[",
"cert_md",
"[",
"'ServerCertificateName'",
"]",
"]",
"=",
"{",
"'certificate_body'",
":",
"server_cert",
"[",
"'CertificateBody'",
"]",
",",
"'server_certificate_id'",
":",
"cert_md",
"[",
"'ServerCertificateId'",
"]",
",",
"'server_certificate_name'",
":",
"cert_md",
"[",
"'ServerCertificateName'",
"]",
",",
"'arn'",
":",
"cert_md",
"[",
"'Arn'",
"]",
",",
"'path'",
":",
"cert_md",
"[",
"'Path'",
"]",
",",
"'expiration'",
":",
"cert_md",
"[",
"'Expiration'",
"]",
".",
"isoformat",
"(",
")",
",",
"'upload_date'",
":",
"cert_md",
"[",
"'UploadDate'",
"]",
".",
"isoformat",
"(",
")",
"}",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
":",
"pass",
"return",
"results"
] | retrieve the attributes of a server certificate if it exists or all certs . | train | false |
37,292 | def create_xml_runner(test_pkg, test_name, results_file=None, is_rostest=False):
test_name = os.path.basename(test_name)
if (not results_file):
results_file = xml_results_file(test_pkg, test_name, is_rostest)
test_dir = os.path.abspath(os.path.dirname(results_file))
if (not os.path.exists(test_dir)):
try:
makedirs_with_parent_perms(test_dir)
except OSError as error:
raise IOError(('cannot create test results directory [%s]: %s' % (test_dir, str(error))))
elif os.path.isfile(test_dir):
raise Exception(('ERROR: cannot run test suite, file is preventing creation of test dir: %s' % test_dir))
print(('[ROSUNIT] Outputting test results to ' + results_file))
outstream = open(results_file, 'w')
outstream.write('<?xml version="1.0" encoding="utf-8"?>\n')
return XMLTestRunner(stream=outstream)
| [
"def",
"create_xml_runner",
"(",
"test_pkg",
",",
"test_name",
",",
"results_file",
"=",
"None",
",",
"is_rostest",
"=",
"False",
")",
":",
"test_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"test_name",
")",
"if",
"(",
"not",
"results_file",
")",
":",
"results_file",
"=",
"xml_results_file",
"(",
"test_pkg",
",",
"test_name",
",",
"is_rostest",
")",
"test_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"results_file",
")",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"test_dir",
")",
")",
":",
"try",
":",
"makedirs_with_parent_perms",
"(",
"test_dir",
")",
"except",
"OSError",
"as",
"error",
":",
"raise",
"IOError",
"(",
"(",
"'cannot create test results directory [%s]: %s'",
"%",
"(",
"test_dir",
",",
"str",
"(",
"error",
")",
")",
")",
")",
"elif",
"os",
".",
"path",
".",
"isfile",
"(",
"test_dir",
")",
":",
"raise",
"Exception",
"(",
"(",
"'ERROR: cannot run test suite, file is preventing creation of test dir: %s'",
"%",
"test_dir",
")",
")",
"print",
"(",
"(",
"'[ROSUNIT] Outputting test results to '",
"+",
"results_file",
")",
")",
"outstream",
"=",
"open",
"(",
"results_file",
",",
"'w'",
")",
"outstream",
".",
"write",
"(",
"'<?xml version=\"1.0\" encoding=\"utf-8\"?>\\n'",
")",
"return",
"XMLTestRunner",
"(",
"stream",
"=",
"outstream",
")"
] | create the unittest test runner with xml output . | train | false |
37,294 | def test_user_link_unicode():
u = UserProfile(username=u'jm\xfcller', display_name=u'J\xfcrgen M\xfcller', pk=1)
assert (user_link(u) == (u'<a href="%s" title="%s">J\xfcrgen M\xfcller</a>' % (u.get_url_path(), u.name)))
u = UserProfile(username='\xe5\xaf\x92\xe6\x98\x9f', pk=1)
assert (user_link(u) == (u'<a href="%s" title="%s">%s</a>' % (u.get_url_path(), u.name, u.username)))
| [
"def",
"test_user_link_unicode",
"(",
")",
":",
"u",
"=",
"UserProfile",
"(",
"username",
"=",
"u'jm\\xfcller'",
",",
"display_name",
"=",
"u'J\\xfcrgen M\\xfcller'",
",",
"pk",
"=",
"1",
")",
"assert",
"(",
"user_link",
"(",
"u",
")",
"==",
"(",
"u'<a href=\"%s\" title=\"%s\">J\\xfcrgen M\\xfcller</a>'",
"%",
"(",
"u",
".",
"get_url_path",
"(",
")",
",",
"u",
".",
"name",
")",
")",
")",
"u",
"=",
"UserProfile",
"(",
"username",
"=",
"'\\xe5\\xaf\\x92\\xe6\\x98\\x9f'",
",",
"pk",
"=",
"1",
")",
"assert",
"(",
"user_link",
"(",
"u",
")",
"==",
"(",
"u'<a href=\"%s\" title=\"%s\">%s</a>'",
"%",
"(",
"u",
".",
"get_url_path",
"(",
")",
",",
"u",
".",
"name",
",",
"u",
".",
"username",
")",
")",
")"
] | make sure helper wont choke on unicode input . | train | false |
37,295 | def tempnam_no_warning(*args):
return os.tempnam(*args)
| [
"def",
"tempnam_no_warning",
"(",
"*",
"args",
")",
":",
"return",
"os",
".",
"tempnam",
"(",
"*",
"args",
")"
] | an os . | train | false |
37,296 | def make_labeled_form_entry(parent, label, entrywidth=20, entryheight=1, labelwidth=0, borderwidth=None, takefocus=None):
if (label and (label[(-1)] != ':')):
label = (label + ':')
frame = Frame(parent)
label = Label(frame, text=label, width=labelwidth, anchor=E)
label.pack(side=LEFT)
if (entryheight == 1):
if (borderwidth is None):
entry = Entry(frame, relief=SUNKEN, width=entrywidth)
else:
entry = Entry(frame, relief=SUNKEN, width=entrywidth, borderwidth=borderwidth)
entry.pack(side=RIGHT, expand=1, fill=X)
frame.pack(fill=X)
else:
entry = make_text_box(frame, entrywidth, entryheight, 1, 1, takefocus=takefocus)
frame.pack(fill=BOTH, expand=1)
return (entry, frame, label)
| [
"def",
"make_labeled_form_entry",
"(",
"parent",
",",
"label",
",",
"entrywidth",
"=",
"20",
",",
"entryheight",
"=",
"1",
",",
"labelwidth",
"=",
"0",
",",
"borderwidth",
"=",
"None",
",",
"takefocus",
"=",
"None",
")",
":",
"if",
"(",
"label",
"and",
"(",
"label",
"[",
"(",
"-",
"1",
")",
"]",
"!=",
"':'",
")",
")",
":",
"label",
"=",
"(",
"label",
"+",
"':'",
")",
"frame",
"=",
"Frame",
"(",
"parent",
")",
"label",
"=",
"Label",
"(",
"frame",
",",
"text",
"=",
"label",
",",
"width",
"=",
"labelwidth",
",",
"anchor",
"=",
"E",
")",
"label",
".",
"pack",
"(",
"side",
"=",
"LEFT",
")",
"if",
"(",
"entryheight",
"==",
"1",
")",
":",
"if",
"(",
"borderwidth",
"is",
"None",
")",
":",
"entry",
"=",
"Entry",
"(",
"frame",
",",
"relief",
"=",
"SUNKEN",
",",
"width",
"=",
"entrywidth",
")",
"else",
":",
"entry",
"=",
"Entry",
"(",
"frame",
",",
"relief",
"=",
"SUNKEN",
",",
"width",
"=",
"entrywidth",
",",
"borderwidth",
"=",
"borderwidth",
")",
"entry",
".",
"pack",
"(",
"side",
"=",
"RIGHT",
",",
"expand",
"=",
"1",
",",
"fill",
"=",
"X",
")",
"frame",
".",
"pack",
"(",
"fill",
"=",
"X",
")",
"else",
":",
"entry",
"=",
"make_text_box",
"(",
"frame",
",",
"entrywidth",
",",
"entryheight",
",",
"1",
",",
"1",
",",
"takefocus",
"=",
"takefocus",
")",
"frame",
".",
"pack",
"(",
"fill",
"=",
"BOTH",
",",
"expand",
"=",
"1",
")",
"return",
"(",
"entry",
",",
"frame",
",",
"label",
")"
] | subroutine to create a form entry . | train | false |
37,298 | def blame_upstream(registry, xml_parent, data):
XML.SubElement(xml_parent, 'hudson.plugins.blame__upstream__commiters.BlameUpstreamCommitersPublisher')
| [
"def",
"blame_upstream",
"(",
"registry",
",",
"xml_parent",
",",
"data",
")",
":",
"XML",
".",
"SubElement",
"(",
"xml_parent",
",",
"'hudson.plugins.blame__upstream__commiters.BlameUpstreamCommitersPublisher'",
")"
] | yaml: blame-upstream notify upstream commiters when build fails requires the jenkins :jenkins-wiki:blame upstream commiters plugin <blame+upstream+committers+plugin> . | train | false |
37,299 | def _get_drivers():
global _drivers
if (_drivers is None):
_drivers = {}
for notification_driver in CONF.notification_driver:
add_driver(notification_driver)
return _drivers.values()
| [
"def",
"_get_drivers",
"(",
")",
":",
"global",
"_drivers",
"if",
"(",
"_drivers",
"is",
"None",
")",
":",
"_drivers",
"=",
"{",
"}",
"for",
"notification_driver",
"in",
"CONF",
".",
"notification_driver",
":",
"add_driver",
"(",
"notification_driver",
")",
"return",
"_drivers",
".",
"values",
"(",
")"
] | instantiates and returns drivers based on the flag values . | train | false |
37,300 | def localSslFixup(host, sslContext):
if ((not sslContext) and (host in ['localhost', '127.0.0.1', '::1'])):
import ssl
if hasattr(ssl, '_create_unverified_context'):
sslContext = ssl._create_unverified_context()
return sslContext
| [
"def",
"localSslFixup",
"(",
"host",
",",
"sslContext",
")",
":",
"if",
"(",
"(",
"not",
"sslContext",
")",
"and",
"(",
"host",
"in",
"[",
"'localhost'",
",",
"'127.0.0.1'",
",",
"'::1'",
"]",
")",
")",
":",
"import",
"ssl",
"if",
"hasattr",
"(",
"ssl",
",",
"'_create_unverified_context'",
")",
":",
"sslContext",
"=",
"ssl",
".",
"_create_unverified_context",
"(",
")",
"return",
"sslContext"
] | connections to localhost do not need ssl verification as a certificate will never match . | train | true |
37,301 | @verbose
def filter_data(data, sfreq, l_freq, h_freq, picks=None, filter_length='auto', l_trans_bandwidth='auto', h_trans_bandwidth='auto', n_jobs=1, method='fir', iir_params=None, copy=True, phase='zero', fir_window='hamming', verbose=None):
if (not isinstance(data, np.ndarray)):
raise ValueError('data must be an array')
(iir_params, method) = _check_method(method, iir_params)
filt = create_filter(data, sfreq, l_freq, h_freq, filter_length, l_trans_bandwidth, h_trans_bandwidth, method, iir_params, phase, fir_window)
if (method in ('fir', 'fft')):
data = _overlap_add_filter(data, filt, None, phase, picks, n_jobs, copy)
else:
data = _filtfilt(data, filt, picks, n_jobs, copy)
return data
| [
"@",
"verbose",
"def",
"filter_data",
"(",
"data",
",",
"sfreq",
",",
"l_freq",
",",
"h_freq",
",",
"picks",
"=",
"None",
",",
"filter_length",
"=",
"'auto'",
",",
"l_trans_bandwidth",
"=",
"'auto'",
",",
"h_trans_bandwidth",
"=",
"'auto'",
",",
"n_jobs",
"=",
"1",
",",
"method",
"=",
"'fir'",
",",
"iir_params",
"=",
"None",
",",
"copy",
"=",
"True",
",",
"phase",
"=",
"'zero'",
",",
"fir_window",
"=",
"'hamming'",
",",
"verbose",
"=",
"None",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"data",
",",
"np",
".",
"ndarray",
")",
")",
":",
"raise",
"ValueError",
"(",
"'data must be an array'",
")",
"(",
"iir_params",
",",
"method",
")",
"=",
"_check_method",
"(",
"method",
",",
"iir_params",
")",
"filt",
"=",
"create_filter",
"(",
"data",
",",
"sfreq",
",",
"l_freq",
",",
"h_freq",
",",
"filter_length",
",",
"l_trans_bandwidth",
",",
"h_trans_bandwidth",
",",
"method",
",",
"iir_params",
",",
"phase",
",",
"fir_window",
")",
"if",
"(",
"method",
"in",
"(",
"'fir'",
",",
"'fft'",
")",
")",
":",
"data",
"=",
"_overlap_add_filter",
"(",
"data",
",",
"filt",
",",
"None",
",",
"phase",
",",
"picks",
",",
"n_jobs",
",",
"copy",
")",
"else",
":",
"data",
"=",
"_filtfilt",
"(",
"data",
",",
"filt",
",",
"picks",
",",
"n_jobs",
",",
"copy",
")",
"return",
"data"
] | filter a subset of channels . | train | false |
37,303 | def _find_compound_unit(numerator_unit, denominator_unit, locale=LC_NUMERIC):
locale = Locale.parse(locale)
numerator_unit = _find_unit_pattern(numerator_unit, locale=locale)
denominator_unit = _find_unit_pattern(denominator_unit, locale=locale)
if (not (numerator_unit and denominator_unit)):
return None
bare_numerator_unit = numerator_unit.split('-', 1)[(-1)]
bare_denominator_unit = denominator_unit.split('-', 1)[(-1)]
return _find_unit_pattern(('%s-per-%s' % (bare_numerator_unit, bare_denominator_unit)), locale=locale)
| [
"def",
"_find_compound_unit",
"(",
"numerator_unit",
",",
"denominator_unit",
",",
"locale",
"=",
"LC_NUMERIC",
")",
":",
"locale",
"=",
"Locale",
".",
"parse",
"(",
"locale",
")",
"numerator_unit",
"=",
"_find_unit_pattern",
"(",
"numerator_unit",
",",
"locale",
"=",
"locale",
")",
"denominator_unit",
"=",
"_find_unit_pattern",
"(",
"denominator_unit",
",",
"locale",
"=",
"locale",
")",
"if",
"(",
"not",
"(",
"numerator_unit",
"and",
"denominator_unit",
")",
")",
":",
"return",
"None",
"bare_numerator_unit",
"=",
"numerator_unit",
".",
"split",
"(",
"'-'",
",",
"1",
")",
"[",
"(",
"-",
"1",
")",
"]",
"bare_denominator_unit",
"=",
"denominator_unit",
".",
"split",
"(",
"'-'",
",",
"1",
")",
"[",
"(",
"-",
"1",
")",
"]",
"return",
"_find_unit_pattern",
"(",
"(",
"'%s-per-%s'",
"%",
"(",
"bare_numerator_unit",
",",
"bare_denominator_unit",
")",
")",
",",
"locale",
"=",
"locale",
")"
] | find a predefined compound unit pattern . | train | false |
37,304 | @allow_unvouched
@never_cache
def delete_email(request, email_pk):
user = User.objects.get(pk=request.user.id)
profile = user.userprofile
if (not ExternalAccount.objects.filter(user=profile, pk=email_pk).exists()):
raise Http404()
ExternalAccount.objects.get(pk=email_pk).delete()
return redirect('phonebook:profile_edit')
| [
"@",
"allow_unvouched",
"@",
"never_cache",
"def",
"delete_email",
"(",
"request",
",",
"email_pk",
")",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"request",
".",
"user",
".",
"id",
")",
"profile",
"=",
"user",
".",
"userprofile",
"if",
"(",
"not",
"ExternalAccount",
".",
"objects",
".",
"filter",
"(",
"user",
"=",
"profile",
",",
"pk",
"=",
"email_pk",
")",
".",
"exists",
"(",
")",
")",
":",
"raise",
"Http404",
"(",
")",
"ExternalAccount",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"email_pk",
")",
".",
"delete",
"(",
")",
"return",
"redirect",
"(",
"'phonebook:profile_edit'",
")"
] | delete alternate email address . | train | false |
37,305 | def pillars(opts, functions, context=None):
ret = LazyLoader(_module_dirs(opts, 'pillar'), opts, tag='pillar', pack={'__salt__': functions, '__context__': context, '__utils__': utils(opts)})
ret.pack['__ext_pillar__'] = ret
return FilterDictWrapper(ret, '.ext_pillar')
| [
"def",
"pillars",
"(",
"opts",
",",
"functions",
",",
"context",
"=",
"None",
")",
":",
"ret",
"=",
"LazyLoader",
"(",
"_module_dirs",
"(",
"opts",
",",
"'pillar'",
")",
",",
"opts",
",",
"tag",
"=",
"'pillar'",
",",
"pack",
"=",
"{",
"'__salt__'",
":",
"functions",
",",
"'__context__'",
":",
"context",
",",
"'__utils__'",
":",
"utils",
"(",
"opts",
")",
"}",
")",
"ret",
".",
"pack",
"[",
"'__ext_pillar__'",
"]",
"=",
"ret",
"return",
"FilterDictWrapper",
"(",
"ret",
",",
"'.ext_pillar'",
")"
] | returns the pillars modules . | train | true |
def _pull_status(data, item):
    """Fold one status *item* from a ``docker pull`` stream into *data*.

    Layer IDs are accumulated under ``data['Layers']['Already_Pulled']``
    and ``data['Layers']['Pulled']``; the final ``Status: ...`` line is
    stored under ``data['Status']``.
    """
    def _already_exists(id_):
        """Record a layer that was already present locally."""
        already_pulled = data.setdefault('Layers', {}).setdefault('Already_Pulled', [])
        if (id_ not in already_pulled):
            already_pulled.append(id_)

    def _new_layer(id_):
        """Record a layer that was downloaded by this pull."""
        pulled = data.setdefault('Layers', {}).setdefault('Pulled', [])
        if (id_ not in pulled):
            pulled.append(id_)

    if ('dockerng._pull_status' not in __context__):
        log.warning('_pull_status context variable was not populated, information on downloaded layers may be inaccurate. Please report this to the SaltStack development team, and if possible include the image (and tag) that was being pulled.')
        __context__['dockerng._pull_status'] = NOTSET
    status = item['status']
    if (status == 'Already exists'):
        _already_exists(item['id'])
    # BUG FIX: the original used ``status in 'Pull complete'``, which is a
    # substring test (e.g. status == 'complete' or '' would match).  Use
    # equality to match the exact status message.
    elif (status == 'Pull complete'):
        _new_layer(item['id'])
    elif status.startswith('Status: '):
        # Strip the 'Status: ' prefix (8 chars) and keep the remainder.
        data['Status'] = status[8:]
    elif (status == 'Download complete'):
        if (__context__['dockerng._pull_status'] is not NOTSET):
            id_ = item['id']
            # Classify against the layer IDs captured before the pull ran.
            if (id_ in __context__['dockerng._pull_status']):
                _already_exists(id_)
            else:
                _new_layer(id_)
| [
"def",
"_pull_status",
"(",
"data",
",",
"item",
")",
":",
"def",
"_already_exists",
"(",
"id_",
")",
":",
"already_pulled",
"=",
"data",
".",
"setdefault",
"(",
"'Layers'",
",",
"{",
"}",
")",
".",
"setdefault",
"(",
"'Already_Pulled'",
",",
"[",
"]",
")",
"if",
"(",
"id_",
"not",
"in",
"already_pulled",
")",
":",
"already_pulled",
".",
"append",
"(",
"id_",
")",
"def",
"_new_layer",
"(",
"id_",
")",
":",
"pulled",
"=",
"data",
".",
"setdefault",
"(",
"'Layers'",
",",
"{",
"}",
")",
".",
"setdefault",
"(",
"'Pulled'",
",",
"[",
"]",
")",
"if",
"(",
"id_",
"not",
"in",
"pulled",
")",
":",
"pulled",
".",
"append",
"(",
"id_",
")",
"if",
"(",
"'dockerng._pull_status'",
"not",
"in",
"__context__",
")",
":",
"log",
".",
"warning",
"(",
"'_pull_status context variable was not populated, information on downloaded layers may be inaccurate. Please report this to the SaltStack development team, and if possible include the image (and tag) that was being pulled.'",
")",
"__context__",
"[",
"'dockerng._pull_status'",
"]",
"=",
"NOTSET",
"status",
"=",
"item",
"[",
"'status'",
"]",
"if",
"(",
"status",
"==",
"'Already exists'",
")",
":",
"_already_exists",
"(",
"item",
"[",
"'id'",
"]",
")",
"elif",
"(",
"status",
"in",
"'Pull complete'",
")",
":",
"_new_layer",
"(",
"item",
"[",
"'id'",
"]",
")",
"elif",
"status",
".",
"startswith",
"(",
"'Status: '",
")",
":",
"data",
"[",
"'Status'",
"]",
"=",
"status",
"[",
"8",
":",
"]",
"elif",
"(",
"status",
"==",
"'Download complete'",
")",
":",
"if",
"(",
"__context__",
"[",
"'dockerng._pull_status'",
"]",
"is",
"not",
"NOTSET",
")",
":",
"id_",
"=",
"item",
"[",
"'id'",
"]",
"if",
"(",
"id_",
"in",
"__context__",
"[",
"'dockerng._pull_status'",
"]",
")",
":",
"_already_exists",
"(",
"id_",
")",
"else",
":",
"_new_layer",
"(",
"id_",
")"
] | process a status update from a docker pull . | train | true |
def common_params(task_instance, task_cls):
    """Return the parameter values of *task_instance* whose names also
    appear on the (uninstantiated) task class *task_cls*.
    """
    if not isinstance(task_cls, task.Register):
        raise TypeError('task_cls must be an uninstantiated Task')
    # Parameter names declared on each side.
    instance_names = dict(task_instance.get_params()).keys()
    cls_params = dict(task_cls.get_params())
    shared_names = set(instance_names) & set(cls_params.keys())
    # (name, Parameter) pairs and concrete values for the shared names.
    shared_pairs = [(name, cls_params[name]) for name in shared_names]
    shared_kwargs = {name: task_instance.param_kwargs[name] for name in shared_names}
    return dict(task_instance.get_param_values(shared_pairs, [], shared_kwargs))
| [
"def",
"common_params",
"(",
"task_instance",
",",
"task_cls",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"task_cls",
",",
"task",
".",
"Register",
")",
")",
":",
"raise",
"TypeError",
"(",
"'task_cls must be an uninstantiated Task'",
")",
"task_instance_param_names",
"=",
"dict",
"(",
"task_instance",
".",
"get_params",
"(",
")",
")",
".",
"keys",
"(",
")",
"task_cls_params_dict",
"=",
"dict",
"(",
"task_cls",
".",
"get_params",
"(",
")",
")",
"task_cls_param_names",
"=",
"task_cls_params_dict",
".",
"keys",
"(",
")",
"common_param_names",
"=",
"set",
"(",
"task_instance_param_names",
")",
".",
"intersection",
"(",
"set",
"(",
"task_cls_param_names",
")",
")",
"common_param_vals",
"=",
"[",
"(",
"key",
",",
"task_cls_params_dict",
"[",
"key",
"]",
")",
"for",
"key",
"in",
"common_param_names",
"]",
"common_kwargs",
"=",
"dict",
"(",
"(",
"(",
"key",
",",
"task_instance",
".",
"param_kwargs",
"[",
"key",
"]",
")",
"for",
"key",
"in",
"common_param_names",
")",
")",
"vals",
"=",
"dict",
"(",
"task_instance",
".",
"get_param_values",
"(",
"common_param_vals",
",",
"[",
"]",
",",
"common_kwargs",
")",
")",
"return",
"vals"
] | grab all the values in task_instance that are found in task_cls . | train | true |
def volume_glance_metadata_get(context, volume_id):
    """Return the Glance metadata for the given volume.

    Thin delegation to the configured database backend (IMPL); see the
    backend implementation for the return shape.
    """
    return IMPL.volume_glance_metadata_get(context, volume_id)
| [
"def",
"volume_glance_metadata_get",
"(",
"context",
",",
"volume_id",
")",
":",
"return",
"IMPL",
".",
"volume_glance_metadata_get",
"(",
"context",
",",
"volume_id",
")"
] | return the glance metadata for a volume . | train | false |
def SplitJar(input_jar, output_directory, maximum_size=sys.maxsize, include_predicate=(lambda name: True)):
    """Copy the entries of *input_jar* into jars under *output_directory*,
    splitting so no output jar exceeds *maximum_size* bytes.

    Manifest and index entries are always skipped; other entries are kept
    only if *include_predicate*(name) is true.

    Raises:
        ValueError: if *input_jar* does not end in ``.jar``.
        JarWriteError: if a single entry is larger than *maximum_size*.
    """
    # NOTE: the default was ``sys.maxint``, which does not exist on
    # Python 3.  ``sys.maxsize`` is available (and equal) on Python 2 as
    # well, so this is a portable, backward-compatible default.
    if (not input_jar.lower().endswith('.jar')):
        raise ValueError(('Does not end with .jar: %s' % input_jar))
    base_name = os.path.splitext(os.path.basename(input_jar))[0]
    with _Maker(output_directory, base_name, maximum_size) as maker:
        for (name, contents) in JarContents(input_jar):
            # Skip jar metadata; it is regenerated for each output jar.
            if ((name != 'META-INF/MANIFEST.MF') and (name != 'INDEX.LIST') and include_predicate(name)):
                size = len(contents)
                # A single oversized entry can never fit in any split.
                if (size > maximum_size):
                    raise JarWriteError(('Entry %s in %s has size %d which is bigger than the maximum jar size %d' % (name, input_jar, size, maximum_size)))
                maker.WriteStr(name, contents)
| [
"def",
"SplitJar",
"(",
"input_jar",
",",
"output_directory",
",",
"maximum_size",
"=",
"sys",
".",
"maxint",
",",
"include_predicate",
"=",
"(",
"lambda",
"name",
":",
"True",
")",
")",
":",
"if",
"(",
"not",
"input_jar",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.jar'",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Does not end with .jar: %s'",
"%",
"input_jar",
")",
")",
"base_name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"input_jar",
")",
")",
"[",
"0",
"]",
"with",
"_Maker",
"(",
"output_directory",
",",
"base_name",
",",
"maximum_size",
")",
"as",
"maker",
":",
"for",
"(",
"name",
",",
"contents",
")",
"in",
"JarContents",
"(",
"input_jar",
")",
":",
"if",
"(",
"(",
"name",
"!=",
"'META-INF/MANIFEST.MF'",
")",
"and",
"(",
"name",
"!=",
"'INDEX.LIST'",
")",
"and",
"include_predicate",
"(",
"name",
")",
")",
":",
"size",
"=",
"len",
"(",
"contents",
")",
"if",
"(",
"size",
">",
"maximum_size",
")",
":",
"raise",
"JarWriteError",
"(",
"(",
"'Entry %s in %s has size %d which is bigger than the maximum jar size %d'",
"%",
"(",
"name",
",",
"input_jar",
",",
"size",
",",
"maximum_size",
")",
")",
")",
"maker",
".",
"WriteStr",
"(",
"name",
",",
"contents",
")"
] | copies an input jar into a directory . | train | false |
def quote_windows(value):
    """Quote *value* so it survives as a single token on a Windows
    command line (MS C runtime quoting rules)."""
    return list2cmdline([value])
| [
"def",
"quote_windows",
"(",
"value",
")",
":",
"value",
"=",
"list2cmdline",
"(",
"[",
"value",
"]",
")",
"return",
"value"
] | return a quoted version of the value which can be used as one token in a windows command line . | train | false |
def validate_torch_files(files):
    """Validate an uploaded Torch model (weights + Lua definition) and
    stage both uploads into temporary files.

    Args:
        files: the request's file mapping; must contain 'weights_file'
            (a .t7 file) and 'model_def_file' (a .lua file).

    Returns:
        (weights_path, model_def_path) tuple of temp-file paths.

    Raises:
        werkzeug.exceptions.BadRequest: if either file is missing or has
            the wrong extension.
    """
    # The original compared with ``is ''`` (an identity test that only
    # works because CPython interns the empty string) and indexed
    # ``rsplit('.', 1)[1]``, which raises IndexError for filenames with
    # no dot.  Use truthiness and [-1] so bad uploads get a clean 400.
    weights_name = str(files['weights_file'].filename)
    if not weights_name:
        raise werkzeug.exceptions.BadRequest('Missing weights file')
    elif weights_name.rsplit('.', 1)[-1] != 't7':
        raise werkzeug.exceptions.BadRequest('Weights must be a .t7 file')
    model_def_name = str(files['model_def_file'].filename)
    if not model_def_name:
        raise werkzeug.exceptions.BadRequest('Missing model definition file')
    elif model_def_name.rsplit('.', 1)[-1] != 'lua':
        raise werkzeug.exceptions.BadRequest('Model definition must be .lua file')
    weights_path = get_tempfile(flask.request.files['weights_file'], '.t7')
    model_def_path = get_tempfile(flask.request.files['model_def_file'], '.lua')
    return (weights_path, model_def_path)
| [
"def",
"validate_torch_files",
"(",
"files",
")",
":",
"if",
"(",
"str",
"(",
"files",
"[",
"'weights_file'",
"]",
".",
"filename",
")",
"is",
"''",
")",
":",
"raise",
"werkzeug",
".",
"exceptions",
".",
"BadRequest",
"(",
"'Missing weights file'",
")",
"elif",
"(",
"files",
"[",
"'weights_file'",
"]",
".",
"filename",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"[",
"1",
"]",
"!=",
"'t7'",
")",
":",
"raise",
"werkzeug",
".",
"exceptions",
".",
"BadRequest",
"(",
"'Weights must be a .t7 file'",
")",
"if",
"(",
"str",
"(",
"files",
"[",
"'model_def_file'",
"]",
".",
"filename",
")",
"is",
"''",
")",
":",
"raise",
"werkzeug",
".",
"exceptions",
".",
"BadRequest",
"(",
"'Missing model definition file'",
")",
"elif",
"(",
"files",
"[",
"'model_def_file'",
"]",
".",
"filename",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"[",
"1",
"]",
"!=",
"'lua'",
")",
":",
"raise",
"werkzeug",
".",
"exceptions",
".",
"BadRequest",
"(",
"'Model definition must be .lua file'",
")",
"weights_path",
"=",
"get_tempfile",
"(",
"flask",
".",
"request",
".",
"files",
"[",
"'weights_file'",
"]",
",",
"'.t7'",
")",
"model_def_path",
"=",
"get_tempfile",
"(",
"flask",
".",
"request",
".",
"files",
"[",
"'model_def_file'",
"]",
",",
"'.lua'",
")",
"return",
"(",
"weights_path",
",",
"model_def_path",
")"
] | upload a torch model . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.