| id_within_dataset (int64, 1 – 55.5k) | snippet (string, 19 – 14.2k chars) | tokens (list, 6 – 1.63k items) | nl (string, 6 – 352 chars) | split_within_dataset (1 class: train) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
55,201
|
def condense_zero_units(css):
return re.sub('([\\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)', '\\1\\2', css)
|
[
"def",
"condense_zero_units",
"(",
"css",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'([\\\\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)'",
",",
"'\\\\1\\\\2'",
",",
"css",
")"
] |
replace a zero value that carries a unit (e.g. 0px) with a bare 0 .
|
train
| false
|
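A quick standalone check of the row above (the regex is copied verbatim from the snippet; the example input and expected output are my own):

```python
import re

def condense_zero_units(css):
    # keep the separator (\1) and the zero (\2), drop the unit: "margin: 0px" -> "margin: 0"
    return re.sub('([\\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)', '\\1\\2', css)

print(condense_zero_units('margin: 0px; padding:0em;'))  # margin: 0; padding:0;
```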
55,202
|
def get_file_for_svn_txn(repo_path, txn_id, filename):
return subproc_check_output(['svnlook', 'cat', repo_path, '-t', txn_id, filename])
|
[
"def",
"get_file_for_svn_txn",
"(",
"repo_path",
",",
"txn_id",
",",
"filename",
")",
":",
"return",
"subproc_check_output",
"(",
"[",
"'svnlook'",
",",
"'cat'",
",",
"repo_path",
",",
"'-t'",
",",
"txn_id",
",",
"filename",
"]",
")"
] |
returns file in an svn transaction .
|
train
| false
|
55,203
|
def _get_service_user(user, service_username):
if ((not user) or user.is_anonymous()):
try:
user = User.objects.get(username=service_username)
except User.DoesNotExist:
user = None
return user
|
[
"def",
"_get_service_user",
"(",
"user",
",",
"service_username",
")",
":",
"if",
"(",
"(",
"not",
"user",
")",
"or",
"user",
".",
"is_anonymous",
"(",
")",
")",
":",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"service_username",
")",
"except",
"User",
".",
"DoesNotExist",
":",
"user",
"=",
"None",
"return",
"user"
] |
retrieve and return the catalog integration service user object if the passed user is none or anonymous .
|
train
| false
|
55,204
|
def sobel_v(image, mask=None):
assert_nD(image, 2)
image = img_as_float(image)
result = convolve(image, VSOBEL_WEIGHTS)
return _mask_filter_result(result, mask)
|
[
"def",
"sobel_v",
"(",
"image",
",",
"mask",
"=",
"None",
")",
":",
"assert_nD",
"(",
"image",
",",
"2",
")",
"image",
"=",
"img_as_float",
"(",
"image",
")",
"result",
"=",
"convolve",
"(",
"image",
",",
"VSOBEL_WEIGHTS",
")",
"return",
"_mask_filter_result",
"(",
"result",
",",
"mask",
")"
] |
find the vertical edges of an image using the sobel transform .
|
train
| false
|
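`assert_nD`, `img_as_float`, and `VSOBEL_WEIGHTS` are defined elsewhere in skimage; a minimal standalone sketch of the same convolution, with the vertical Sobel kernel written out by hand (it should match skimage's `VSOBEL_WEIGHTS` up to sign and scaling):

```python
import numpy as np
from scipy.ndimage import convolve

# vertical Sobel kernel: responds to vertical edges (horizontal intensity changes)
VSOBEL = np.array([[1.0, 0.0, -1.0],
                   [2.0, 0.0, -2.0],
                   [1.0, 0.0, -1.0]]) / 4.0

image = np.zeros((5, 5))
image[:, 2:] = 1.0              # a vertical step edge at column 2
print(convolve(image, VSOBEL))  # nonzero response only around the edge column
```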
55,205
|
def single_line(text):
return re.sub(' +', ' ', normalize_newlines(text).replace('\n', '')).strip()
|
[
"def",
"single_line",
"(",
"text",
")",
":",
"return",
"re",
".",
"sub",
"(",
"' +'",
",",
"' '",
",",
"normalize_newlines",
"(",
"text",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
")",
".",
"strip",
"(",
")"
] |
quick utility to make comparing template output easier .
|
train
| false
|
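`normalize_newlines` comes from `django.utils.text`; with a small stand-in for it, the row above runs on its own:

```python
import re

def normalize_newlines(text):
    # stand-in for django.utils.text.normalize_newlines: fold \r\n and \r into \n
    return re.sub(r'\r\n|\r', '\n', text)

def single_line(text):
    return re.sub(' +', ' ', normalize_newlines(text).replace('\n', '')).strip()

print(single_line('  a\r\n b\n\n  c  '))  # a b c
```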
55,207
|
def get_file_title(files_path_list, filename):
fname = os.path.basename(filename)
same_name_files = get_same_name_files(files_path_list, fname)
if (len(same_name_files) > 1):
compare_path = shortest_path(same_name_files)
if (compare_path == filename):
same_name_files.remove(path_components(filename))
compare_path = shortest_path(same_name_files)
diff_path = differentiate_prefix(path_components(filename), path_components(compare_path))
diff_path_length = len(diff_path)
path_component = path_components(diff_path)
if ((diff_path_length > 20) and (len(path_component) > 2)):
if ((path_component[0] != '/') and (path_component[0] != '')):
path_component = [path_component[0], '...', path_component[(-1)]]
else:
path_component = [path_component[2], '...', path_component[(-1)]]
diff_path = os.path.join(*path_component)
fname = ((fname + ' - ') + diff_path)
return fname
|
[
"def",
"get_file_title",
"(",
"files_path_list",
",",
"filename",
")",
":",
"fname",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"same_name_files",
"=",
"get_same_name_files",
"(",
"files_path_list",
",",
"fname",
")",
"if",
"(",
"len",
"(",
"same_name_files",
")",
">",
"1",
")",
":",
"compare_path",
"=",
"shortest_path",
"(",
"same_name_files",
")",
"if",
"(",
"compare_path",
"==",
"filename",
")",
":",
"same_name_files",
".",
"remove",
"(",
"path_components",
"(",
"filename",
")",
")",
"compare_path",
"=",
"shortest_path",
"(",
"same_name_files",
")",
"diff_path",
"=",
"differentiate_prefix",
"(",
"path_components",
"(",
"filename",
")",
",",
"path_components",
"(",
"compare_path",
")",
")",
"diff_path_length",
"=",
"len",
"(",
"diff_path",
")",
"path_component",
"=",
"path_components",
"(",
"diff_path",
")",
"if",
"(",
"(",
"diff_path_length",
">",
"20",
")",
"and",
"(",
"len",
"(",
"path_component",
")",
">",
"2",
")",
")",
":",
"if",
"(",
"(",
"path_component",
"[",
"0",
"]",
"!=",
"'/'",
")",
"and",
"(",
"path_component",
"[",
"0",
"]",
"!=",
"''",
")",
")",
":",
"path_component",
"=",
"[",
"path_component",
"[",
"0",
"]",
",",
"'...'",
",",
"path_component",
"[",
"(",
"-",
"1",
")",
"]",
"]",
"else",
":",
"path_component",
"=",
"[",
"path_component",
"[",
"2",
"]",
",",
"'...'",
",",
"path_component",
"[",
"(",
"-",
"1",
")",
"]",
"]",
"diff_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"*",
"path_component",
")",
"fname",
"=",
"(",
"(",
"fname",
"+",
"' - '",
")",
"+",
"diff_path",
")",
"return",
"fname"
] |
get tab title without ambiguation .
|
train
| true
|
55,208
|
def query_chooser(query):
ids = []
for (column, operator, value) in _get_query_comparisons(query):
if column.shares_lineage(weather_locations.c.continent):
if (operator == operators.eq):
ids.append(shard_lookup[value])
elif (operator == operators.in_op):
ids.extend((shard_lookup[v] for v in value))
if (len(ids) == 0):
return ['north_america', 'asia', 'europe', 'south_america']
else:
return ids
|
[
"def",
"query_chooser",
"(",
"query",
")",
":",
"ids",
"=",
"[",
"]",
"for",
"(",
"column",
",",
"operator",
",",
"value",
")",
"in",
"_get_query_comparisons",
"(",
"query",
")",
":",
"if",
"column",
".",
"shares_lineage",
"(",
"weather_locations",
".",
"c",
".",
"continent",
")",
":",
"if",
"(",
"operator",
"==",
"operators",
".",
"eq",
")",
":",
"ids",
".",
"append",
"(",
"shard_lookup",
"[",
"value",
"]",
")",
"elif",
"(",
"operator",
"==",
"operators",
".",
"in_op",
")",
":",
"ids",
".",
"extend",
"(",
"(",
"shard_lookup",
"[",
"v",
"]",
"for",
"v",
"in",
"value",
")",
")",
"if",
"(",
"len",
"(",
"ids",
")",
"==",
"0",
")",
":",
"return",
"[",
"'north_america'",
",",
"'asia'",
",",
"'europe'",
",",
"'south_america'",
"]",
"else",
":",
"return",
"ids"
] |
query chooser : return the shard ids that may hold results for the given query .
|
train
| false
|
55,209
|
def remove_ignorable_whitespace(node):
if (node.tail and (node.tail.strip() == '')):
node.tail = None
for child in node:
if (node.text and (node.text.strip() == '')):
node.text = None
remove_ignorable_whitespace(child)
|
[
"def",
"remove_ignorable_whitespace",
"(",
"node",
")",
":",
"if",
"(",
"node",
".",
"tail",
"and",
"(",
"node",
".",
"tail",
".",
"strip",
"(",
")",
"==",
"''",
")",
")",
":",
"node",
".",
"tail",
"=",
"None",
"for",
"child",
"in",
"node",
":",
"if",
"(",
"node",
".",
"text",
"and",
"(",
"node",
".",
"text",
".",
"strip",
"(",
")",
"==",
"''",
")",
")",
":",
"node",
".",
"text",
"=",
"None",
"remove_ignorable_whitespace",
"(",
"child",
")"
] |
remove insignificant whitespace from xml nodes ; it should only remove whitespace between elements and sub-elements .
|
train
| false
|
55,212
|
def get_time_format(format='medium', locale=LC_TIME):
return Locale.parse(locale).time_formats[format]
|
[
"def",
"get_time_format",
"(",
"format",
"=",
"'medium'",
",",
"locale",
"=",
"LC_TIME",
")",
":",
"return",
"Locale",
".",
"parse",
"(",
"locale",
")",
".",
"time_formats",
"[",
"format",
"]"
] |
return the time formatting patterns used by the locale for the specified format .
|
train
| false
|
55,214
|
def getDisplayedDialogFromConstructor(repository):
try:
getReadRepository(repository)
return RepositoryDialog(repository, Tkinter.Tk())
except:
print 'this should never happen, getDisplayedDialogFromConstructor in settings could not open'
print repository
traceback.print_exc(file=sys.stdout)
return None
|
[
"def",
"getDisplayedDialogFromConstructor",
"(",
"repository",
")",
":",
"try",
":",
"getReadRepository",
"(",
"repository",
")",
"return",
"RepositoryDialog",
"(",
"repository",
",",
"Tkinter",
".",
"Tk",
"(",
")",
")",
"except",
":",
"print",
"'this should never happen, getDisplayedDialogFromConstructor in settings could not open'",
"print",
"repository",
"traceback",
".",
"print_exc",
"(",
"file",
"=",
"sys",
".",
"stdout",
")",
"return",
"None"
] |
display the repository dialog .
|
train
| false
|
55,215
|
@cors_enabled('*')
def serve_cors(*args, **kwargs):
if (not settings.DEBUG):
raise RuntimeError("Don't use kitsune.sumo.views.serve_cors in production.")
from django.views.static import serve
return serve(*args, **kwargs)
|
[
"@",
"cors_enabled",
"(",
"'*'",
")",
"def",
"serve_cors",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"settings",
".",
"DEBUG",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Don't use kitsune.sumo.views.serve_cors in production.\"",
")",
"from",
"django",
".",
"views",
".",
"static",
"import",
"serve",
"return",
"serve",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] |
a wrapper around django.views.static.serve that serves files with cors enabled .
|
train
| false
|
55,217
|
def from_castra(x, columns=None, index=False):
from castra import Castra
if (not isinstance(x, Castra)):
x = Castra(x, readonly=True)
elif (not x._readonly):
x = Castra(x.path, readonly=True)
if (columns is None):
columns = x.columns
name = ('from-castra-' + tokenize(os.path.getmtime(x.path), x.path, columns, index))
dsk = dict((((name, i), (load_castra_partition, x, part, columns, index)) for (i, part) in enumerate(x.partitions)))
return Bag(dsk, name, len(x.partitions))
|
[
"def",
"from_castra",
"(",
"x",
",",
"columns",
"=",
"None",
",",
"index",
"=",
"False",
")",
":",
"from",
"castra",
"import",
"Castra",
"if",
"(",
"not",
"isinstance",
"(",
"x",
",",
"Castra",
")",
")",
":",
"x",
"=",
"Castra",
"(",
"x",
",",
"readonly",
"=",
"True",
")",
"elif",
"(",
"not",
"x",
".",
"_readonly",
")",
":",
"x",
"=",
"Castra",
"(",
"x",
".",
"path",
",",
"readonly",
"=",
"True",
")",
"if",
"(",
"columns",
"is",
"None",
")",
":",
"columns",
"=",
"x",
".",
"columns",
"name",
"=",
"(",
"'from-castra-'",
"+",
"tokenize",
"(",
"os",
".",
"path",
".",
"getmtime",
"(",
"x",
".",
"path",
")",
",",
"x",
".",
"path",
",",
"columns",
",",
"index",
")",
")",
"dsk",
"=",
"dict",
"(",
"(",
"(",
"(",
"name",
",",
"i",
")",
",",
"(",
"load_castra_partition",
",",
"x",
",",
"part",
",",
"columns",
",",
"index",
")",
")",
"for",
"(",
"i",
",",
"part",
")",
"in",
"enumerate",
"(",
"x",
".",
"partitions",
")",
")",
")",
"return",
"Bag",
"(",
"dsk",
",",
"name",
",",
"len",
"(",
"x",
".",
"partitions",
")",
")"
] |
load a dask bag from a castra .
|
train
| false
|
55,219
|
def _DefaultValueConstructorForField(field):
if (field.label == _FieldDescriptor.LABEL_REPEATED):
if (field.has_default_value and (field.default_value != [])):
raise ValueError(('Repeated field default value not empty list: %s' % field.default_value))
if (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
message_type = field.message_type
def MakeRepeatedMessageDefault(message):
return containers.RepeatedCompositeFieldContainer(message._listener_for_children, field.message_type)
return MakeRepeatedMessageDefault
else:
type_checker = type_checkers.GetTypeChecker(field)
def MakeRepeatedScalarDefault(message):
return containers.RepeatedScalarFieldContainer(message._listener_for_children, type_checker)
return MakeRepeatedScalarDefault
if (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
message_type = field.message_type
def MakeSubMessageDefault(message):
result = message_type._concrete_class()
result._SetListener(message._listener_for_children)
return result
return MakeSubMessageDefault
def MakeScalarDefault(message):
return field.default_value
return MakeScalarDefault
|
[
"def",
"_DefaultValueConstructorForField",
"(",
"field",
")",
":",
"if",
"(",
"field",
".",
"label",
"==",
"_FieldDescriptor",
".",
"LABEL_REPEATED",
")",
":",
"if",
"(",
"field",
".",
"has_default_value",
"and",
"(",
"field",
".",
"default_value",
"!=",
"[",
"]",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Repeated field default value not empty list: %s'",
"%",
"field",
".",
"default_value",
")",
")",
"if",
"(",
"field",
".",
"cpp_type",
"==",
"_FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
")",
":",
"message_type",
"=",
"field",
".",
"message_type",
"def",
"MakeRepeatedMessageDefault",
"(",
"message",
")",
":",
"return",
"containers",
".",
"RepeatedCompositeFieldContainer",
"(",
"message",
".",
"_listener_for_children",
",",
"field",
".",
"message_type",
")",
"return",
"MakeRepeatedMessageDefault",
"else",
":",
"type_checker",
"=",
"type_checkers",
".",
"GetTypeChecker",
"(",
"field",
")",
"def",
"MakeRepeatedScalarDefault",
"(",
"message",
")",
":",
"return",
"containers",
".",
"RepeatedScalarFieldContainer",
"(",
"message",
".",
"_listener_for_children",
",",
"type_checker",
")",
"return",
"MakeRepeatedScalarDefault",
"if",
"(",
"field",
".",
"cpp_type",
"==",
"_FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
")",
":",
"message_type",
"=",
"field",
".",
"message_type",
"def",
"MakeSubMessageDefault",
"(",
"message",
")",
":",
"result",
"=",
"message_type",
".",
"_concrete_class",
"(",
")",
"result",
".",
"_SetListener",
"(",
"message",
".",
"_listener_for_children",
")",
"return",
"result",
"return",
"MakeSubMessageDefault",
"def",
"MakeScalarDefault",
"(",
"message",
")",
":",
"return",
"field",
".",
"default_value",
"return",
"MakeScalarDefault"
] |
returns a function which returns a default value for a field .
|
train
| true
|
55,220
|
def new_figure_manager_given_figure(num, figure):
canvas = FigureCanvasGDK(figure)
manager = FigureManagerBase(canvas, num)
return manager
|
[
"def",
"new_figure_manager_given_figure",
"(",
"num",
",",
"figure",
")",
":",
"canvas",
"=",
"FigureCanvasGDK",
"(",
"figure",
")",
"manager",
"=",
"FigureManagerBase",
"(",
"canvas",
",",
"num",
")",
"return",
"manager"
] |
create a new figure manager instance for the given figure .
|
train
| false
|
55,221
|
def statusEnquiry():
a = TpPd(pd=3)
b = MessageType(mesType=52)
packet = (a / b)
return packet
|
[
"def",
"statusEnquiry",
"(",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"3",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"52",
")",
"packet",
"=",
"(",
"a",
"/",
"b",
")",
"return",
"packet"
] |
status enquiry section 9 .
|
train
| true
|
55,222
|
def _trial(factors, n, candidates, verbose=False):
if verbose:
factors0 = list(factors.keys())
nfactors = len(factors)
for d in candidates:
if ((n % d) == 0):
m = multiplicity(d, n)
n //= (d ** m)
factors[d] = m
if verbose:
for k in sorted(set(factors).difference(set(factors0))):
print((factor_msg % (k, factors[k])))
return (int(n), (len(factors) != nfactors))
|
[
"def",
"_trial",
"(",
"factors",
",",
"n",
",",
"candidates",
",",
"verbose",
"=",
"False",
")",
":",
"if",
"verbose",
":",
"factors0",
"=",
"list",
"(",
"factors",
".",
"keys",
"(",
")",
")",
"nfactors",
"=",
"len",
"(",
"factors",
")",
"for",
"d",
"in",
"candidates",
":",
"if",
"(",
"(",
"n",
"%",
"d",
")",
"==",
"0",
")",
":",
"m",
"=",
"multiplicity",
"(",
"d",
",",
"n",
")",
"n",
"//=",
"(",
"d",
"**",
"m",
")",
"factors",
"[",
"d",
"]",
"=",
"m",
"if",
"verbose",
":",
"for",
"k",
"in",
"sorted",
"(",
"set",
"(",
"factors",
")",
".",
"difference",
"(",
"set",
"(",
"factors0",
")",
")",
")",
":",
"print",
"(",
"(",
"factor_msg",
"%",
"(",
"k",
",",
"factors",
"[",
"k",
"]",
")",
")",
")",
"return",
"(",
"int",
"(",
"n",
")",
",",
"(",
"len",
"(",
"factors",
")",
"!=",
"nfactors",
")",
")"
] |
helper function for integer factorization .
|
train
| false
|
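With the snippet above in scope, a stand-in for sympy's `multiplicity` is enough to exercise it; the return value pairs the unfactored remainder with a flag saying whether any new factor was found:

```python
def multiplicity(d, n):
    # stand-in for sympy.ntheory.multiplicity: the largest m such that d**m divides n
    m = 0
    while n % d == 0:
        n //= d
        m += 1
    return m

factors = {}
print(_trial(factors, 720, [2, 3, 5]))  # (1, True): fully factored, new factors were found
print(factors)                          # {2: 4, 3: 2, 5: 1}
```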
55,224
|
def read_weighted_edgelist(path, comments='#', delimiter=None, create_using=None, nodetype=None, encoding='utf-8'):
return read_edgelist(path, comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, data=(('weight', float),), encoding=encoding)
|
[
"def",
"read_weighted_edgelist",
"(",
"path",
",",
"comments",
"=",
"'#'",
",",
"delimiter",
"=",
"None",
",",
"create_using",
"=",
"None",
",",
"nodetype",
"=",
"None",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"return",
"read_edgelist",
"(",
"path",
",",
"comments",
"=",
"comments",
",",
"delimiter",
"=",
"delimiter",
",",
"create_using",
"=",
"create_using",
",",
"nodetype",
"=",
"nodetype",
",",
"data",
"=",
"(",
"(",
"'weight'",
",",
"float",
")",
",",
")",
",",
"encoding",
"=",
"encoding",
")"
] |
read a graph as list of edges with numeric weights .
|
train
| false
|
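This one is networkx's public API, so it can be checked directly against an install (the byte-string buffer works because `read_edgelist` decodes each line with the given encoding):

```python
import io
import networkx as nx

data = io.BytesIO(b'a b 1.5\nb c 2.0\n')
G = nx.read_weighted_edgelist(data)
print(G['a']['b']['weight'])  # 1.5
```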
55,225
|
def virtualenv_no_global():
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
if (running_under_virtualenv() and os.path.isfile(no_global_file)):
return True
|
[
"def",
"virtualenv_no_global",
"(",
")",
":",
"site_mod_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"site",
".",
"__file__",
")",
")",
"no_global_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"site_mod_dir",
",",
"'no-global-site-packages.txt'",
")",
"if",
"(",
"running_under_virtualenv",
"(",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"no_global_file",
")",
")",
":",
"return",
"True"
] |
return true if in a venv and no system site packages .
|
train
| true
|
55,226
|
def pkt_line(data):
if (data is None):
return '0000'
return (('%04x' % (len(data) + 4)).encode('ascii') + data)
|
[
"def",
"pkt_line",
"(",
"data",
")",
":",
"if",
"(",
"data",
"is",
"None",
")",
":",
"return",
"'0000'",
"return",
"(",
"(",
"'%04x'",
"%",
"(",
"len",
"(",
"data",
")",
"+",
"4",
")",
")",
".",
"encode",
"(",
"'ascii'",
")",
"+",
"data",
")"
] |
wrap data in a pkt-line .
|
train
| false
|
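The four-byte hex prefix counts itself, which is why `len(data) + 4` appears. A python 3 sketch (the original returns the *str* `'0000'` for the flush packet, which is fine in the python 2 code it comes from):

```python
def pkt_line(data):
    if data is None:
        return b'0000'  # flush-pkt
    return ('%04x' % (len(data) + 4)).encode('ascii') + data

print(pkt_line(b'hello\n'))  # b'000ahello\n': 6 payload bytes + 4 prefix bytes = 0x0a
print(pkt_line(None))        # b'0000'
```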
55,228
|
@fixture
def patch_network_functions(monkeypatch):
import inbox.actions.backends
for backend in inbox.actions.backends.module_registry.values():
for method_name in backend.__all__:
monkeypatch.setattr(((backend.__name__ + '.') + method_name), (lambda *args, **kwargs: None))
|
[
"@",
"fixture",
"def",
"patch_network_functions",
"(",
"monkeypatch",
")",
":",
"import",
"inbox",
".",
"actions",
".",
"backends",
"for",
"backend",
"in",
"inbox",
".",
"actions",
".",
"backends",
".",
"module_registry",
".",
"values",
"(",
")",
":",
"for",
"method_name",
"in",
"backend",
".",
"__all__",
":",
"monkeypatch",
".",
"setattr",
"(",
"(",
"(",
"backend",
".",
"__name__",
"+",
"'.'",
")",
"+",
"method_name",
")",
",",
"(",
"lambda",
"*",
"args",
",",
"**",
"kwargs",
":",
"None",
")",
")"
] |
monkeypatch syncback functions that actually talk to gmail so that the tests can run faster .
|
train
| false
|
55,229
|
@treeio_login_required
@handle_response_format
def ordered_product_delete(request, ordered_product_id, response_format='html'):
ordered_product = get_object_or_404(OrderedProduct, pk=ordered_product_id)
if ((not request.user.profile.has_permission(ordered_product, mode='w')) and (not request.user.profile.is_admin('treeio.sales'))):
return user_denied(request, "You don't have access to this Sale Status", response_format)
if request.POST:
if ('delete' in request.POST):
order_id = ordered_product.order_id
if ('trash' in request.POST):
ordered_product.trash = True
ordered_product.save()
else:
ordered_product.delete()
ordered_product.order.update_total()
return HttpResponseRedirect(reverse('sales_order_view', args=[order_id]))
elif ('cancel' in request.POST):
return HttpResponseRedirect(reverse('sales_ordered_product_view', args=[ordered_product.id]))
order = ordered_product.order
return render_to_response('sales/ordered_product_delete', {'ordered_product': ordered_product, 'order': order}, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"treeio_login_required",
"@",
"handle_response_format",
"def",
"ordered_product_delete",
"(",
"request",
",",
"ordered_product_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"ordered_product",
"=",
"get_object_or_404",
"(",
"OrderedProduct",
",",
"pk",
"=",
"ordered_product_id",
")",
"if",
"(",
"(",
"not",
"request",
".",
"user",
".",
"profile",
".",
"has_permission",
"(",
"ordered_product",
",",
"mode",
"=",
"'w'",
")",
")",
"and",
"(",
"not",
"request",
".",
"user",
".",
"profile",
".",
"is_admin",
"(",
"'treeio.sales'",
")",
")",
")",
":",
"return",
"user_denied",
"(",
"request",
",",
"\"You don't have access to this Sale Status\"",
",",
"response_format",
")",
"if",
"request",
".",
"POST",
":",
"if",
"(",
"'delete'",
"in",
"request",
".",
"POST",
")",
":",
"order_id",
"=",
"ordered_product",
".",
"order_id",
"if",
"(",
"'trash'",
"in",
"request",
".",
"POST",
")",
":",
"ordered_product",
".",
"trash",
"=",
"True",
"ordered_product",
".",
"save",
"(",
")",
"else",
":",
"ordered_product",
".",
"delete",
"(",
")",
"ordered_product",
".",
"order",
".",
"update_total",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'sales_order_view'",
",",
"args",
"=",
"[",
"order_id",
"]",
")",
")",
"elif",
"(",
"'cancel'",
"in",
"request",
".",
"POST",
")",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'sales_ordered_product_view'",
",",
"args",
"=",
"[",
"ordered_product",
".",
"id",
"]",
")",
")",
"order",
"=",
"ordered_product",
".",
"order",
"return",
"render_to_response",
"(",
"'sales/ordered_product_delete'",
",",
"{",
"'ordered_product'",
":",
"ordered_product",
",",
"'order'",
":",
"order",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
orderedproduct delete .
|
train
| false
|
55,230
|
def snmp_preprocessor(a_device, oid='.1.3.6.1.2.1.1.1.0'):
if (not (a_device.snmp_credentials.snmp_mode == 'snmp3')):
raise ValueError('Invalid SNMP mode in config_detect {}'.format(a_device.snmp_credentials.snmp_mode))
snmp_device = (a_device.ip_address, a_device.snmp_port)
snmp_user = (a_device.snmp_credentials.username, a_device.snmp_credentials.auth_key, a_device.snmp_credentials.encrypt_key)
auth_proto = a_device.snmp_credentials.auth_proto
encrypt_proto = a_device.snmp_credentials.encrypt_proto
return {'snmp_device': snmp_device, 'snmp_user': snmp_user, 'oid': oid, 'auth_proto': auth_proto, 'encrypt_proto': encrypt_proto}
|
[
"def",
"snmp_preprocessor",
"(",
"a_device",
",",
"oid",
"=",
"'.1.3.6.1.2.1.1.1.0'",
")",
":",
"if",
"(",
"not",
"(",
"a_device",
".",
"snmp_credentials",
".",
"snmp_mode",
"==",
"'snmp3'",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid SNMP mode in config_detect {}'",
".",
"format",
"(",
"a_device",
".",
"snmp_credentials",
".",
"snmp_mode",
")",
")",
"snmp_device",
"=",
"(",
"a_device",
".",
"ip_address",
",",
"a_device",
".",
"snmp_port",
")",
"snmp_user",
"=",
"(",
"a_device",
".",
"snmp_credentials",
".",
"username",
",",
"a_device",
".",
"snmp_credentials",
".",
"auth_key",
",",
"a_device",
".",
"snmp_credentials",
".",
"encrypt_key",
")",
"auth_proto",
"=",
"a_device",
".",
"snmp_credentials",
".",
"auth_proto",
"encrypt_proto",
"=",
"a_device",
".",
"snmp_credentials",
".",
"encrypt_proto",
"return",
"{",
"'snmp_device'",
":",
"snmp_device",
",",
"'snmp_user'",
":",
"snmp_user",
",",
"'oid'",
":",
"oid",
",",
"'auth_proto'",
":",
"auth_proto",
",",
"'encrypt_proto'",
":",
"encrypt_proto",
"}"
] |
extract snmp parameters from a networkdevice object ; only snmpv3 is supported .
|
train
| false
|
55,232
|
def _tile_perimeter_width(coord, projection):
perimeter = _tile_perimeter(coord, projection, False)
return (perimeter[8][0] - perimeter[0][0])
|
[
"def",
"_tile_perimeter_width",
"(",
"coord",
",",
"projection",
")",
":",
"perimeter",
"=",
"_tile_perimeter",
"(",
"coord",
",",
"projection",
",",
"False",
")",
"return",
"(",
"perimeter",
"[",
"8",
"]",
"[",
"0",
"]",
"-",
"perimeter",
"[",
"0",
"]",
"[",
"0",
"]",
")"
] |
get the width in projected coordinates of the coordinate tile polygon .
|
train
| false
|
55,233
|
def update_connection_pool(maxsize=1):
get_pool().connection_pool_kw.update(maxsize=maxsize)
|
[
"def",
"update_connection_pool",
"(",
"maxsize",
"=",
"1",
")",
":",
"get_pool",
"(",
")",
".",
"connection_pool_kw",
".",
"update",
"(",
"maxsize",
"=",
"maxsize",
")"
] |
update the global connection pool manager parameters .
|
train
| false
|
55,234
|
@LocalContext
def alphanumeric(raw_bytes, *a, **kw):
return encode(raw_bytes, expr=re_alphanumeric, *a, **kw)
|
[
"@",
"LocalContext",
"def",
"alphanumeric",
"(",
"raw_bytes",
",",
"*",
"a",
",",
"**",
"kw",
")",
":",
"return",
"encode",
"(",
"raw_bytes",
",",
"expr",
"=",
"re_alphanumeric",
",",
"*",
"a",
",",
"**",
"kw",
")"
] |
alphanumeric -> str : encode the shellcode raw_bytes such that it does not contain any bytes except for [a-zA-Z0-9] .
|
train
| false
|
55,235
|
def libvlc_media_list_set_media(p_ml, p_md):
f = (_Cfunctions.get('libvlc_media_list_set_media', None) or _Cfunction('libvlc_media_list_set_media', ((1,), (1,)), None, None, MediaList, Media))
return f(p_ml, p_md)
|
[
"def",
"libvlc_media_list_set_media",
"(",
"p_ml",
",",
"p_md",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_list_set_media'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_list_set_media'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
")",
",",
"None",
",",
"None",
",",
"MediaList",
",",
"Media",
")",
")",
"return",
"f",
"(",
"p_ml",
",",
"p_md",
")"
] |
associate media instance with this media list instance .
|
train
| true
|
55,236
|
def _GetPdbPath(target_dict, config_name, vars):
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = ('%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']]))
pdb_path = ((vars['PRODUCT_DIR'] + '/') + pdb_base)
return pdb_path
|
[
"def",
"_GetPdbPath",
"(",
"target_dict",
",",
"config_name",
",",
"vars",
")",
":",
"config",
"=",
"target_dict",
"[",
"'configurations'",
"]",
"[",
"config_name",
"]",
"msvs",
"=",
"config",
".",
"setdefault",
"(",
"'msvs_settings'",
",",
"{",
"}",
")",
"linker",
"=",
"msvs",
".",
"get",
"(",
"'VCLinkerTool'",
",",
"{",
"}",
")",
"pdb_path",
"=",
"linker",
".",
"get",
"(",
"'ProgramDatabaseFile'",
")",
"if",
"pdb_path",
":",
"return",
"pdb_path",
"variables",
"=",
"target_dict",
".",
"get",
"(",
"'variables'",
",",
"{",
"}",
")",
"pdb_path",
"=",
"variables",
".",
"get",
"(",
"'msvs_large_pdb_path'",
",",
"None",
")",
"if",
"pdb_path",
":",
"return",
"pdb_path",
"pdb_base",
"=",
"target_dict",
".",
"get",
"(",
"'product_name'",
",",
"target_dict",
"[",
"'target_name'",
"]",
")",
"pdb_base",
"=",
"(",
"'%s.%s.pdb'",
"%",
"(",
"pdb_base",
",",
"TARGET_TYPE_EXT",
"[",
"target_dict",
"[",
"'type'",
"]",
"]",
")",
")",
"pdb_path",
"=",
"(",
"(",
"vars",
"[",
"'PRODUCT_DIR'",
"]",
"+",
"'/'",
")",
"+",
"pdb_base",
")",
"return",
"pdb_path"
] |
returns the path to the pdb file that will be generated by a given configuration .
|
train
| false
|
55,237
|
@constructor
def max(x, axis=None, keepdims=False):
try:
out = max_and_argmax(x, axis)[0]
except Exception:
out = CAReduce(scal.maximum, axis)(x)
if keepdims:
out = makeKeepDims(x, out, axis)
return out
|
[
"@",
"constructor",
"def",
"max",
"(",
"x",
",",
"axis",
"=",
"None",
",",
"keepdims",
"=",
"False",
")",
":",
"try",
":",
"out",
"=",
"max_and_argmax",
"(",
"x",
",",
"axis",
")",
"[",
"0",
"]",
"except",
"Exception",
":",
"out",
"=",
"CAReduce",
"(",
"scal",
".",
"maximum",
",",
"axis",
")",
"(",
"x",
")",
"if",
"keepdims",
":",
"out",
"=",
"makeKeepDims",
"(",
"x",
",",
"out",
",",
"axis",
")",
"return",
"out"
] |
returns maximum elements obtained by iterating over given axis .
|
train
| false
|
55,238
|
def _is_axial_coil(coil):
is_ax = (coil['coil_class'] in (FIFF.FWD_COILC_MAG, FIFF.FWD_COILC_AXIAL_GRAD, FIFF.FWD_COILC_AXIAL_GRAD2))
return is_ax
|
[
"def",
"_is_axial_coil",
"(",
"coil",
")",
":",
"is_ax",
"=",
"(",
"coil",
"[",
"'coil_class'",
"]",
"in",
"(",
"FIFF",
".",
"FWD_COILC_MAG",
",",
"FIFF",
".",
"FWD_COILC_AXIAL_GRAD",
",",
"FIFF",
".",
"FWD_COILC_AXIAL_GRAD2",
")",
")",
"return",
"is_ax"
] |
determine if the coil is axial .
|
train
| false
|
55,239
|
@public
def guess_generating_function_rational(v, X=Symbol('x')):
q = find_simple_recurrence_vector(v)
n = len(q)
if (n <= 1):
return None
p = [sum(((v[(i - k)] * q[k]) for k in range(min((i + 1), n)))) for i in range(len(v))]
return (sum(((p[k] * (X ** k)) for k in range(len(p)))) / sum(((q[k] * (X ** k)) for k in range(n))))
|
[
"@",
"public",
"def",
"guess_generating_function_rational",
"(",
"v",
",",
"X",
"=",
"Symbol",
"(",
"'x'",
")",
")",
":",
"q",
"=",
"find_simple_recurrence_vector",
"(",
"v",
")",
"n",
"=",
"len",
"(",
"q",
")",
"if",
"(",
"n",
"<=",
"1",
")",
":",
"return",
"None",
"p",
"=",
"[",
"sum",
"(",
"(",
"(",
"v",
"[",
"(",
"i",
"-",
"k",
")",
"]",
"*",
"q",
"[",
"k",
"]",
")",
"for",
"k",
"in",
"range",
"(",
"min",
"(",
"(",
"i",
"+",
"1",
")",
",",
"n",
")",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"v",
")",
")",
"]",
"return",
"(",
"sum",
"(",
"(",
"(",
"p",
"[",
"k",
"]",
"*",
"(",
"X",
"**",
"k",
")",
")",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"p",
")",
")",
")",
")",
"/",
"sum",
"(",
"(",
"(",
"q",
"[",
"k",
"]",
"*",
"(",
"X",
"**",
"k",
")",
")",
"for",
"k",
"in",
"range",
"(",
"n",
")",
")",
")",
")"
] |
tries to "guess" a rational generating function for a sequence of rational numbers v .
|
train
| false
|
55,241
|
def add_completer(widget, items):
completer = QtWidgets.QCompleter(items, widget)
completer.setCaseSensitivity(Qt.CaseInsensitive)
completer.setCompletionMode(QtWidgets.QCompleter.InlineCompletion)
widget.setCompleter(completer)
|
[
"def",
"add_completer",
"(",
"widget",
",",
"items",
")",
":",
"completer",
"=",
"QtWidgets",
".",
"QCompleter",
"(",
"items",
",",
"widget",
")",
"completer",
".",
"setCaseSensitivity",
"(",
"Qt",
".",
"CaseInsensitive",
")",
"completer",
".",
"setCompletionMode",
"(",
"QtWidgets",
".",
"QCompleter",
".",
"InlineCompletion",
")",
"widget",
".",
"setCompleter",
"(",
"completer",
")"
] |
add simple completion to a widget .
|
train
| false
|
55,242
|
def _compute_hash_v1(get_deps_dict, hash):
uniquedeps = get_deps_dict['uniquedeps']
spec = get_deps_dict['spec']
hash.update(spec.text)
for d in uniquedeps:
hash.update(roslib.msgs.get_registered(d).text)
return hash.hexdigest()
|
[
"def",
"_compute_hash_v1",
"(",
"get_deps_dict",
",",
"hash",
")",
":",
"uniquedeps",
"=",
"get_deps_dict",
"[",
"'uniquedeps'",
"]",
"spec",
"=",
"get_deps_dict",
"[",
"'spec'",
"]",
"hash",
".",
"update",
"(",
"spec",
".",
"text",
")",
"for",
"d",
"in",
"uniquedeps",
":",
"hash",
".",
"update",
"(",
"roslib",
".",
"msgs",
".",
"get_registered",
"(",
"d",
")",
".",
"text",
")",
"return",
"hash",
".",
"hexdigest",
"(",
")"
] |
subroutine of compute_md5_v1() .
|
train
| false
|
55,245
|
def hpsModelSynth(hfreq, hmag, hphase, stocEnv, N, H, fs):
yh = SM.sineModelSynth(hfreq, hmag, hphase, N, H, fs)
yst = STM.stochasticModelSynth(stocEnv, H, (H * 2))
y = (yh[:min(yh.size, yst.size)] + yst[:min(yh.size, yst.size)])
return (y, yh, yst)
|
[
"def",
"hpsModelSynth",
"(",
"hfreq",
",",
"hmag",
",",
"hphase",
",",
"stocEnv",
",",
"N",
",",
"H",
",",
"fs",
")",
":",
"yh",
"=",
"SM",
".",
"sineModelSynth",
"(",
"hfreq",
",",
"hmag",
",",
"hphase",
",",
"N",
",",
"H",
",",
"fs",
")",
"yst",
"=",
"STM",
".",
"stochasticModelSynth",
"(",
"stocEnv",
",",
"H",
",",
"(",
"H",
"*",
"2",
")",
")",
"y",
"=",
"(",
"yh",
"[",
":",
"min",
"(",
"yh",
".",
"size",
",",
"yst",
".",
"size",
")",
"]",
"+",
"yst",
"[",
":",
"min",
"(",
"yh",
".",
"size",
",",
"yst",
".",
"size",
")",
"]",
")",
"return",
"(",
"y",
",",
"yh",
",",
"yst",
")"
] |
synthesis of a sound using the harmonic plus stochastic model .
|
train
| false
|
55,248
|
def getMatrixTetragridMatrix(matrixTetragrid, prefix, xmlElement):
matrixKey = (prefix + 'matrix')
evaluatedDictionary = evaluate.getEvaluatedDictionary([matrixKey], xmlElement)
if (len(evaluatedDictionary.keys()) < 1):
return matrixTetragrid
value = evaluatedDictionary[matrixKey]
if ((value == None) or (value == 'None')):
print 'Warning, value in getMatrixTetragridMatrix in matrix is None for matrixKey for dictionary:'
print matrixKey
print evaluatedDictionary
else:
matrixTetragrid = getIdentityMatrixTetragrid(matrixTetragrid)
for (rowIndex, row) in enumerate(value):
for (elementIndex, element) in enumerate(row):
matrixTetragrid[rowIndex][elementIndex] = element
euclidean.removeListFromDictionary(xmlElement.attributeDictionary, [matrixKey])
return matrixTetragrid
|
[
"def",
"getMatrixTetragridMatrix",
"(",
"matrixTetragrid",
",",
"prefix",
",",
"xmlElement",
")",
":",
"matrixKey",
"=",
"(",
"prefix",
"+",
"'matrix'",
")",
"evaluatedDictionary",
"=",
"evaluate",
".",
"getEvaluatedDictionary",
"(",
"[",
"matrixKey",
"]",
",",
"xmlElement",
")",
"if",
"(",
"len",
"(",
"evaluatedDictionary",
".",
"keys",
"(",
")",
")",
"<",
"1",
")",
":",
"return",
"matrixTetragrid",
"value",
"=",
"evaluatedDictionary",
"[",
"matrixKey",
"]",
"if",
"(",
"(",
"value",
"==",
"None",
")",
"or",
"(",
"value",
"==",
"'None'",
")",
")",
":",
"print",
"'Warning, value in getMatrixTetragridMatrix in matrix is None for matrixKey for dictionary:'",
"print",
"matrixKey",
"print",
"evaluatedDictionary",
"else",
":",
"matrixTetragrid",
"=",
"getIdentityMatrixTetragrid",
"(",
"matrixTetragrid",
")",
"for",
"(",
"rowIndex",
",",
"row",
")",
"in",
"enumerate",
"(",
"value",
")",
":",
"for",
"(",
"elementIndex",
",",
"element",
")",
"in",
"enumerate",
"(",
"row",
")",
":",
"matrixTetragrid",
"[",
"rowIndex",
"]",
"[",
"elementIndex",
"]",
"=",
"element",
"euclidean",
".",
"removeListFromDictionary",
"(",
"xmlElement",
".",
"attributeDictionary",
",",
"[",
"matrixKey",
"]",
")",
"return",
"matrixTetragrid"
] |
get the matrix tetragrid from the xmlelement matrix value .
|
train
| false
|
55,250
|
def ToScatteredId(v):
if (v >= _MAX_SCATTERED_COUNTER):
raise datastore_errors.BadArgumentError(('counter value too large (%d)' % v))
return ((_MAX_SEQUENTIAL_ID + 1) + long(ReverseBitsInt64((v << _SCATTER_SHIFT))))
|
[
"def",
"ToScatteredId",
"(",
"v",
")",
":",
"if",
"(",
"v",
">=",
"_MAX_SCATTERED_COUNTER",
")",
":",
"raise",
"datastore_errors",
".",
"BadArgumentError",
"(",
"(",
"'counter value too large (%d)'",
"%",
"v",
")",
")",
"return",
"(",
"(",
"_MAX_SEQUENTIAL_ID",
"+",
"1",
")",
"+",
"long",
"(",
"ReverseBitsInt64",
"(",
"(",
"v",
"<<",
"_SCATTER_SHIFT",
")",
")",
")",
")"
] |
map counter value v to the scattered id space .
|
train
| false
|
55,252
|
def getFirstWord(splitLine):
if (len(splitLine) > 0):
return splitLine[0]
return ''
|
[
"def",
"getFirstWord",
"(",
"splitLine",
")",
":",
"if",
"(",
"len",
"(",
"splitLine",
")",
">",
"0",
")",
":",
"return",
"splitLine",
"[",
"0",
"]",
"return",
"''"
] |
get the first word of a split line .
|
train
| false
|
55,253
|
def validate_input(trans, error_map, param_values, page_param_map):
first = param_values['name1']
second = param_values['name2']
if (first == second):
error_map['name1'] = 'The value names should be different.'
|
[
"def",
"validate_input",
"(",
"trans",
",",
"error_map",
",",
"param_values",
",",
"page_param_map",
")",
":",
"first",
"=",
"param_values",
"[",
"'name1'",
"]",
"second",
"=",
"param_values",
"[",
"'name2'",
"]",
"if",
"(",
"first",
"==",
"second",
")",
":",
"error_map",
"[",
"'name1'",
"]",
"=",
"'The value names should be different.'"
] |
validates the user input .
|
train
| false
|
55,255
|
def test_nonexistent_options_listed_in_order(script, data):
result = script.pip('install', '--no-index', ('--find-links=' + data.find_links), 'simplewheel[nonexistent, nope]', expect_stderr=True)
msg = " simplewheel 2.0 does not provide the extra 'nonexistent'\n simplewheel 2.0 does not provide the extra 'nope'"
assert (msg in result.stderr)
|
[
"def",
"test_nonexistent_options_listed_in_order",
"(",
"script",
",",
"data",
")",
":",
"result",
"=",
"script",
".",
"pip",
"(",
"'install'",
",",
"'--no-index'",
",",
"(",
"'--find-links='",
"+",
"data",
".",
"find_links",
")",
",",
"'simplewheel[nonexistent, nope]'",
",",
"expect_stderr",
"=",
"True",
")",
"msg",
"=",
"\" simplewheel 2.0 does not provide the extra 'nonexistent'\\n simplewheel 2.0 does not provide the extra 'nope'\"",
"assert",
"(",
"msg",
"in",
"result",
".",
"stderr",
")"
] |
warn the user for each extra that doesn't exist .
|
train
| false
|
55,256
|
def build_dict():
containers = dict([(c, (['all'] + (lxc.Container(c).get_config_item('lxc.group') or []))) for c in lxc.list_containers()])
groups = set(sum([g for g in containers.values()], []))
return dict([(g, {'hosts': [k for (k, v) in containers.items() if (g in v)], 'vars': {'ansible_connection': 'lxc'}}) for g in groups])
|
[
"def",
"build_dict",
"(",
")",
":",
"containers",
"=",
"dict",
"(",
"[",
"(",
"c",
",",
"(",
"[",
"'all'",
"]",
"+",
"(",
"lxc",
".",
"Container",
"(",
"c",
")",
".",
"get_config_item",
"(",
"'lxc.group'",
")",
"or",
"[",
"]",
")",
")",
")",
"for",
"c",
"in",
"lxc",
".",
"list_containers",
"(",
")",
"]",
")",
"groups",
"=",
"set",
"(",
"sum",
"(",
"[",
"g",
"for",
"g",
"in",
"containers",
".",
"values",
"(",
")",
"]",
",",
"[",
"]",
")",
")",
"return",
"dict",
"(",
"[",
"(",
"g",
",",
"{",
"'hosts'",
":",
"[",
"k",
"for",
"(",
"k",
",",
"v",
")",
"in",
"containers",
".",
"items",
"(",
")",
"if",
"(",
"g",
"in",
"v",
")",
"]",
",",
"'vars'",
":",
"{",
"'ansible_connection'",
":",
"'lxc'",
"}",
"}",
")",
"for",
"g",
"in",
"groups",
"]",
")"
] |
returns a dictionary keyed to the defined lxc groups .
|
train
| false
|
55,257
|
def create_mac_string(num, splitter=u':'):
mac = hex(num)[2:]
if (mac[(-1)] == u'L'):
mac = mac[:(-1)]
pad = max((12 - len(mac)), 0)
mac = ((u'0' * pad) + mac)
mac = splitter.join([mac[x:(x + 2)] for x in range(0, 12, 2)])
mac = mac.upper()
return mac
|
[
"def",
"create_mac_string",
"(",
"num",
",",
"splitter",
"=",
"u':'",
")",
":",
"mac",
"=",
"hex",
"(",
"num",
")",
"[",
"2",
":",
"]",
"if",
"(",
"mac",
"[",
"(",
"-",
"1",
")",
"]",
"==",
"u'L'",
")",
":",
"mac",
"=",
"mac",
"[",
":",
"(",
"-",
"1",
")",
"]",
"pad",
"=",
"max",
"(",
"(",
"12",
"-",
"len",
"(",
"mac",
")",
")",
",",
"0",
")",
"mac",
"=",
"(",
"(",
"u'0'",
"*",
"pad",
")",
"+",
"mac",
")",
"mac",
"=",
"splitter",
".",
"join",
"(",
"[",
"mac",
"[",
"x",
":",
"(",
"x",
"+",
"2",
")",
"]",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"12",
",",
"2",
")",
"]",
")",
"mac",
"=",
"mac",
".",
"upper",
"(",
")",
"return",
"mac"
] |
return the mac address interpretation of num .
|
train
| false
|
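A direct check of the row above; the `'L'` branch only matters on python 2, where `hex()` of a long ends in `L`:

```python
def create_mac_string(num, splitter=':'):
    mac = hex(num)[2:]
    if mac[-1] == 'L':                       # python 2 long suffix; never hit on python 3
        mac = mac[:-1]
    mac = '0' * max(12 - len(mac), 0) + mac  # left-pad to 12 hex digits
    return splitter.join(mac[x:x + 2] for x in range(0, 12, 2)).upper()

print(create_mac_string(0x001122AABBCC))  # 00:11:22:AA:BB:CC
```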
55,258
|
def get_rising_items(omit_sr_ids, count=4):
all_rising = rising.get_all_rising()
candidate_sr_ids = {sr_id for (link, score, sr_id) in all_rising}.difference(omit_sr_ids)
link_fullnames = [link for (link, score, sr_id) in all_rising if (sr_id in candidate_sr_ids)]
link_fullnames_to_show = random_sample(link_fullnames, count)
rising_links = Link._by_fullname(link_fullnames_to_show, return_dict=False, data=True)
rising_items = [ExploreItem(TYPE_RISING, 'ris', Subreddit._byID(l.sr_id), l) for l in rising_links]
return rising_items
|
[
"def",
"get_rising_items",
"(",
"omit_sr_ids",
",",
"count",
"=",
"4",
")",
":",
"all_rising",
"=",
"rising",
".",
"get_all_rising",
"(",
")",
"candidate_sr_ids",
"=",
"{",
"sr_id",
"for",
"(",
"link",
",",
"score",
",",
"sr_id",
")",
"in",
"all_rising",
"}",
".",
"difference",
"(",
"omit_sr_ids",
")",
"link_fullnames",
"=",
"[",
"link",
"for",
"(",
"link",
",",
"score",
",",
"sr_id",
")",
"in",
"all_rising",
"if",
"(",
"sr_id",
"in",
"candidate_sr_ids",
")",
"]",
"link_fullnames_to_show",
"=",
"random_sample",
"(",
"link_fullnames",
",",
"count",
")",
"rising_links",
"=",
"Link",
".",
"_by_fullname",
"(",
"link_fullnames_to_show",
",",
"return_dict",
"=",
"False",
",",
"data",
"=",
"True",
")",
"rising_items",
"=",
"[",
"ExploreItem",
"(",
"TYPE_RISING",
",",
"'ris'",
",",
"Subreddit",
".",
"_byID",
"(",
"l",
".",
"sr_id",
")",
",",
"l",
")",
"for",
"l",
"in",
"rising_links",
"]",
"return",
"rising_items"
] |
get links that are rising right now .
|
train
| false
|
55,260
|
def cloud_query_sinfo(cookie, tokens, source_path):
url = ''.join([const.PAN_URL, 'rest/2.0/services/cloud_dl?channel=chunlei&clienttype=0&web=1', '&method=query_sinfo&app_id=250528', '&bdstoken=', tokens['bdstoken'], '&source_path=', encoder.encode_uri_component(source_path), '&type=2', '&t=', util.timestamp()])
req = net.urlopen(url, headers={'Cookie': cookie.header_output()})
if req:
content = req.data
return json.loads(content.decode())
else:
return None
|
[
"def",
"cloud_query_sinfo",
"(",
"cookie",
",",
"tokens",
",",
"source_path",
")",
":",
"url",
"=",
"''",
".",
"join",
"(",
"[",
"const",
".",
"PAN_URL",
",",
"'rest/2.0/services/cloud_dl?channel=chunlei&clienttype=0&web=1'",
",",
"'&method=query_sinfo&app_id=250528'",
",",
"'&bdstoken='",
",",
"tokens",
"[",
"'bdstoken'",
"]",
",",
"'&source_path='",
",",
"encoder",
".",
"encode_uri_component",
"(",
"source_path",
")",
",",
"'&type=2'",
",",
"'&t='",
",",
"util",
".",
"timestamp",
"(",
")",
"]",
")",
"req",
"=",
"net",
".",
"urlopen",
"(",
"url",
",",
"headers",
"=",
"{",
"'Cookie'",
":",
"cookie",
".",
"header_output",
"(",
")",
"}",
")",
"if",
"req",
":",
"content",
"=",
"req",
".",
"data",
"return",
"json",
".",
"loads",
"(",
"content",
".",
"decode",
"(",
")",
")",
"else",
":",
"return",
"None"
] |
source_path - the absolute path of the bt torrent seed file .
|
train
| true
|
55,262
|
def number_aware_alphabetical_cmp(str1, str2):
def flatten_tokens(tokens):
l = []
for token in tokens:
if isinstance(token, str):
for char in token:
l.append(char)
else:
assert isinstance(token, float)
l.append(token)
return l
seq1 = flatten_tokens(tokenize_by_number(str1))
seq2 = flatten_tokens(tokenize_by_number(str2))
l = min(len(seq1), len(seq2))
i = 0
while (i < l):
if (isinstance(seq1[i], float) and isinstance(seq2[i], string_types)):
return (-1)
elif (isinstance(seq1[i], string_types) and isinstance(seq2[i], float)):
return 1
elif (seq1[i] < seq2[i]):
return (-1)
elif (seq1[i] > seq2[i]):
return 1
i += 1
if (len(seq1) < len(seq2)):
return (-1)
elif (len(seq1) > len(seq2)):
return 1
return 0
|
[
"def",
"number_aware_alphabetical_cmp",
"(",
"str1",
",",
"str2",
")",
":",
"def",
"flatten_tokens",
"(",
"tokens",
")",
":",
"l",
"=",
"[",
"]",
"for",
"token",
"in",
"tokens",
":",
"if",
"isinstance",
"(",
"token",
",",
"str",
")",
":",
"for",
"char",
"in",
"token",
":",
"l",
".",
"append",
"(",
"char",
")",
"else",
":",
"assert",
"isinstance",
"(",
"token",
",",
"float",
")",
"l",
".",
"append",
"(",
"token",
")",
"return",
"l",
"seq1",
"=",
"flatten_tokens",
"(",
"tokenize_by_number",
"(",
"str1",
")",
")",
"seq2",
"=",
"flatten_tokens",
"(",
"tokenize_by_number",
"(",
"str2",
")",
")",
"l",
"=",
"min",
"(",
"len",
"(",
"seq1",
")",
",",
"len",
"(",
"seq2",
")",
")",
"i",
"=",
"0",
"while",
"(",
"i",
"<",
"l",
")",
":",
"if",
"(",
"isinstance",
"(",
"seq1",
"[",
"i",
"]",
",",
"float",
")",
"and",
"isinstance",
"(",
"seq2",
"[",
"i",
"]",
",",
"string_types",
")",
")",
":",
"return",
"(",
"-",
"1",
")",
"elif",
"(",
"isinstance",
"(",
"seq1",
"[",
"i",
"]",
",",
"string_types",
")",
"and",
"isinstance",
"(",
"seq2",
"[",
"i",
"]",
",",
"float",
")",
")",
":",
"return",
"1",
"elif",
"(",
"seq1",
"[",
"i",
"]",
"<",
"seq2",
"[",
"i",
"]",
")",
":",
"return",
"(",
"-",
"1",
")",
"elif",
"(",
"seq1",
"[",
"i",
"]",
">",
"seq2",
"[",
"i",
"]",
")",
":",
"return",
"1",
"i",
"+=",
"1",
"if",
"(",
"len",
"(",
"seq1",
")",
"<",
"len",
"(",
"seq2",
")",
")",
":",
"return",
"(",
"-",
"1",
")",
"elif",
"(",
"len",
"(",
"seq1",
")",
">",
"len",
"(",
"seq2",
")",
")",
":",
"return",
"1",
"return",
"0"
] |
cmp function for sorting a list of strings alphabetically while comparing embedded numbers numerically .
|
train
| true
|
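`tokenize_by_number` and `string_types` are defined elsewhere in pylearn2; with the snippet above in scope plus hypothetical stand-ins for both, the comparison orders digit runs numerically:

```python
import re
from functools import cmp_to_key

string_types = (str,)  # stand-in for six/theano string_types on python 3

def tokenize_by_number(s):
    # hypothetical stand-in: runs of digits become floats, everything else stays a string
    return [float(t) if t[0].isdigit() else t for t in re.findall(r'\d+|\D+', s)]

print(sorted(['file10', 'file2', 'file1'],
             key=cmp_to_key(number_aware_alphabetical_cmp)))
# ['file1', 'file2', 'file10'] rather than the lexicographic ['file1', 'file10', 'file2']
```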
55,264
|
def strip_esc_sequence(txt):
if isinstance(txt, six.string_types):
return txt.replace('\x1b', '?')
else:
return txt
|
[
"def",
"strip_esc_sequence",
"(",
"txt",
")",
":",
"if",
"isinstance",
"(",
"txt",
",",
"six",
".",
"string_types",
")",
":",
"return",
"txt",
".",
"replace",
"(",
"'\\x1b'",
",",
"'?'",
")",
"else",
":",
"return",
"txt"
] |
replace esc to prevent unsafe strings from writing their own terminal manipulation commands .
|
train
| false
|
55,268
|
def deg(r):
return ((r / pi) * 180)
|
[
"def",
"deg",
"(",
"r",
")",
":",
"return",
"(",
"(",
"r",
"/",
"pi",
")",
"*",
"180",
")"
] |
return the degree value for the given radians .
|
train
| false
|
55,269
|
def cross_entropy_seq(logits, target_seqs, batch_size=1, num_steps=None):
loss = tf.nn.seq2seq.sequence_loss_by_example([logits], [tf.reshape(target_seqs, [(-1)])], [tf.ones([(batch_size * num_steps)])])
cost = (tf.reduce_sum(loss) / batch_size)
return cost
|
[
"def",
"cross_entropy_seq",
"(",
"logits",
",",
"target_seqs",
",",
"batch_size",
"=",
"1",
",",
"num_steps",
"=",
"None",
")",
":",
"loss",
"=",
"tf",
".",
"nn",
".",
"seq2seq",
".",
"sequence_loss_by_example",
"(",
"[",
"logits",
"]",
",",
"[",
"tf",
".",
"reshape",
"(",
"target_seqs",
",",
"[",
"(",
"-",
"1",
")",
"]",
")",
"]",
",",
"[",
"tf",
".",
"ones",
"(",
"[",
"(",
"batch_size",
"*",
"num_steps",
")",
"]",
")",
"]",
")",
"cost",
"=",
"(",
"tf",
".",
"reduce_sum",
"(",
"loss",
")",
"/",
"batch_size",
")",
"return",
"cost"
] |
returns the expression of cross-entropy of two sequences .
|
train
| false
|
55,270
|
def assert_crypto_availability(f):
@functools.wraps(f)
def wrapper(*args, **kwds):
if (AES is None):
raise CryptoUnavailableError()
return f(*args, **kwds)
return wrapper
|
[
"def",
"assert_crypto_availability",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
":",
"if",
"(",
"AES",
"is",
"None",
")",
":",
"raise",
"CryptoUnavailableError",
"(",
")",
"return",
"f",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
"return",
"wrapper"
] |
ensure crypto module is available .
|
train
| false
|
55,271
|
def create_urllib3_context(ssl_version=None, cert_reqs=None, options=None, ciphers=None):
context = SSLContext((ssl_version or ssl.PROTOCOL_SSLv23))
cert_reqs = (ssl.CERT_REQUIRED if (cert_reqs is None) else cert_reqs)
if (options is None):
options = 0
options |= OP_NO_SSLv2
options |= OP_NO_SSLv3
options |= OP_NO_COMPRESSION
context.options |= options
if getattr(context, 'supports_set_ciphers', True):
context.set_ciphers((ciphers or DEFAULT_CIPHERS))
context.verify_mode = cert_reqs
if (getattr(context, 'check_hostname', None) is not None):
context.check_hostname = False
return context
|
[
"def",
"create_urllib3_context",
"(",
"ssl_version",
"=",
"None",
",",
"cert_reqs",
"=",
"None",
",",
"options",
"=",
"None",
",",
"ciphers",
"=",
"None",
")",
":",
"context",
"=",
"SSLContext",
"(",
"(",
"ssl_version",
"or",
"ssl",
".",
"PROTOCOL_SSLv23",
")",
")",
"cert_reqs",
"=",
"(",
"ssl",
".",
"CERT_REQUIRED",
"if",
"(",
"cert_reqs",
"is",
"None",
")",
"else",
"cert_reqs",
")",
"if",
"(",
"options",
"is",
"None",
")",
":",
"options",
"=",
"0",
"options",
"|=",
"OP_NO_SSLv2",
"options",
"|=",
"OP_NO_SSLv3",
"options",
"|=",
"OP_NO_COMPRESSION",
"context",
".",
"options",
"|=",
"options",
"if",
"getattr",
"(",
"context",
",",
"'supports_set_ciphers'",
",",
"True",
")",
":",
"context",
".",
"set_ciphers",
"(",
"(",
"ciphers",
"or",
"DEFAULT_CIPHERS",
")",
")",
"context",
".",
"verify_mode",
"=",
"cert_reqs",
"if",
"(",
"getattr",
"(",
"context",
",",
"'check_hostname'",
",",
"None",
")",
"is",
"not",
"None",
")",
":",
"context",
".",
"check_hostname",
"=",
"False",
"return",
"context"
] |
all arguments have the same meaning as ssl_wrap_socket .
|
train
| true
|
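The same helper ships in urllib3 itself as `urllib3.util.ssl_.create_urllib3_context`, so its defaults can be checked against a real install (current releases may differ in detail from the vendored copy above):

```python
import ssl
from urllib3.util.ssl_ import create_urllib3_context

ctx = create_urllib3_context()
print(ctx.verify_mode == ssl.CERT_REQUIRED)  # True: certificates are verified by default
```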
55,272
|
def _get_block_count_url_string(xblock_type_set):
block_url = ''
if (len(xblock_type_set) > 0):
block_url += '&all_blocks=true&block_counts='
for (index, block_type) in enumerate(xblock_type_set):
block_url += block_type
if (index < (len(xblock_type_set) - 1)):
block_url += ','
return block_url
|
[
"def",
"_get_block_count_url_string",
"(",
"xblock_type_set",
")",
":",
"block_url",
"=",
"''",
"if",
"(",
"len",
"(",
"xblock_type_set",
")",
">",
"0",
")",
":",
"block_url",
"+=",
"'&all_blocks=true&block_counts='",
"for",
"(",
"index",
",",
"block_type",
")",
"in",
"enumerate",
"(",
"xblock_type_set",
")",
":",
"block_url",
"+=",
"block_type",
"if",
"(",
"index",
"<",
"(",
"len",
"(",
"xblock_type_set",
")",
"-",
"1",
")",
")",
":",
"block_url",
"+=",
"','",
"return",
"block_url"
] |
build the string from the xblock type set to append to the block url for block_count types . arguments : xblock_type_set : a set of strings for all the block types . returns : str : the portion to append to the block url .
|
train
| false
|
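With the snippet above in scope; the inner loop is effectively `','.join(...)`, and because the input is a set, the order of the joined types is not deterministic:

```python
print(_get_block_count_url_string(set()))                 # '' (empty set adds nothing)
print(_get_block_count_url_string({'video', 'problem'}))
# e.g. &all_blocks=true&block_counts=video,problem (order may vary)
```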
55,273
|
def create_territories():
from frappe.utils.nestedset import get_root_of
country = frappe.db.get_default(u'country')
root_territory = get_root_of(u'Territory')
for name in (country, _(u'Rest Of The World')):
if (name and (not frappe.db.exists(u'Territory', name))):
frappe.get_doc({u'doctype': u'Territory', u'territory_name': name.replace(u"'", u''), u'parent_territory': root_territory, u'is_group': u'No'}).insert()
|
[
"def",
"create_territories",
"(",
")",
":",
"from",
"frappe",
".",
"utils",
".",
"nestedset",
"import",
"get_root_of",
"country",
"=",
"frappe",
".",
"db",
".",
"get_default",
"(",
"u'country'",
")",
"root_territory",
"=",
"get_root_of",
"(",
"u'Territory'",
")",
"for",
"name",
"in",
"(",
"country",
",",
"_",
"(",
"u'Rest Of The World'",
")",
")",
":",
"if",
"(",
"name",
"and",
"(",
"not",
"frappe",
".",
"db",
".",
"exists",
"(",
"u'Territory'",
",",
"name",
")",
")",
")",
":",
"frappe",
".",
"get_doc",
"(",
"{",
"u'doctype'",
":",
"u'Territory'",
",",
"u'territory_name'",
":",
"name",
".",
"replace",
"(",
"u\"'\"",
",",
"u''",
")",
",",
"u'parent_territory'",
":",
"root_territory",
",",
"u'is_group'",
":",
"u'No'",
"}",
")",
".",
"insert",
"(",
")"
] |
create two default territories .
|
train
| false
|
55,274
|
@task
def GitHub_release_text():
shortversion = get_sympy_short_version()
htmltable = table()
out = 'See https://github.com/sympy/sympy/wiki/release-notes-for-{shortversion} for the release notes.\n\n{htmltable}\n\n**Note**: Do not download the **Source code (zip)** or the **Source code (tar.gz)**\nfiles below.\n'
out = out.format(shortversion=shortversion, htmltable=htmltable)
print(blue('Here are the release notes to copy into the GitHub release Markdown form:', bold=True))
print()
print(out)
return out
|
[
"@",
"task",
"def",
"GitHub_release_text",
"(",
")",
":",
"shortversion",
"=",
"get_sympy_short_version",
"(",
")",
"htmltable",
"=",
"table",
"(",
")",
"out",
"=",
"'See https://github.com/sympy/sympy/wiki/release-notes-for-{shortversion} for the release notes.\\n\\n{htmltable}\\n\\n**Note**: Do not download the **Source code (zip)** or the **Source code (tar.gz)**\\nfiles below.\\n'",
"out",
"=",
"out",
".",
"format",
"(",
"shortversion",
"=",
"shortversion",
",",
"htmltable",
"=",
"htmltable",
")",
"print",
"(",
"blue",
"(",
"'Here are the release notes to copy into the GitHub release Markdown form:'",
",",
"bold",
"=",
"True",
")",
")",
"print",
"(",
")",
"print",
"(",
"out",
")",
"return",
"out"
] |
generate text to put in the github release markdown box .
|
train
| false
|
55,275
|
@pytest.mark.parametrize('fast_writer', [True, False])
def test_byte_string_output(fast_writer):
t = table.Table([['Hello', 'World']], dtype=['S10'])
out = StringIO()
ascii.write(t, out, fast_writer=fast_writer)
assert (out.getvalue().splitlines() == ['col0', 'Hello', 'World'])
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'fast_writer'",
",",
"[",
"True",
",",
"False",
"]",
")",
"def",
"test_byte_string_output",
"(",
"fast_writer",
")",
":",
"t",
"=",
"table",
".",
"Table",
"(",
"[",
"[",
"'Hello'",
",",
"'World'",
"]",
"]",
",",
"dtype",
"=",
"[",
"'S10'",
"]",
")",
"out",
"=",
"StringIO",
"(",
")",
"ascii",
".",
"write",
"(",
"t",
",",
"out",
",",
"fast_writer",
"=",
"fast_writer",
")",
"assert",
"(",
"out",
".",
"getvalue",
"(",
")",
".",
"splitlines",
"(",
")",
"==",
"[",
"'col0'",
",",
"'Hello'",
",",
"'World'",
"]",
")"
] |
test the fix for #4350 where byte strings were output with a leading b on py3 .
|
train
| false
|
55,276
|
def _name_value_to_bson(name, value, check_keys, opts):
try:
return _ENCODERS[type(value)](name, value, check_keys, opts)
except KeyError:
pass
marker = getattr(value, '_type_marker', None)
if (isinstance(marker, int) and (marker in _MARKERS)):
func = _MARKERS[marker]
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
for base in _ENCODERS:
if isinstance(value, base):
func = _ENCODERS[base]
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
raise InvalidDocument(('cannot convert value of type %s to bson' % type(value)))
|
[
"def",
"_name_value_to_bson",
"(",
"name",
",",
"value",
",",
"check_keys",
",",
"opts",
")",
":",
"try",
":",
"return",
"_ENCODERS",
"[",
"type",
"(",
"value",
")",
"]",
"(",
"name",
",",
"value",
",",
"check_keys",
",",
"opts",
")",
"except",
"KeyError",
":",
"pass",
"marker",
"=",
"getattr",
"(",
"value",
",",
"'_type_marker'",
",",
"None",
")",
"if",
"(",
"isinstance",
"(",
"marker",
",",
"int",
")",
"and",
"(",
"marker",
"in",
"_MARKERS",
")",
")",
":",
"func",
"=",
"_MARKERS",
"[",
"marker",
"]",
"_ENCODERS",
"[",
"type",
"(",
"value",
")",
"]",
"=",
"func",
"return",
"func",
"(",
"name",
",",
"value",
",",
"check_keys",
",",
"opts",
")",
"for",
"base",
"in",
"_ENCODERS",
":",
"if",
"isinstance",
"(",
"value",
",",
"base",
")",
":",
"func",
"=",
"_ENCODERS",
"[",
"base",
"]",
"_ENCODERS",
"[",
"type",
"(",
"value",
")",
"]",
"=",
"func",
"return",
"func",
"(",
"name",
",",
"value",
",",
"check_keys",
",",
"opts",
")",
"raise",
"InvalidDocument",
"(",
"(",
"'cannot convert value of type %s to bson'",
"%",
"type",
"(",
"value",
")",
")",
")"
] |
encode a single name, value pair .
|
train
| true
|
55,278
|
def getRandomRange(a, b, randfunc=None):
range_ = ((b - a) - 1)
bits = size(range_)
value = getRandomInteger(bits, randfunc)
while (value > range_):
value = getRandomInteger(bits, randfunc)
return (a + value)
|
[
"def",
"getRandomRange",
"(",
"a",
",",
"b",
",",
"randfunc",
"=",
"None",
")",
":",
"range_",
"=",
"(",
"(",
"b",
"-",
"a",
")",
"-",
"1",
")",
"bits",
"=",
"size",
"(",
"range_",
")",
"value",
"=",
"getRandomInteger",
"(",
"bits",
",",
"randfunc",
")",
"while",
"(",
"value",
">",
"range_",
")",
":",
"value",
"=",
"getRandomInteger",
"(",
"bits",
",",
"randfunc",
")",
"return",
"(",
"a",
"+",
"value",
")"
] |
getRandomRange(a, b, randfunc) -> long : return a random number n so that a <= n < b .
|
train
| false
|
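`size` and `getRandomInteger` are PyCrypto helpers; a pure-python sketch with stand-ins shows the rejection-sampling loop that keeps the result uniform over [a, b) instead of biasing it with a modulus:

```python
import random

def size(n):
    # stand-in for Crypto.Util.number.size: bit length of n
    return n.bit_length()

def getRandomInteger(bits, randfunc=None):
    # stand-in: uniform integer in [0, 2**bits)
    return random.getrandbits(bits) if bits > 0 else 0

def getRandomRange(a, b, randfunc=None):
    range_ = b - a - 1
    bits = size(range_)
    value = getRandomInteger(bits, randfunc)
    while value > range_:  # reject out-of-range draws rather than folding them back in
        value = getRandomInteger(bits, randfunc)
    return a + value

print(all(10 <= getRandomRange(10, 20) < 20 for _ in range(1000)))  # True
```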
55,279
|
def _determine_toggles(payload, toggles):
for (toggle, definition) in six.iteritems(toggles):
if (definition['value'] is not None):
if (((definition['value'] is True) or (definition['value'] == 'yes')) and (definition['type'] == 'yes_no')):
payload[toggle] = 'yes'
elif (((definition['value'] is False) or (definition['value'] == 'no')) and (definition['type'] == 'yes_no')):
payload[toggle] = 'no'
if (((definition['value'] is True) or (definition['value'] == 'yes')) and (definition['type'] == 'true_false')):
payload[toggle] = True
elif (((definition['value'] is False) or (definition['value'] == 'no')) and (definition['type'] == 'true_false')):
payload[toggle] = False
return payload
|
[
"def",
"_determine_toggles",
"(",
"payload",
",",
"toggles",
")",
":",
"for",
"(",
"toggle",
",",
"definition",
")",
"in",
"six",
".",
"iteritems",
"(",
"toggles",
")",
":",
"if",
"(",
"definition",
"[",
"'value'",
"]",
"is",
"not",
"None",
")",
":",
"if",
"(",
"(",
"(",
"definition",
"[",
"'value'",
"]",
"is",
"True",
")",
"or",
"(",
"definition",
"[",
"'value'",
"]",
"==",
"'yes'",
")",
")",
"and",
"(",
"definition",
"[",
"'type'",
"]",
"==",
"'yes_no'",
")",
")",
":",
"payload",
"[",
"toggle",
"]",
"=",
"'yes'",
"elif",
"(",
"(",
"(",
"definition",
"[",
"'value'",
"]",
"is",
"False",
")",
"or",
"(",
"definition",
"[",
"'value'",
"]",
"==",
"'no'",
")",
")",
"and",
"(",
"definition",
"[",
"'type'",
"]",
"==",
"'yes_no'",
")",
")",
":",
"payload",
"[",
"toggle",
"]",
"=",
"'no'",
"if",
"(",
"(",
"(",
"definition",
"[",
"'value'",
"]",
"is",
"True",
")",
"or",
"(",
"definition",
"[",
"'value'",
"]",
"==",
"'yes'",
")",
")",
"and",
"(",
"definition",
"[",
"'type'",
"]",
"==",
"'true_false'",
")",
")",
":",
"payload",
"[",
"toggle",
"]",
"=",
"True",
"elif",
"(",
"(",
"(",
"definition",
"[",
"'value'",
"]",
"is",
"False",
")",
"or",
"(",
"definition",
"[",
"'value'",
"]",
"==",
"'no'",
")",
")",
"and",
"(",
"definition",
"[",
"'type'",
"]",
"==",
"'true_false'",
")",
")",
":",
"payload",
"[",
"toggle",
"]",
"=",
"False",
"return",
"payload"
] |
bigip cant make up its mind if it likes yes / no or true or false .
|
train
| true
|
55,280
|
def serialize_revision(node, record, version, index, anon=False):
if anon:
user = None
else:
user = {u'name': version.creator.fullname, u'url': version.creator.url}
return {u'user': user, u'index': (index + 1), u'date': version.date_created.isoformat(), u'downloads': record.get_download_count(version=index), u'md5': version.metadata.get(u'md5'), u'sha256': version.metadata.get(u'sha256')}
|
[
"def",
"serialize_revision",
"(",
"node",
",",
"record",
",",
"version",
",",
"index",
",",
"anon",
"=",
"False",
")",
":",
"if",
"anon",
":",
"user",
"=",
"None",
"else",
":",
"user",
"=",
"{",
"u'name'",
":",
"version",
".",
"creator",
".",
"fullname",
",",
"u'url'",
":",
"version",
".",
"creator",
".",
"url",
"}",
"return",
"{",
"u'user'",
":",
"user",
",",
"u'index'",
":",
"(",
"index",
"+",
"1",
")",
",",
"u'date'",
":",
"version",
".",
"date_created",
".",
"isoformat",
"(",
")",
",",
"u'downloads'",
":",
"record",
".",
"get_download_count",
"(",
"version",
"=",
"index",
")",
",",
"u'md5'",
":",
"version",
".",
"metadata",
".",
"get",
"(",
"u'md5'",
")",
",",
"u'sha256'",
":",
"version",
".",
"metadata",
".",
"get",
"(",
"u'sha256'",
")",
"}"
] |
serialize revision for use in revisions table .
|
train
| false
|
55,283
|
def test_epochs_hash():
(raw, events) = _get_data()[:2]
epochs = Epochs(raw, events, event_id, tmin, tmax)
assert_raises(RuntimeError, epochs.__hash__)
epochs = Epochs(raw, events, event_id, tmin, tmax, preload=True)
assert_equal(hash(epochs), hash(epochs))
epochs_2 = Epochs(raw, events, event_id, tmin, tmax, preload=True)
assert_equal(hash(epochs), hash(epochs_2))
assert_true((pickle.dumps(epochs) == pickle.dumps(epochs_2)))
epochs_2._data[(0, 0, 0)] -= 1
assert_not_equal(hash(epochs), hash(epochs_2))
|
[
"def",
"test_epochs_hash",
"(",
")",
":",
"(",
"raw",
",",
"events",
")",
"=",
"_get_data",
"(",
")",
"[",
":",
"2",
"]",
"epochs",
"=",
"Epochs",
"(",
"raw",
",",
"events",
",",
"event_id",
",",
"tmin",
",",
"tmax",
")",
"assert_raises",
"(",
"RuntimeError",
",",
"epochs",
".",
"__hash__",
")",
"epochs",
"=",
"Epochs",
"(",
"raw",
",",
"events",
",",
"event_id",
",",
"tmin",
",",
"tmax",
",",
"preload",
"=",
"True",
")",
"assert_equal",
"(",
"hash",
"(",
"epochs",
")",
",",
"hash",
"(",
"epochs",
")",
")",
"epochs_2",
"=",
"Epochs",
"(",
"raw",
",",
"events",
",",
"event_id",
",",
"tmin",
",",
"tmax",
",",
"preload",
"=",
"True",
")",
"assert_equal",
"(",
"hash",
"(",
"epochs",
")",
",",
"hash",
"(",
"epochs_2",
")",
")",
"assert_true",
"(",
"(",
"pickle",
".",
"dumps",
"(",
"epochs",
")",
"==",
"pickle",
".",
"dumps",
"(",
"epochs_2",
")",
")",
")",
"epochs_2",
".",
"_data",
"[",
"(",
"0",
",",
"0",
",",
"0",
")",
"]",
"-=",
"1",
"assert_not_equal",
"(",
"hash",
"(",
"epochs",
")",
",",
"hash",
"(",
"epochs_2",
")",
")"
] |
test epoch hashing .
|
train
| false
|
55,284
|
def read_plain_int32(file_obj, count):
length = (4 * count)
data = file_obj.read(length)
if (len(data) != length):
raise EOFError(u'Expected {0} bytes but got {1} bytes'.format(length, len(data)))
res = struct.unpack('<{0}i'.format(count).encode(u'utf-8'), data)
return res
|
[
"def",
"read_plain_int32",
"(",
"file_obj",
",",
"count",
")",
":",
"length",
"=",
"(",
"4",
"*",
"count",
")",
"data",
"=",
"file_obj",
".",
"read",
"(",
"length",
")",
"if",
"(",
"len",
"(",
"data",
")",
"!=",
"length",
")",
":",
"raise",
"EOFError",
"(",
"u'Expected {0} bytes but got {1} bytes'",
".",
"format",
"(",
"length",
",",
"len",
"(",
"data",
")",
")",
")",
"res",
"=",
"struct",
".",
"unpack",
"(",
"'<{0}i'",
".",
"format",
"(",
"count",
")",
".",
"encode",
"(",
"u'utf-8'",
")",
",",
"data",
")",
"return",
"res"
] |
read count 32-bit ints using the plain encoding .
|
train
| true
|
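A minimal check for the snippet above using an in-memory buffer ('<3i' packs three little-endian 32-bit ints, exactly the layout the reader expects):
import io, struct
buf = io.BytesIO(struct.pack('<3i', 1, -2, 3))
assert read_plain_int32(buf, 3) == (1, -2, 3)
read_plain_int32(io.BytesIO(b'\x00\x00'), 1)   # raises EOFError: expected 4 bytes, got 2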
55,286
|
def create_tags(filesystemid, tags, keyid=None, key=None, profile=None, region=None, **kwargs):
client = _get_conn(key=key, keyid=keyid, profile=profile, region=region)
new_tags = []
for (k, v) in tags.iteritems():
new_tags.append({'Key': k, 'Value': v})
client.create_tags(FileSystemId=filesystemid, Tags=new_tags)
|
[
"def",
"create_tags",
"(",
"filesystemid",
",",
"tags",
",",
"keyid",
"=",
"None",
",",
"key",
"=",
"None",
",",
"profile",
"=",
"None",
",",
"region",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"client",
"=",
"_get_conn",
"(",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
",",
"region",
"=",
"region",
")",
"new_tags",
"=",
"[",
"]",
"for",
"(",
"k",
",",
"v",
")",
"in",
"tags",
".",
"iteritems",
"(",
")",
":",
"new_tags",
".",
"append",
"(",
"{",
"'Key'",
":",
"k",
",",
"'Value'",
":",
"v",
"}",
")",
"client",
".",
"create_tags",
"(",
"FileSystemId",
"=",
"filesystemid",
",",
"Tags",
"=",
"new_tags",
")"
] |
creates or overwrites tags associated with a file system .
|
train
| true
|
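A hedged usage sketch for the salt module function above (file system id and profile are made up; the dict is reshaped into boto3's list-of-Key/Value form, and tags.iteritems() ties the snippet to Python 2):
create_tags('fs-0123abcd', {'Name': 'prod-share', 'Team': 'storage'}, profile='aws-dev')
# boto3 call: client.create_tags(FileSystemId='fs-0123abcd',
#                                Tags=[{'Key': 'Name', 'Value': 'prod-share'},
#                                      {'Key': 'Team', 'Value': 'storage'}])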
55,287
|
def read_template(tmplf):
if pkg_resources.resource_exists('roscreate', tmplf):
f = pkg_resources.resource_stream('roscreate', tmplf)
t = f.read()
else:
r = rospkg.RosPack()
with open(os.path.join(r.get_path('roscreate'), 'templates', tmplf)) as f:
t = f.read()
try:
t = t.decode('utf-8')
except AttributeError:
pass
return t
|
[
"def",
"read_template",
"(",
"tmplf",
")",
":",
"if",
"pkg_resources",
".",
"resource_exists",
"(",
"'roscreate'",
",",
"tmplf",
")",
":",
"f",
"=",
"pkg_resources",
".",
"resource_stream",
"(",
"'roscreate'",
",",
"tmplf",
")",
"t",
"=",
"f",
".",
"read",
"(",
")",
"else",
":",
"r",
"=",
"rospkg",
".",
"RosPack",
"(",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"r",
".",
"get_path",
"(",
"'roscreate'",
")",
",",
"'templates'",
",",
"tmplf",
")",
")",
"as",
"f",
":",
"t",
"=",
"f",
".",
"read",
"(",
")",
"try",
":",
"t",
"=",
"t",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"AttributeError",
":",
"pass",
"return",
"t"
] |
read resource template from egg installation .
|
train
| false
|
55,288
|
def make_or_verify_needed_dirs(config):
make_or_verify_core_dir(config.config_dir, constants.CONFIG_DIRS_MODE, os.geteuid(), config.strict_permissions)
make_or_verify_core_dir(config.work_dir, constants.CONFIG_DIRS_MODE, os.geteuid(), config.strict_permissions)
make_or_verify_core_dir(config.logs_dir, 448, os.geteuid(), config.strict_permissions)
|
[
"def",
"make_or_verify_needed_dirs",
"(",
"config",
")",
":",
"make_or_verify_core_dir",
"(",
"config",
".",
"config_dir",
",",
"constants",
".",
"CONFIG_DIRS_MODE",
",",
"os",
".",
"geteuid",
"(",
")",
",",
"config",
".",
"strict_permissions",
")",
"make_or_verify_core_dir",
"(",
"config",
".",
"work_dir",
",",
"constants",
".",
"CONFIG_DIRS_MODE",
",",
"os",
".",
"geteuid",
"(",
")",
",",
"config",
".",
"strict_permissions",
")",
"make_or_verify_core_dir",
"(",
"config",
".",
"logs_dir",
",",
"448",
",",
"os",
".",
"geteuid",
"(",
")",
",",
"config",
".",
"strict_permissions",
")"
] |
create or verify existence of config .
|
train
| false
|
55,289
|
@pytest.mark.hasgpu
def test_hist(nbin_offset_dim_dtype_inp, backend_pair):
((nbins, offset), dim, dtype, (name, inp_gen)) = nbin_offset_dim_dtype_inp
gpuflag = (check_gpu.get_compute_capability(0) >= 3.0)
if (gpuflag is False):
raise RuntimeError('Device does not have CUDA compute capability 3.0 or greater')
(ng, nc) = backend_pair
ng.set_hist_buffers(nbins, offset)
nc.set_hist_buffers(nbins, offset)
np_inp = inp_gen(dim).astype(dtype)
np_hist = ref_hist(np_inp, nbins=nbins, offset=offset)
for be in [ng, nc]:
be_inp = be.array(np_inp, dtype=dtype)
be_hist = be_inp.hist(name)
assert tensors_allclose(np_hist, be_hist)
|
[
"@",
"pytest",
".",
"mark",
".",
"hasgpu",
"def",
"test_hist",
"(",
"nbin_offset_dim_dtype_inp",
",",
"backend_pair",
")",
":",
"(",
"(",
"nbins",
",",
"offset",
")",
",",
"dim",
",",
"dtype",
",",
"(",
"name",
",",
"inp_gen",
")",
")",
"=",
"nbin_offset_dim_dtype_inp",
"gpuflag",
"=",
"(",
"check_gpu",
".",
"get_compute_capability",
"(",
"0",
")",
">=",
"3.0",
")",
"if",
"(",
"gpuflag",
"is",
"False",
")",
":",
"raise",
"RuntimeError",
"(",
"'Device does not have CUDA compute capability 3.0 or greater'",
")",
"(",
"ng",
",",
"nc",
")",
"=",
"backend_pair",
"ng",
".",
"set_hist_buffers",
"(",
"nbins",
",",
"offset",
")",
"nc",
".",
"set_hist_buffers",
"(",
"nbins",
",",
"offset",
")",
"np_inp",
"=",
"inp_gen",
"(",
"dim",
")",
".",
"astype",
"(",
"dtype",
")",
"np_hist",
"=",
"ref_hist",
"(",
"np_inp",
",",
"nbins",
"=",
"nbins",
",",
"offset",
"=",
"offset",
")",
"for",
"be",
"in",
"[",
"ng",
",",
"nc",
"]",
":",
"be_inp",
"=",
"be",
".",
"array",
"(",
"np_inp",
",",
"dtype",
"=",
"dtype",
")",
"be_hist",
"=",
"be_inp",
".",
"hist",
"(",
"name",
")",
"assert",
"tensors_allclose",
"(",
"np_hist",
",",
"be_hist",
")"
] |
compare the nervanagpu and nervanacpu hist implementation to the reference implementation above .
|
train
| false
|
55,290
|
def _force_mutable(x):
if getattr(x, 'is_Matrix', False):
return x.as_mutable()
elif isinstance(x, Basic):
return x
elif hasattr(x, '__array__'):
a = x.__array__()
if (len(a.shape) == 0):
return sympify(a)
return Matrix(x)
return x
|
[
"def",
"_force_mutable",
"(",
"x",
")",
":",
"if",
"getattr",
"(",
"x",
",",
"'is_Matrix'",
",",
"False",
")",
":",
"return",
"x",
".",
"as_mutable",
"(",
")",
"elif",
"isinstance",
"(",
"x",
",",
"Basic",
")",
":",
"return",
"x",
"elif",
"hasattr",
"(",
"x",
",",
"'__array__'",
")",
":",
"a",
"=",
"x",
".",
"__array__",
"(",
")",
"if",
"(",
"len",
"(",
"a",
".",
"shape",
")",
"==",
"0",
")",
":",
"return",
"sympify",
"(",
"a",
")",
"return",
"Matrix",
"(",
"x",
")",
"return",
"x"
] |
return a matrix as a matrix .
|
train
| false
|
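A behavior sketch for the snippet above (hedged; assumes sympy's Matrix/sympify machinery is what the snippet imports):
import numpy as np
from sympy import ImmutableMatrix, Integer
_force_mutable(ImmutableMatrix([[1, 2]]))   # is_Matrix -> returned as a mutable Matrix
_force_mutable(Integer(3))                  # a Basic, returned unchanged
_force_mutable(np.array(5))                 # 0-d array -> sympified scalar
_force_mutable(np.array([[1, 2]]))          # array-like -> Matrix([[1, 2]])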
55,291
|
def _check_cron_env(user, name, value=None):
if (value is None):
value = ''
lst = __salt__['cron.list_tab'](user)
for env in lst['env']:
if (name == env['name']):
if (value != env['value']):
return 'update'
return 'present'
return 'absent'
|
[
"def",
"_check_cron_env",
"(",
"user",
",",
"name",
",",
"value",
"=",
"None",
")",
":",
"if",
"(",
"value",
"is",
"None",
")",
":",
"value",
"=",
"''",
"lst",
"=",
"__salt__",
"[",
"'cron.list_tab'",
"]",
"(",
"user",
")",
"for",
"env",
"in",
"lst",
"[",
"'env'",
"]",
":",
"if",
"(",
"name",
"==",
"env",
"[",
"'name'",
"]",
")",
":",
"if",
"(",
"value",
"!=",
"env",
"[",
"'value'",
"]",
")",
":",
"return",
"'update'",
"return",
"'present'",
"return",
"'absent'"
] |
return the environment changes .
|
train
| true
|
55,292
|
def test_daophot_indef():
table = ascii.read('t/daophot2.dat', Reader=ascii.Daophot)
for colname in table.colnames:
mask_value = (colname in ('OTIME', 'MAG', 'MERR', 'XAIRMASS'))
assert np.all((table[colname].mask == mask_value))
|
[
"def",
"test_daophot_indef",
"(",
")",
":",
"table",
"=",
"ascii",
".",
"read",
"(",
"'t/daophot2.dat'",
",",
"Reader",
"=",
"ascii",
".",
"Daophot",
")",
"for",
"colname",
"in",
"table",
".",
"colnames",
":",
"mask_value",
"=",
"(",
"colname",
"in",
"(",
"'OTIME'",
",",
"'MAG'",
",",
"'MERR'",
",",
"'XAIRMASS'",
")",
")",
"assert",
"np",
".",
"all",
"(",
"(",
"table",
"[",
"colname",
"]",
".",
"mask",
"==",
"mask_value",
")",
")"
] |
test that indef is correctly interpreted as a missing value .
|
train
| false
|
55,294
|
def add_password_arg(cmd, psw, required=False):
if (UNRAR_TOOL == ALT_TOOL):
return
if (psw is not None):
cmd.append(('-p' + psw))
else:
cmd.append('-p-')
|
[
"def",
"add_password_arg",
"(",
"cmd",
",",
"psw",
",",
"required",
"=",
"False",
")",
":",
"if",
"(",
"UNRAR_TOOL",
"==",
"ALT_TOOL",
")",
":",
"return",
"if",
"(",
"psw",
"is",
"not",
"None",
")",
":",
"cmd",
".",
"append",
"(",
"(",
"'-p'",
"+",
"psw",
")",
")",
"else",
":",
"cmd",
".",
"append",
"(",
"'-p-'",
")"
] |
append password switch to commandline .
|
train
| false
|
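A quick illustration for the snippet above (rarfile internals; assumes the default unrar tool so the early return is not taken; cmd is a plain argv list and '-p-' disables the password prompt):
cmd = ['unrar', 'x']
add_password_arg(cmd, 'secret')   # cmd -> ['unrar', 'x', '-psecret']
add_password_arg(cmd, None)       # cmd -> ['unrar', 'x', '-psecret', '-p-']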
55,295
|
def _set_sentinel():
return LockType()
|
[
"def",
"_set_sentinel",
"(",
")",
":",
"return",
"LockType",
"(",
")"
] |
dummy implementation of _thread .
|
train
| false
|
55,297
|
def onLoggerAppShutDown():
INFO_MSG('onLoggerAppShutDown()')
|
[
"def",
"onLoggerAppShutDown",
"(",
")",
":",
"INFO_MSG",
"(",
"'onLoggerAppShutDown()'",
")"
] |
kbengine method .
|
train
| false
|
55,298
|
def project_get_networks(context, project_id, associate=True):
return IMPL.project_get_networks(context, project_id, associate)
|
[
"def",
"project_get_networks",
"(",
"context",
",",
"project_id",
",",
"associate",
"=",
"True",
")",
":",
"return",
"IMPL",
".",
"project_get_networks",
"(",
"context",
",",
"project_id",
",",
"associate",
")"
] |
return the network associated with the project .
|
train
| false
|
55,299
|
def new_test_client(cls, **kwargs):
client = cls(debug_logging=True)
client.login(**kwargs)
return client
|
[
"def",
"new_test_client",
"(",
"cls",
",",
"**",
"kwargs",
")",
":",
"client",
"=",
"cls",
"(",
"debug_logging",
"=",
"True",
")",
"client",
".",
"login",
"(",
"**",
"kwargs",
")",
"return",
"client"
] |
make an instance of a client .
|
train
| false
|
55,301
|
def default_formats():
return {'html': {'nbconvert_template': 'basic', 'label': 'Notebook', 'icon': 'book'}, 'slides': {'nbconvert_template': 'slides_reveal', 'label': 'Slides', 'icon': 'gift', 'test': (lambda nb, json: ('"slideshow"' in json))}, 'script': {'label': 'Code', 'icon': 'code', 'content_type': 'text/plain; charset=UTF-8'}}
|
[
"def",
"default_formats",
"(",
")",
":",
"return",
"{",
"'html'",
":",
"{",
"'nbconvert_template'",
":",
"'basic'",
",",
"'label'",
":",
"'Notebook'",
",",
"'icon'",
":",
"'book'",
"}",
",",
"'slides'",
":",
"{",
"'nbconvert_template'",
":",
"'slides_reveal'",
",",
"'label'",
":",
"'Slides'",
",",
"'icon'",
":",
"'gift'",
",",
"'test'",
":",
"(",
"lambda",
"nb",
",",
"json",
":",
"(",
"'\"slideshow\"'",
"in",
"json",
")",
")",
"}",
",",
"'script'",
":",
"{",
"'label'",
":",
"'Code'",
",",
"'icon'",
":",
"'code'",
",",
"'content_type'",
":",
"'text/plain; charset=UTF-8'",
"}",
"}"
] |
return the currently-implemented formats .
|
train
| false
|
55,303
|
def value_present(name, datastore, path, config):
ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}
existing = __salt__['cisconso.get_data'](datastore, path)
if cmp(existing, config):
ret['result'] = True
ret['comment'] = 'Config is already set'
elif (__opts__['test'] is True):
ret['result'] = None
ret['comment'] = 'Config will be added'
diff = _DictDiffer(existing, config)
ret['changes']['new'] = diff.added()
ret['changes']['removed'] = diff.removed()
ret['changes']['changed'] = diff.changed()
else:
__salt__['cisconso.set_data_value'](datastore, path, config)
ret['result'] = True
ret['comment'] = 'Successfully added config'
diff = _DictDiffer(existing, config)
ret['changes']['new'] = diff.added()
ret['changes']['removed'] = diff.removed()
ret['changes']['changed'] = diff.changed()
return ret
|
[
"def",
"value_present",
"(",
"name",
",",
"datastore",
",",
"path",
",",
"config",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'result'",
":",
"False",
",",
"'changes'",
":",
"{",
"}",
",",
"'comment'",
":",
"''",
"}",
"existing",
"=",
"__salt__",
"[",
"'cisconso.get_data'",
"]",
"(",
"datastore",
",",
"path",
")",
"if",
"cmp",
"(",
"existing",
",",
"config",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'comment'",
"]",
"=",
"'Config is already set'",
"elif",
"(",
"__opts__",
"[",
"'test'",
"]",
"is",
"True",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"None",
"ret",
"[",
"'comment'",
"]",
"=",
"'Config will be added'",
"diff",
"=",
"_DictDiffer",
"(",
"existing",
",",
"config",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'new'",
"]",
"=",
"diff",
".",
"added",
"(",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'removed'",
"]",
"=",
"diff",
".",
"removed",
"(",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'changed'",
"]",
"=",
"diff",
".",
"changed",
"(",
")",
"else",
":",
"__salt__",
"[",
"'cisconso.set_data_value'",
"]",
"(",
"datastore",
",",
"path",
",",
"config",
")",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'comment'",
"]",
"=",
"'Successfully added config'",
"diff",
"=",
"_DictDiffer",
"(",
"existing",
",",
"config",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'new'",
"]",
"=",
"diff",
".",
"added",
"(",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'removed'",
"]",
"=",
"diff",
".",
"removed",
"(",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'changed'",
"]",
"=",
"diff",
".",
"changed",
"(",
")",
"return",
"ret"
] |
ensure a specific value exists at a given path .
|
train
| true
|
55,304
|
def test_cache_deactivated_get_data(config_stub, tmpdir):
config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': True}}
disk_cache = cache.DiskCache(str(tmpdir))
url = QUrl('http://www.example.com/')
assert (disk_cache.data(url) is None)
|
[
"def",
"test_cache_deactivated_get_data",
"(",
"config_stub",
",",
"tmpdir",
")",
":",
"config_stub",
".",
"data",
"=",
"{",
"'storage'",
":",
"{",
"'cache-size'",
":",
"1024",
"}",
",",
"'general'",
":",
"{",
"'private-browsing'",
":",
"True",
"}",
"}",
"disk_cache",
"=",
"cache",
".",
"DiskCache",
"(",
"str",
"(",
"tmpdir",
")",
")",
"url",
"=",
"QUrl",
"(",
"'http://www.example.com/'",
")",
"assert",
"(",
"disk_cache",
".",
"data",
"(",
"url",
")",
"is",
"None",
")"
] |
query some data from a deactivated cache .
|
train
| false
|
55,305
|
def ui_open(*files):
if files:
osname = get_os_name()
opener = _OPENER_BY_OS.get(osname)
if opener:
opener(files)
else:
raise OpenError((u'Open currently not supported for ' + osname))
|
[
"def",
"ui_open",
"(",
"*",
"files",
")",
":",
"if",
"files",
":",
"osname",
"=",
"get_os_name",
"(",
")",
"opener",
"=",
"_OPENER_BY_OS",
".",
"get",
"(",
"osname",
")",
"if",
"opener",
":",
"opener",
"(",
"files",
")",
"else",
":",
"raise",
"OpenError",
"(",
"(",
"u'Open currently not supported for '",
"+",
"osname",
")",
")"
] |
attempts to open the given files using the preferred desktop viewer or editor .
|
train
| true
|
55,306
|
def is_pure_elemwise(graph, inputs):
allowed_ops = (tensor.basic.DimShuffle, tensor.basic.Elemwise)
owner = graph.owner
op = (graph.owner.op if (graph.owner is not None) else None)
if ((owner is None) and (graph in inputs)):
return True
elif ((owner is None) and isinstance(graph, tensor.basic.TensorConstant)):
return True
elif ((owner is None) and (graph not in inputs)):
return False
elif ((op is not None) and (not isinstance(op, allowed_ops))):
return False
else:
if isinstance(graph.owner.op, tensor.basic.DimShuffle):
shuffled = graph.owner.inputs[0]
if (not isinstance(shuffled, tensor.basic.TensorConstant)):
return False
for inp in graph.owner.inputs:
if (not is_pure_elemwise(inp, inputs)):
return False
return True
|
[
"def",
"is_pure_elemwise",
"(",
"graph",
",",
"inputs",
")",
":",
"allowed_ops",
"=",
"(",
"tensor",
".",
"basic",
".",
"DimShuffle",
",",
"tensor",
".",
"basic",
".",
"Elemwise",
")",
"owner",
"=",
"graph",
".",
"owner",
"op",
"=",
"(",
"graph",
".",
"owner",
".",
"op",
"if",
"(",
"graph",
".",
"owner",
"is",
"not",
"None",
")",
"else",
"None",
")",
"if",
"(",
"(",
"owner",
"is",
"None",
")",
"and",
"(",
"graph",
"in",
"inputs",
")",
")",
":",
"return",
"True",
"elif",
"(",
"(",
"owner",
"is",
"None",
")",
"and",
"isinstance",
"(",
"graph",
",",
"tensor",
".",
"basic",
".",
"TensorConstant",
")",
")",
":",
"return",
"True",
"elif",
"(",
"(",
"owner",
"is",
"None",
")",
"and",
"(",
"graph",
"not",
"in",
"inputs",
")",
")",
":",
"return",
"False",
"elif",
"(",
"(",
"op",
"is",
"not",
"None",
")",
"and",
"(",
"not",
"isinstance",
"(",
"op",
",",
"allowed_ops",
")",
")",
")",
":",
"return",
"False",
"else",
":",
"if",
"isinstance",
"(",
"graph",
".",
"owner",
".",
"op",
",",
"tensor",
".",
"basic",
".",
"DimShuffle",
")",
":",
"shuffled",
"=",
"graph",
".",
"owner",
".",
"inputs",
"[",
"0",
"]",
"if",
"(",
"not",
"isinstance",
"(",
"shuffled",
",",
"tensor",
".",
"basic",
".",
"TensorConstant",
")",
")",
":",
"return",
"False",
"for",
"inp",
"in",
"graph",
".",
"owner",
".",
"inputs",
":",
"if",
"(",
"not",
"is_pure_elemwise",
"(",
"inp",
",",
"inputs",
")",
")",
":",
"return",
"False",
"return",
"True"
] |
checks whether a graph is purely elementwise and containing only inputs from a given list .
|
train
| false
|
55,307
|
def range_error_message(error_message, what_to_enter, minimum, maximum):
if (error_message is None):
error_message = ('Enter ' + what_to_enter)
if ((minimum is not None) and (maximum is not None)):
error_message += ' between %(min)g and %(max)g'
elif (minimum is not None):
error_message += ' greater than or equal to %(min)g'
elif (maximum is not None):
error_message += ' less than or equal to %(max)g'
if (type(maximum) in [int, long]):
maximum -= 1
return (translate(error_message) % dict(min=minimum, max=maximum))
|
[
"def",
"range_error_message",
"(",
"error_message",
",",
"what_to_enter",
",",
"minimum",
",",
"maximum",
")",
":",
"if",
"(",
"error_message",
"is",
"None",
")",
":",
"error_message",
"=",
"(",
"'Enter '",
"+",
"what_to_enter",
")",
"if",
"(",
"(",
"minimum",
"is",
"not",
"None",
")",
"and",
"(",
"maximum",
"is",
"not",
"None",
")",
")",
":",
"error_message",
"+=",
"' between %(min)g and %(max)g'",
"elif",
"(",
"minimum",
"is",
"not",
"None",
")",
":",
"error_message",
"+=",
"' greater than or equal to %(min)g'",
"elif",
"(",
"maximum",
"is",
"not",
"None",
")",
":",
"error_message",
"+=",
"' less than or equal to %(max)g'",
"if",
"(",
"type",
"(",
"maximum",
")",
"in",
"[",
"int",
",",
"long",
"]",
")",
":",
"maximum",
"-=",
"1",
"return",
"(",
"translate",
"(",
"error_message",
")",
"%",
"dict",
"(",
"min",
"=",
"minimum",
",",
"max",
"=",
"maximum",
")",
")"
] |
build the error message for the number range validators .
|
train
| false
|
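Worked examples for the snippet above (hedged: assumes translate() is a pass-through; an integer maximum is decremented so the message reads as an inclusive bound):
range_error_message(None, 'a number', 1, 10)
# -> 'Enter a number between 1 and 9'
range_error_message(None, 'a value', 0.5, None)
# -> 'Enter a value greater than or equal to 0.5'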
55,308
|
def instance_get_floating_address(context, instance_id):
return IMPL.instance_get_floating_address(context, instance_id)
|
[
"def",
"instance_get_floating_address",
"(",
"context",
",",
"instance_id",
")",
":",
"return",
"IMPL",
".",
"instance_get_floating_address",
"(",
"context",
",",
"instance_id",
")"
] |
get the first floating ip address of an instance .
|
train
| false
|
55,309
|
def _authenticate_mongo_cr(credentials, sock_info):
source = credentials.source
username = credentials.username
password = credentials.password
response = sock_info.command(source, {'getnonce': 1})
nonce = response['nonce']
key = _auth_key(nonce, username, password)
query = SON([('authenticate', 1), ('user', username), ('nonce', nonce), ('key', key)])
sock_info.command(source, query)
|
[
"def",
"_authenticate_mongo_cr",
"(",
"credentials",
",",
"sock_info",
")",
":",
"source",
"=",
"credentials",
".",
"source",
"username",
"=",
"credentials",
".",
"username",
"password",
"=",
"credentials",
".",
"password",
"response",
"=",
"sock_info",
".",
"command",
"(",
"source",
",",
"{",
"'getnonce'",
":",
"1",
"}",
")",
"nonce",
"=",
"response",
"[",
"'nonce'",
"]",
"key",
"=",
"_auth_key",
"(",
"nonce",
",",
"username",
",",
"password",
")",
"query",
"=",
"SON",
"(",
"[",
"(",
"'authenticate'",
",",
"1",
")",
",",
"(",
"'user'",
",",
"username",
")",
",",
"(",
"'nonce'",
",",
"nonce",
")",
",",
"(",
"'key'",
",",
"key",
")",
"]",
")",
"sock_info",
".",
"command",
"(",
"source",
",",
"query",
")"
] |
authenticate using mongodb-cr .
|
train
| true
|
55,310
|
def spawn_raw(function, *args, **kwargs):
if (not callable(function)):
raise TypeError('function must be callable')
hub = get_hub()
if kwargs:
function = _functools_partial(function, *args, **kwargs)
g = RawGreenlet(function, hub)
hub.loop.run_callback(g.switch)
else:
g = RawGreenlet(function, hub)
hub.loop.run_callback(g.switch, *args)
return g
|
[
"def",
"spawn_raw",
"(",
"function",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"callable",
"(",
"function",
")",
")",
":",
"raise",
"TypeError",
"(",
"'function must be callable'",
")",
"hub",
"=",
"get_hub",
"(",
")",
"if",
"kwargs",
":",
"function",
"=",
"_functools_partial",
"(",
"function",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"g",
"=",
"RawGreenlet",
"(",
"function",
",",
"hub",
")",
"hub",
".",
"loop",
".",
"run_callback",
"(",
"g",
".",
"switch",
")",
"else",
":",
"g",
"=",
"RawGreenlet",
"(",
"function",
",",
"hub",
")",
"hub",
".",
"loop",
".",
"run_callback",
"(",
"g",
".",
"switch",
",",
"*",
"args",
")",
"return",
"g"
] |
create a new :class:greenlet .
|
train
| false
|
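A usage sketch for the snippet above (spawn_raw is also exported at the top of the gevent package; the raw greenlet is cheap but lacks the full Greenlet API such as join/get):
from gevent import sleep, spawn_raw
g = spawn_raw(print, 'hello from a raw greenlet')
sleep(0)   # yield to the hub so the scheduled switch runs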
55,311
|
def get_argument_from_call(callfunc_node, position=None, keyword=None):
if ((position is None) and (keyword is None)):
raise ValueError('Must specify at least one of: position or keyword.')
try:
if ((position is not None) and (not isinstance(callfunc_node.args[position], astroid.Keyword))):
return callfunc_node.args[position]
except IndexError as error:
raise NoSuchArgumentError(error)
if keyword:
for arg in callfunc_node.args:
if (isinstance(arg, astroid.Keyword) and (arg.arg == keyword)):
return arg.value
raise NoSuchArgumentError
|
[
"def",
"get_argument_from_call",
"(",
"callfunc_node",
",",
"position",
"=",
"None",
",",
"keyword",
"=",
"None",
")",
":",
"if",
"(",
"(",
"position",
"is",
"None",
")",
"and",
"(",
"keyword",
"is",
"None",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Must specify at least one of: position or keyword.'",
")",
"try",
":",
"if",
"(",
"(",
"position",
"is",
"not",
"None",
")",
"and",
"(",
"not",
"isinstance",
"(",
"callfunc_node",
".",
"args",
"[",
"position",
"]",
",",
"astroid",
".",
"Keyword",
")",
")",
")",
":",
"return",
"callfunc_node",
".",
"args",
"[",
"position",
"]",
"except",
"IndexError",
"as",
"error",
":",
"raise",
"NoSuchArgumentError",
"(",
"error",
")",
"if",
"keyword",
":",
"for",
"arg",
"in",
"callfunc_node",
".",
"args",
":",
"if",
"(",
"isinstance",
"(",
"arg",
",",
"astroid",
".",
"Keyword",
")",
"and",
"(",
"arg",
".",
"arg",
"==",
"keyword",
")",
")",
":",
"return",
"arg",
".",
"value",
"raise",
"NoSuchArgumentError"
] |
returns the specified argument from a function call .
|
train
| false
|
55,312
|
def _decomp_quad(P, cond=None, rcond=None, lower=True, check_finite=True):
(w, V) = LA.eigh(P, lower=lower, check_finite=check_finite)
if (rcond is not None):
cond = rcond
if (cond in (None, (-1))):
t = V.dtype.char.lower()
factor = {'f': 1000.0, 'd': 1000000.0}
cond = (factor[t] * np.finfo(t).eps)
scale = max(np.absolute(w))
w_scaled = (w / scale)
maskp = (w_scaled > cond)
maskn = (w_scaled < (- cond))
if (np.any(maskp) and np.any(maskn)):
warnings.warn('Forming a nonconvex expression quad_form(x, indefinite).')
M1 = (V[:, maskp] * np.sqrt(w_scaled[maskp]))
M2 = (V[:, maskn] * np.sqrt((- w_scaled[maskn])))
return (scale, M1, M2)
|
[
"def",
"_decomp_quad",
"(",
"P",
",",
"cond",
"=",
"None",
",",
"rcond",
"=",
"None",
",",
"lower",
"=",
"True",
",",
"check_finite",
"=",
"True",
")",
":",
"(",
"w",
",",
"V",
")",
"=",
"LA",
".",
"eigh",
"(",
"P",
",",
"lower",
"=",
"lower",
",",
"check_finite",
"=",
"check_finite",
")",
"if",
"(",
"rcond",
"is",
"not",
"None",
")",
":",
"cond",
"=",
"rcond",
"if",
"(",
"cond",
"in",
"(",
"None",
",",
"(",
"-",
"1",
")",
")",
")",
":",
"t",
"=",
"V",
".",
"dtype",
".",
"char",
".",
"lower",
"(",
")",
"factor",
"=",
"{",
"'f'",
":",
"1000.0",
",",
"'d'",
":",
"1000000.0",
"}",
"cond",
"=",
"(",
"factor",
"[",
"t",
"]",
"*",
"np",
".",
"finfo",
"(",
"t",
")",
".",
"eps",
")",
"scale",
"=",
"max",
"(",
"np",
".",
"absolute",
"(",
"w",
")",
")",
"w_scaled",
"=",
"(",
"w",
"/",
"scale",
")",
"maskp",
"=",
"(",
"w_scaled",
">",
"cond",
")",
"maskn",
"=",
"(",
"w_scaled",
"<",
"(",
"-",
"cond",
")",
")",
"if",
"(",
"np",
".",
"any",
"(",
"maskp",
")",
"and",
"np",
".",
"any",
"(",
"maskn",
")",
")",
":",
"warnings",
".",
"warn",
"(",
"'Forming a nonconvex expression quad_form(x, indefinite).'",
")",
"M1",
"=",
"(",
"V",
"[",
":",
",",
"maskp",
"]",
"*",
"np",
".",
"sqrt",
"(",
"w_scaled",
"[",
"maskp",
"]",
")",
")",
"M2",
"=",
"(",
"V",
"[",
":",
",",
"maskn",
"]",
"*",
"np",
".",
"sqrt",
"(",
"(",
"-",
"w_scaled",
"[",
"maskn",
"]",
")",
")",
")",
"return",
"(",
"scale",
",",
"M1",
",",
"M2",
")"
] |
compute a matrix decomposition .
|
train
| false
|
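A numerical sanity check for the snippet above (hedged sketch: for symmetric P the pieces recompose, up to near-zero eigendirections, as P ≈ scale * (M1 @ M1.T - M2 @ M2.T)):
import numpy as np
A = np.random.randn(4, 4)
P = A + A.T   # symmetric and generally indefinite, so the nonconvex warning fires
scale, M1, M2 = _decomp_quad(P)
np.testing.assert_allclose(scale * (M1 @ M1.T - M2 @ M2.T), P, atol=1e-8)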
55,315
|
def make_logged_in_client(username='test', password='test', is_superuser=True, recreate=False, groupname=None):
try:
user = User.objects.get(username=username)
if recreate:
user.delete()
raise User.DoesNotExist
except User.DoesNotExist:
user = User.objects.create_user(username, (username + '@localhost'), password)
user.is_superuser = is_superuser
user.save()
else:
if (user.is_superuser != is_superuser):
user.is_superuser = is_superuser
user.save()
if (groupname is not None):
(group, created) = Group.objects.get_or_create(name=groupname)
if (not user.groups.filter(name=group.name).exists()):
user.groups.add(group)
user.save()
c = Client()
ret = c.login(username=username, password=password)
assert ret, ("Login failed (user '%s')." % username)
return c
|
[
"def",
"make_logged_in_client",
"(",
"username",
"=",
"'test'",
",",
"password",
"=",
"'test'",
",",
"is_superuser",
"=",
"True",
",",
"recreate",
"=",
"False",
",",
"groupname",
"=",
"None",
")",
":",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"username",
")",
"if",
"recreate",
":",
"user",
".",
"delete",
"(",
")",
"raise",
"User",
".",
"DoesNotExist",
"except",
"User",
".",
"DoesNotExist",
":",
"user",
"=",
"User",
".",
"objects",
".",
"create_user",
"(",
"username",
",",
"(",
"username",
"+",
"'@localhost'",
")",
",",
"password",
")",
"user",
".",
"is_superuser",
"=",
"is_superuser",
"user",
".",
"save",
"(",
")",
"else",
":",
"if",
"(",
"user",
".",
"is_superuser",
"!=",
"is_superuser",
")",
":",
"user",
".",
"is_superuser",
"=",
"is_superuser",
"user",
".",
"save",
"(",
")",
"if",
"(",
"groupname",
"is",
"not",
"None",
")",
":",
"(",
"group",
",",
"created",
")",
"=",
"Group",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"groupname",
")",
"if",
"(",
"not",
"user",
".",
"groups",
".",
"filter",
"(",
"name",
"=",
"group",
".",
"name",
")",
".",
"exists",
"(",
")",
")",
":",
"user",
".",
"groups",
".",
"add",
"(",
"group",
")",
"user",
".",
"save",
"(",
")",
"c",
"=",
"Client",
"(",
")",
"ret",
"=",
"c",
".",
"login",
"(",
"username",
"=",
"username",
",",
"password",
"=",
"password",
")",
"assert",
"ret",
",",
"(",
"\"Login failed (user '%s').\"",
"%",
"username",
")",
"return",
"c"
] |
create a client with a user already logged in .
|
train
| false
|
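Typical test usage for the helper above (names and URL are hypothetical):
def test_admin_page():
    c = make_logged_in_client(username='admin', is_superuser=True)
    response = c.get('/useradmin/users')   # hypothetical view
    assert response.status_code == 200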
55,316
|
def test_invalid_sigma_clip():
data = np.ones((5, 5))
data[(2, 2)] = 1000
data[(3, 4)] = np.nan
data[(1, 1)] = np.inf
result = sigma_clip(data)
assert result.mask[(2, 2)]
assert result.mask[(3, 4)]
assert result.mask[(1, 1)]
|
[
"def",
"test_invalid_sigma_clip",
"(",
")",
":",
"data",
"=",
"np",
".",
"ones",
"(",
"(",
"5",
",",
"5",
")",
")",
"data",
"[",
"(",
"2",
",",
"2",
")",
"]",
"=",
"1000",
"data",
"[",
"(",
"3",
",",
"4",
")",
"]",
"=",
"np",
".",
"nan",
"data",
"[",
"(",
"1",
",",
"1",
")",
"]",
"=",
"np",
".",
"inf",
"result",
"=",
"sigma_clip",
"(",
"data",
")",
"assert",
"result",
".",
"mask",
"[",
"(",
"2",
",",
"2",
")",
"]",
"assert",
"result",
".",
"mask",
"[",
"(",
"3",
",",
"4",
")",
"]",
"assert",
"result",
".",
"mask",
"[",
"(",
"1",
",",
"1",
")",
"]"
] |
test sigma_clip of data containing invalid values .
|
train
| false
|
55,322
|
def _write_proc_history(fid, info):
if ('proc_history' not in info):
return
if (len(info['proc_history']) > 0):
start_block(fid, FIFF.FIFFB_PROCESSING_HISTORY)
for record in info['proc_history']:
start_block(fid, FIFF.FIFFB_PROCESSING_RECORD)
for (key, id_, writer) in zip(_proc_keys, _proc_ids, _proc_writers):
if (key in record):
writer(fid, id_, record[key])
_write_maxfilter_record(fid, record['max_info'])
if ('smartshield' in record):
for ss in record['smartshield']:
start_block(fid, FIFF.FIFFB_SMARTSHIELD)
end_block(fid, FIFF.FIFFB_SMARTSHIELD)
end_block(fid, FIFF.FIFFB_PROCESSING_RECORD)
end_block(fid, FIFF.FIFFB_PROCESSING_HISTORY)
|
[
"def",
"_write_proc_history",
"(",
"fid",
",",
"info",
")",
":",
"if",
"(",
"'proc_history'",
"not",
"in",
"info",
")",
":",
"return",
"if",
"(",
"len",
"(",
"info",
"[",
"'proc_history'",
"]",
")",
">",
"0",
")",
":",
"start_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_PROCESSING_HISTORY",
")",
"for",
"record",
"in",
"info",
"[",
"'proc_history'",
"]",
":",
"start_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_PROCESSING_RECORD",
")",
"for",
"(",
"key",
",",
"id_",
",",
"writer",
")",
"in",
"zip",
"(",
"_proc_keys",
",",
"_proc_ids",
",",
"_proc_writers",
")",
":",
"if",
"(",
"key",
"in",
"record",
")",
":",
"writer",
"(",
"fid",
",",
"id_",
",",
"record",
"[",
"key",
"]",
")",
"_write_maxfilter_record",
"(",
"fid",
",",
"record",
"[",
"'max_info'",
"]",
")",
"if",
"(",
"'smartshield'",
"in",
"record",
")",
":",
"for",
"ss",
"in",
"record",
"[",
"'smartshield'",
"]",
":",
"start_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_SMARTSHIELD",
")",
"end_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_SMARTSHIELD",
")",
"end_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_PROCESSING_RECORD",
")",
"end_block",
"(",
"fid",
",",
"FIFF",
".",
"FIFFB_PROCESSING_HISTORY",
")"
] |
write processing history to file .
|
train
| false
|
55,324
|
def _comp_match(item, filter_, scope='collection'):
filter_length = len(filter_)
if (scope == 'collection'):
tag = item.collection.get_meta('tag')
else:
for component in item.components():
if (component.name in ('VTODO', 'VEVENT', 'VJOURNAL')):
tag = component.name
break
else:
return False
if (filter_length == 0):
return (filter_.get('name') == tag)
else:
if (filter_length == 1):
if (filter_[0].tag == _tag('C', 'is-not-defined')):
return (filter_.get('name') != tag)
if (filter_[0].tag == _tag('C', 'time-range')):
if (not _time_range_match(item.item, filter_[0], tag)):
return False
filter_ = filter_[1:]
return all(((_prop_match(item, child) if (child.tag == _tag('C', 'prop-filter')) else _comp_match(item, child, scope='component')) for child in filter_))
|
[
"def",
"_comp_match",
"(",
"item",
",",
"filter_",
",",
"scope",
"=",
"'collection'",
")",
":",
"filter_length",
"=",
"len",
"(",
"filter_",
")",
"if",
"(",
"scope",
"==",
"'collection'",
")",
":",
"tag",
"=",
"item",
".",
"collection",
".",
"get_meta",
"(",
"'tag'",
")",
"else",
":",
"for",
"component",
"in",
"item",
".",
"components",
"(",
")",
":",
"if",
"(",
"component",
".",
"name",
"in",
"(",
"'VTODO'",
",",
"'VEVENT'",
",",
"'VJOURNAL'",
")",
")",
":",
"tag",
"=",
"component",
".",
"name",
"break",
"else",
":",
"return",
"False",
"if",
"(",
"filter_length",
"==",
"0",
")",
":",
"return",
"(",
"filter_",
".",
"get",
"(",
"'name'",
")",
"==",
"tag",
")",
"else",
":",
"if",
"(",
"filter_length",
"==",
"1",
")",
":",
"if",
"(",
"filter_",
"[",
"0",
"]",
".",
"tag",
"==",
"_tag",
"(",
"'C'",
",",
"'is-not-defined'",
")",
")",
":",
"return",
"(",
"filter_",
".",
"get",
"(",
"'name'",
")",
"!=",
"tag",
")",
"if",
"(",
"filter_",
"[",
"0",
"]",
".",
"tag",
"==",
"_tag",
"(",
"'C'",
",",
"'time-range'",
")",
")",
":",
"if",
"(",
"not",
"_time_range_match",
"(",
"item",
".",
"item",
",",
"filter_",
"[",
"0",
"]",
",",
"tag",
")",
")",
":",
"return",
"False",
"filter_",
"=",
"filter_",
"[",
"1",
":",
"]",
"return",
"all",
"(",
"(",
"(",
"_prop_match",
"(",
"item",
",",
"child",
")",
"if",
"(",
"child",
".",
"tag",
"==",
"_tag",
"(",
"'C'",
",",
"'prop-filter'",
")",
")",
"else",
"_comp_match",
"(",
"item",
",",
"child",
",",
"scope",
"=",
"'component'",
")",
")",
"for",
"child",
"in",
"filter_",
")",
")"
] |
check whether the item matches the comp filter_ .
|
train
| false
|
55,326
|
def GetRegisteredExe(exeAlias):
return win32api.RegQueryValue(GetRootKey(), ((GetAppPathsKey() + '\\') + exeAlias))
|
[
"def",
"GetRegisteredExe",
"(",
"exeAlias",
")",
":",
"return",
"win32api",
".",
"RegQueryValue",
"(",
"GetRootKey",
"(",
")",
",",
"(",
"(",
"GetAppPathsKey",
"(",
")",
"+",
"'\\\\'",
")",
"+",
"exeAlias",
")",
")"
] |
get a registered .
|
train
| false
|
55,327
|
def xy_color(color):
rgb_cie_converter = rgb_cie.Converter()
hex_color_re = re.compile('(?<!\\w)([a-f0-9]){2}([a-f0-9]){2}([a-f0-9]){2}\\b', re.IGNORECASE)
if (color in CSS_LITERALS):
color = CSS_LITERALS[color]
color = color.lstrip('#')
if (not hex_color_re.match(color)):
print 'Invalid color. Please use a 6-digit hex color.'
sys.exit()
return rgb_cie_converter.hexToCIE1931(color)
|
[
"def",
"xy_color",
"(",
"color",
")",
":",
"rgb_cie_converter",
"=",
"rgb_cie",
".",
"Converter",
"(",
")",
"hex_color_re",
"=",
"re",
".",
"compile",
"(",
"'(?<!\\\\w)([a-f0-9]){2}([a-f0-9]){2}([a-f0-9]){2}\\\\b'",
",",
"re",
".",
"IGNORECASE",
")",
"if",
"(",
"color",
"in",
"CSS_LITERALS",
")",
":",
"color",
"=",
"CSS_LITERALS",
"[",
"color",
"]",
"color",
"=",
"color",
".",
"lstrip",
"(",
"'#'",
")",
"if",
"(",
"not",
"hex_color_re",
".",
"match",
"(",
"color",
")",
")",
":",
"print",
"'Invalid color. Please use a 6-digit hex color.'",
"sys",
".",
"exit",
"(",
")",
"return",
"rgb_cie_converter",
".",
"hexToCIE1931",
"(",
"color",
")"
] |
validate and convert hex color to xy space .
|
train
| false
|
55,328
|
def zero_value_config_set(kodi_setting, all_settings):
try:
if (int(kodi_setting) == 0):
return 'remove_this_line'
except:
pass
return kodi_setting
|
[
"def",
"zero_value_config_set",
"(",
"kodi_setting",
",",
"all_settings",
")",
":",
"try",
":",
"if",
"(",
"int",
"(",
"kodi_setting",
")",
"==",
"0",
")",
":",
"return",
"'remove_this_line'",
"except",
":",
"pass",
"return",
"kodi_setting"
] |
if the value of the kodi setting is zero .
|
train
| false
|
55,329
|
@public
def sring(exprs, *symbols, **options):
single = False
if (not is_sequence(exprs)):
(exprs, single) = ([exprs], True)
exprs = list(map(sympify, exprs))
opt = build_options(symbols, options)
(reps, opt) = _parallel_dict_from_expr(exprs, opt)
if (opt.domain is None):
coeffs = sum([list(rep.values()) for rep in reps], [])
(opt.domain, _) = construct_domain(coeffs, opt=opt)
_ring = PolyRing(opt.gens, opt.domain, opt.order)
polys = list(map(_ring.from_dict, reps))
if single:
return (_ring, polys[0])
else:
return (_ring, polys)
|
[
"@",
"public",
"def",
"sring",
"(",
"exprs",
",",
"*",
"symbols",
",",
"**",
"options",
")",
":",
"single",
"=",
"False",
"if",
"(",
"not",
"is_sequence",
"(",
"exprs",
")",
")",
":",
"(",
"exprs",
",",
"single",
")",
"=",
"(",
"[",
"exprs",
"]",
",",
"True",
")",
"exprs",
"=",
"list",
"(",
"map",
"(",
"sympify",
",",
"exprs",
")",
")",
"opt",
"=",
"build_options",
"(",
"symbols",
",",
"options",
")",
"(",
"reps",
",",
"opt",
")",
"=",
"_parallel_dict_from_expr",
"(",
"exprs",
",",
"opt",
")",
"if",
"(",
"opt",
".",
"domain",
"is",
"None",
")",
":",
"coeffs",
"=",
"sum",
"(",
"[",
"list",
"(",
"rep",
".",
"values",
"(",
")",
")",
"for",
"rep",
"in",
"reps",
"]",
",",
"[",
"]",
")",
"(",
"opt",
".",
"domain",
",",
"_",
")",
"=",
"construct_domain",
"(",
"coeffs",
",",
"opt",
"=",
"opt",
")",
"_ring",
"=",
"PolyRing",
"(",
"opt",
".",
"gens",
",",
"opt",
".",
"domain",
",",
"opt",
".",
"order",
")",
"polys",
"=",
"list",
"(",
"map",
"(",
"_ring",
".",
"from_dict",
",",
"reps",
")",
")",
"if",
"single",
":",
"return",
"(",
"_ring",
",",
"polys",
"[",
"0",
"]",
")",
"else",
":",
"return",
"(",
"_ring",
",",
"polys",
")"
] |
construct a ring deriving generators and domain from options and input expressions .
|
train
| false
|
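A usage sketch for the snippet above (sympy; the domain ZZ is inferred from the integer coefficients):
from sympy import symbols
from sympy.polys.rings import sring
x, y = symbols('x y')
R, f = sring(x**2 + 2*x*y + 3)
# R is a polynomial ring in x, y over ZZ; f is x**2 + 2*x*y + 3 as a ring element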
55,330
|
def getInsetLoopsFromLoops(loops, radius):
insetLoops = []
for loop in loops:
insetLoops += getInsetLoopsFromLoop(loop, radius)
return insetLoops
|
[
"def",
"getInsetLoopsFromLoops",
"(",
"loops",
",",
"radius",
")",
":",
"insetLoops",
"=",
"[",
"]",
"for",
"loop",
"in",
"loops",
":",
"insetLoops",
"+=",
"getInsetLoopsFromLoop",
"(",
"loop",
",",
"radius",
")",
"return",
"insetLoops"
] |
get the inset loops .
|
train
| false
|
55,332
|
def guess_net_inet_tcp_sendbuf_max():
return (16 * MB)
|
[
"def",
"guess_net_inet_tcp_sendbuf_max",
"(",
")",
":",
"return",
"(",
"16",
"*",
"MB",
")"
] |
maximum size for tcp send buffers see guess_kern_ipc_maxsockbuf() .
|
train
| false
|
55,333
|
def _generate_zip_package(target, sources, sources_dir):
zip = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
manifest = _archive_package_sources(zip.write, sources, sources_dir)
zip.writestr(_PACKAGE_MANIFEST, ('\n'.join(manifest) + '\n'))
zip.close()
return None
|
[
"def",
"_generate_zip_package",
"(",
"target",
",",
"sources",
",",
"sources_dir",
")",
":",
"zip",
"=",
"zipfile",
".",
"ZipFile",
"(",
"target",
",",
"'w'",
",",
"zipfile",
".",
"ZIP_DEFLATED",
")",
"manifest",
"=",
"_archive_package_sources",
"(",
"zip",
".",
"write",
",",
"sources",
",",
"sources_dir",
")",
"zip",
".",
"writestr",
"(",
"_PACKAGE_MANIFEST",
",",
"(",
"'\\n'",
".",
"join",
"(",
"manifest",
")",
"+",
"'\\n'",
")",
")",
"zip",
".",
"close",
"(",
")",
"return",
"None"
] |
generate a zip archive containing all of the source files .
|
train
| false
|
55,334
|
def getNewRepository():
return ExportRepository()
|
[
"def",
"getNewRepository",
"(",
")",
":",
"return",
"ExportRepository",
"(",
")"
] |
get the repository constructor .
|
train
| false
|
55,336
|
def pretty_css(container, name, raw):
sheet = container.parse_css(raw)
return serialize(sheet, u'text/css')
|
[
"def",
"pretty_css",
"(",
"container",
",",
"name",
",",
"raw",
")",
":",
"sheet",
"=",
"container",
".",
"parse_css",
"(",
"raw",
")",
"return",
"serialize",
"(",
"sheet",
",",
"u'text/css'",
")"
] |
pretty print the css represented as a string in raw .
|
train
| false
|
55,337
|
def extract_bool(name, value):
if (str(value).lower() not in ('true', 'false')):
raise ValueError((_('Unrecognized value "%(value)s" for "%(name)s", acceptable values are: true, false.') % {'value': value, 'name': name}))
return strutils.bool_from_string(value, strict=True)
|
[
"def",
"extract_bool",
"(",
"name",
",",
"value",
")",
":",
"if",
"(",
"str",
"(",
"value",
")",
".",
"lower",
"(",
")",
"not",
"in",
"(",
"'true'",
",",
"'false'",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"_",
"(",
"'Unrecognized value \"%(value)s\" for \"%(name)s\", acceptable values are: true, false.'",
")",
"%",
"{",
"'value'",
":",
"value",
",",
"'name'",
":",
"name",
"}",
")",
")",
"return",
"strutils",
".",
"bool_from_string",
"(",
"value",
",",
"strict",
"=",
"True",
")"
] |
convert any true/false string to its corresponding boolean value .
|
train
| false
|
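Examples for the snippet above (oslo strutils underneath; only case-insensitive 'true'/'false' pass the pre-check):
extract_bool('enabled', 'True')    # -> True
extract_bool('enabled', 'false')   # -> False
extract_bool('enabled', 'yes')     # raises ValueError: unrecognized value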
55,338
|
def format_version(module, attr, call=False):
try:
if call:
version = getattr(module, attr)()
else:
version = getattr(module, attr)
except Exception as e:
print e
version = 'Version could not be acquired!'
if (not isinstance(version, str)):
version = list2string(version)
return version
|
[
"def",
"format_version",
"(",
"module",
",",
"attr",
",",
"call",
"=",
"False",
")",
":",
"try",
":",
"if",
"call",
":",
"version",
"=",
"getattr",
"(",
"module",
",",
"attr",
")",
"(",
")",
"else",
":",
"version",
"=",
"getattr",
"(",
"module",
",",
"attr",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"e",
"version",
"=",
"'Version could not be acquired!'",
"if",
"(",
"not",
"isinstance",
"(",
"version",
",",
"str",
")",
")",
":",
"version",
"=",
"list2string",
"(",
"version",
")",
"return",
"version"
] |
format the version .
|
train
| false
|
55,339
|
def require_open(func):
def wrapper(self, *args, **kwargs):
if self.closed:
raise IOError(errno.EBADF, 'I/O operation on closed file')
return func(self, *args, **kwargs)
return wrapper
|
[
"def",
"require_open",
"(",
"func",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"self",
".",
"closed",
":",
"raise",
"IOError",
"(",
"errno",
".",
"EBADF",
",",
"'I/O operation on closed file'",
")",
"return",
"func",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"wrapper"
] |
decorator that ensures that the file instance isnt closed when the function is run .
|
train
| false
|
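A minimal sketch of the decorator above applied to a hypothetical file-like class:
class Handle(object):
    def __init__(self):
        self.closed = False
    @require_open
    def read(self):
        return b'data'

h = Handle()
h.read()          # -> b'data'
h.closed = True
h.read()          # raises IOError(EBADF, 'I/O operation on closed file')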
55,340
|
@dec.skip_win32
def test_find_cmd_ls():
path = find_cmd('ls')
nt.assert_true(path.endswith('ls'))
|
[
"@",
"dec",
".",
"skip_win32",
"def",
"test_find_cmd_ls",
"(",
")",
":",
"path",
"=",
"find_cmd",
"(",
"'ls'",
")",
"nt",
".",
"assert_true",
"(",
"path",
".",
"endswith",
"(",
"'ls'",
")",
")"
] |
make sure we can find the full path to ls .
|
train
| false
|
55,341
|
def get_area(y, x):
return trapz(y=y, x=x)
|
[
"def",
"get_area",
"(",
"y",
",",
"x",
")",
":",
"return",
"trapz",
"(",
"y",
"=",
"y",
",",
"x",
"=",
"x",
")"
] |
get the area under the curve .
|
train
| false
|
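A quick check for the snippet above (trapz here is the trapezoidal rule, e.g. numpy.trapz, which is exact for a straight line):
import numpy as np
x = np.linspace(0, 1, 101)
area = get_area(y=x, x=x)   # area under y = x on [0, 1]
assert abs(area - 0.5) < 1e-12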
55,342
|
def user_registered_cb(user, **kwargs):
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get(u'mail_send_new_user_mail'):
mail_new_user(user)
|
[
"def",
"user_registered_cb",
"(",
"user",
",",
"**",
"kwargs",
")",
":",
"siteconfig",
"=",
"SiteConfiguration",
".",
"objects",
".",
"get_current",
"(",
")",
"if",
"siteconfig",
".",
"get",
"(",
"u'mail_send_new_user_mail'",
")",
":",
"mail_new_user",
"(",
"user",
")"
] |
send e-mail when a user is registered .
|
train
| false
|