id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
38,322
@require_context def to_device(obj, stream=0, copy=True, to=None): if (to is None): (to, new) = devicearray.auto_device(obj, stream=stream, copy=copy) return to if copy: to.copy_to_device(obj, stream=stream) return to
[ "@", "require_context", "def", "to_device", "(", "obj", ",", "stream", "=", "0", ",", "copy", "=", "True", ",", "to", "=", "None", ")", ":", "if", "(", "to", "is", "None", ")", ":", "(", "to", ",", "new", ")", "=", "devicearray", ".", "auto_device", "(", "obj", ",", "stream", "=", "stream", ",", "copy", "=", "copy", ")", "return", "to", "if", "copy", ":", "to", ".", "copy_to_device", "(", "obj", ",", "stream", "=", "stream", ")", "return", "to" ]
to_device allocate and transfer a numpy ndarray or structured scalar to the device .
train
false
38,323
def dup_reverse(f): return dup_strip(list(reversed(f)))
[ "def", "dup_reverse", "(", "f", ")", ":", "return", "dup_strip", "(", "list", "(", "reversed", "(", "f", ")", ")", ")" ]
compute x**n * f .
train
false
38,324
def _is_column(col): return isinstance(col, ColumnElement)
[ "def", "_is_column", "(", "col", ")", ":", "return", "isinstance", "(", "col", ",", "ColumnElement", ")" ]
true if col is an instance of :class: .
train
false
38,326
def in_random_order(data): indexes = [i for (i, _) in enumerate(data)] random.shuffle(indexes) for i in indexes: (yield data[i])
[ "def", "in_random_order", "(", "data", ")", ":", "indexes", "=", "[", "i", "for", "(", "i", ",", "_", ")", "in", "enumerate", "(", "data", ")", "]", "random", ".", "shuffle", "(", "indexes", ")", "for", "i", "in", "indexes", ":", "(", "yield", "data", "[", "i", "]", ")" ]
generator that returns the elements of data in random order .
train
false
38,327
def laplacian(ndim, shape, is_real=True): impr = np.zeros(([3] * ndim)) for dim in range(ndim): idx = tuple(((([slice(1, 2)] * dim) + [slice(None)]) + ([slice(1, 2)] * ((ndim - dim) - 1)))) impr[idx] = np.array([(-1.0), 0.0, (-1.0)]).reshape([((-1) if (i == dim) else 1) for i in range(ndim)]) impr[([slice(1, 2)] * ndim)] = (2.0 * ndim) return (ir2tf(impr, shape, is_real=is_real), impr)
[ "def", "laplacian", "(", "ndim", ",", "shape", ",", "is_real", "=", "True", ")", ":", "impr", "=", "np", ".", "zeros", "(", "(", "[", "3", "]", "*", "ndim", ")", ")", "for", "dim", "in", "range", "(", "ndim", ")", ":", "idx", "=", "tuple", "(", "(", "(", "(", "[", "slice", "(", "1", ",", "2", ")", "]", "*", "dim", ")", "+", "[", "slice", "(", "None", ")", "]", ")", "+", "(", "[", "slice", "(", "1", ",", "2", ")", "]", "*", "(", "(", "ndim", "-", "dim", ")", "-", "1", ")", ")", ")", ")", "impr", "[", "idx", "]", "=", "np", ".", "array", "(", "[", "(", "-", "1.0", ")", ",", "0.0", ",", "(", "-", "1.0", ")", "]", ")", ".", "reshape", "(", "[", "(", "(", "-", "1", ")", "if", "(", "i", "==", "dim", ")", "else", "1", ")", "for", "i", "in", "range", "(", "ndim", ")", "]", ")", "impr", "[", "(", "[", "slice", "(", "1", ",", "2", ")", "]", "*", "ndim", ")", "]", "=", "(", "2.0", "*", "ndim", ")", "return", "(", "ir2tf", "(", "impr", ",", "shape", ",", "is_real", "=", "is_real", ")", ",", "impr", ")" ]
return the transfer function of the laplacian .
train
false
38,328
def recv_arrays(socket): headers = socket.recv_json() if ('stop' in headers): raise StopIteration arrays = [] for header in headers: data = socket.recv() buf = buffer_(data) array = numpy.frombuffer(buf, dtype=numpy.dtype(header['descr'])) array.shape = header['shape'] if header['fortran_order']: array.shape = header['shape'][::(-1)] array = array.transpose() arrays.append(array) return arrays
[ "def", "recv_arrays", "(", "socket", ")", ":", "headers", "=", "socket", ".", "recv_json", "(", ")", "if", "(", "'stop'", "in", "headers", ")", ":", "raise", "StopIteration", "arrays", "=", "[", "]", "for", "header", "in", "headers", ":", "data", "=", "socket", ".", "recv", "(", ")", "buf", "=", "buffer_", "(", "data", ")", "array", "=", "numpy", ".", "frombuffer", "(", "buf", ",", "dtype", "=", "numpy", ".", "dtype", "(", "header", "[", "'descr'", "]", ")", ")", "array", ".", "shape", "=", "header", "[", "'shape'", "]", "if", "header", "[", "'fortran_order'", "]", ":", "array", ".", "shape", "=", "header", "[", "'shape'", "]", "[", ":", ":", "(", "-", "1", ")", "]", "array", "=", "array", ".", "transpose", "(", ")", "arrays", ".", "append", "(", "array", ")", "return", "arrays" ]
receive a list of numpy arrays .
train
true
38,330
def get_versions_string(): result = [] for version in get_versions_list(): result.append((' * %s %s' % (version[0], version[2]))) return '\n'.join(result)
[ "def", "get_versions_string", "(", ")", ":", "result", "=", "[", "]", "for", "version", "in", "get_versions_list", "(", ")", ":", "result", ".", "append", "(", "(", "' * %s %s'", "%", "(", "version", "[", "0", "]", ",", "version", "[", "2", "]", ")", ")", ")", "return", "'\\n'", ".", "join", "(", "result", ")" ]
returns string with version information summary .
train
false
38,332
@handle_response_format @treeio_login_required def receivable_edit(request, receivable_id, response_format='html'): receivable = get_object_or_404(Liability, pk=receivable_id) if request.POST: if ('cancel' not in request.POST): form = ReceivableForm(request.user.profile, request.POST, instance=receivable) if form.is_valid(): receivable = form.save(commit=False) convert(receivable, 'value') return HttpResponseRedirect(reverse('finance_receivable_view', args=[receivable.id])) else: return HttpResponseRedirect(reverse('finance_receivable_view', args=[receivable.id])) else: form = ReceivableForm(request.user.profile, instance=receivable) return render_to_response('finance/receivable_edit', {'form': form, 'liability': receivable}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "receivable_edit", "(", "request", ",", "receivable_id", ",", "response_format", "=", "'html'", ")", ":", "receivable", "=", "get_object_or_404", "(", "Liability", ",", "pk", "=", "receivable_id", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "ReceivableForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "receivable", ")", "if", "form", ".", "is_valid", "(", ")", ":", "receivable", "=", "form", ".", "save", "(", "commit", "=", "False", ")", "convert", "(", "receivable", ",", "'value'", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_receivable_view'", ",", "args", "=", "[", "receivable", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_receivable_view'", ",", "args", "=", "[", "receivable", ".", "id", "]", ")", ")", "else", ":", "form", "=", "ReceivableForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "receivable", ")", "return", "render_to_response", "(", "'finance/receivable_edit'", ",", "{", "'form'", ":", "form", ",", "'liability'", ":", "receivable", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
liability edit page .
train
false
38,333
def _get_dvs_uplink_portgroup(dvs, portgroup_name): for portgroup in dvs.portgroup: if (portgroup.name == portgroup_name): return portgroup return None
[ "def", "_get_dvs_uplink_portgroup", "(", "dvs", ",", "portgroup_name", ")", ":", "for", "portgroup", "in", "dvs", ".", "portgroup", ":", "if", "(", "portgroup", ".", "name", "==", "portgroup_name", ")", ":", "return", "portgroup", "return", "None" ]
return a portgroup object corresponding to the portgroup name on the dvs .
train
true
38,335
def update_moderator_ids(committer_id, moderator_usernames): moderator_ids = [] for username in moderator_usernames: user_id = user_services.get_user_id_from_username(username) if (user_id is not None): moderator_ids.append(user_id) else: raise Exception(('Bad moderator username: %s' % username)) Registry.get_config_property('moderator_ids').set_value(committer_id, moderator_ids)
[ "def", "update_moderator_ids", "(", "committer_id", ",", "moderator_usernames", ")", ":", "moderator_ids", "=", "[", "]", "for", "username", "in", "moderator_usernames", ":", "user_id", "=", "user_services", ".", "get_user_id_from_username", "(", "username", ")", "if", "(", "user_id", "is", "not", "None", ")", ":", "moderator_ids", ".", "append", "(", "user_id", ")", "else", ":", "raise", "Exception", "(", "(", "'Bad moderator username: %s'", "%", "username", ")", ")", "Registry", ".", "get_config_property", "(", "'moderator_ids'", ")", ".", "set_value", "(", "committer_id", ",", "moderator_ids", ")" ]
refresh the list of moderator user_ids based on the usernames entered .
train
false
38,336
def etree_to_dict(element, iterable=(list, list.append)): if ((element.text is None) or element.text.isspace()): retval = odict() for elt in element: if (not (elt.tag in retval)): retval[elt.tag] = iterable[0]() iterable[1](retval[elt.tag], etree_to_dict(elt, iterable)) else: retval = element.text return retval
[ "def", "etree_to_dict", "(", "element", ",", "iterable", "=", "(", "list", ",", "list", ".", "append", ")", ")", ":", "if", "(", "(", "element", ".", "text", "is", "None", ")", "or", "element", ".", "text", ".", "isspace", "(", ")", ")", ":", "retval", "=", "odict", "(", ")", "for", "elt", "in", "element", ":", "if", "(", "not", "(", "elt", ".", "tag", "in", "retval", ")", ")", ":", "retval", "[", "elt", ".", "tag", "]", "=", "iterable", "[", "0", "]", "(", ")", "iterable", "[", "1", "]", "(", "retval", "[", "elt", ".", "tag", "]", ",", "etree_to_dict", "(", "elt", ",", "iterable", ")", ")", "else", ":", "retval", "=", "element", ".", "text", "return", "retval" ]
takes an xml root element and returns the corresponding dict .
train
false
38,337
@get('/scan/<taskid>/kill') def scan_kill(taskid): if ((taskid not in DataStore.tasks) or (DataStore.tasks[taskid].engine_process() is None) or DataStore.tasks[taskid].engine_has_terminated()): logger.warning(('[%s] Invalid task ID provided to scan_kill()' % taskid)) return jsonize({'success': False, 'message': 'Invalid task ID'}) DataStore.tasks[taskid].engine_kill() logger.debug(('[%s] Killed scan' % taskid)) return jsonize({'success': True})
[ "@", "get", "(", "'/scan/<taskid>/kill'", ")", "def", "scan_kill", "(", "taskid", ")", ":", "if", "(", "(", "taskid", "not", "in", "DataStore", ".", "tasks", ")", "or", "(", "DataStore", ".", "tasks", "[", "taskid", "]", ".", "engine_process", "(", ")", "is", "None", ")", "or", "DataStore", ".", "tasks", "[", "taskid", "]", ".", "engine_has_terminated", "(", ")", ")", ":", "logger", ".", "warning", "(", "(", "'[%s] Invalid task ID provided to scan_kill()'", "%", "taskid", ")", ")", "return", "jsonize", "(", "{", "'success'", ":", "False", ",", "'message'", ":", "'Invalid task ID'", "}", ")", "DataStore", ".", "tasks", "[", "taskid", "]", ".", "engine_kill", "(", ")", "logger", ".", "debug", "(", "(", "'[%s] Killed scan'", "%", "taskid", ")", ")", "return", "jsonize", "(", "{", "'success'", ":", "True", "}", ")" ]
kill a scan .
train
false
38,339
def _numeric(n): return isinstance(n, (salt.ext.six.integer_types + (float,)))
[ "def", "_numeric", "(", "n", ")", ":", "return", "isinstance", "(", "n", ",", "(", "salt", ".", "ext", ".", "six", ".", "integer_types", "+", "(", "float", ",", ")", ")", ")" ]
tell whether an argument is numeric .
train
false
38,340
def widget_select(request): profile = Profile.objects.get_or_create(user=request.user)[0] profile_data = profile.extra_data widget_type = request.POST.get(u'type') if (widget_type == u'primary'): selections_key = u'primary_widget_selections' widgets = primary_widgets else: selections_key = u'secondary_widget_selections' widgets = secondary_widgets initial_selections = {} for widget in widgets: initial_selections[widget.widget_id] = u'1' selections = profile_data.setdefault(selections_key, initial_selections) for widget in widgets: widget_selection = request.POST.get(widget.widget_id) if (widget_selection is not None): selections[widget.widget_id] = widget_selection profile.save() return HttpResponse()
[ "def", "widget_select", "(", "request", ")", ":", "profile", "=", "Profile", ".", "objects", ".", "get_or_create", "(", "user", "=", "request", ".", "user", ")", "[", "0", "]", "profile_data", "=", "profile", ".", "extra_data", "widget_type", "=", "request", ".", "POST", ".", "get", "(", "u'type'", ")", "if", "(", "widget_type", "==", "u'primary'", ")", ":", "selections_key", "=", "u'primary_widget_selections'", "widgets", "=", "primary_widgets", "else", ":", "selections_key", "=", "u'secondary_widget_selections'", "widgets", "=", "secondary_widgets", "initial_selections", "=", "{", "}", "for", "widget", "in", "widgets", ":", "initial_selections", "[", "widget", ".", "widget_id", "]", "=", "u'1'", "selections", "=", "profile_data", ".", "setdefault", "(", "selections_key", ",", "initial_selections", ")", "for", "widget", "in", "widgets", ":", "widget_selection", "=", "request", ".", "POST", ".", "get", "(", "widget", ".", "widget_id", ")", "if", "(", "widget_selection", "is", "not", "None", ")", ":", "selections", "[", "widget", ".", "widget_id", "]", "=", "widget_selection", "profile", ".", "save", "(", ")", "return", "HttpResponse", "(", ")" ]
handle selection of admin widgets .
train
false
38,341
def _statXform(line): (numMsgs, totalSize) = line.split(None, 1) return (int(numMsgs), int(totalSize))
[ "def", "_statXform", "(", "line", ")", ":", "(", "numMsgs", ",", "totalSize", ")", "=", "line", ".", "split", "(", "None", ",", "1", ")", "return", "(", "int", "(", "numMsgs", ")", ",", "int", "(", "totalSize", ")", ")" ]
parse the response to a stat command .
train
false
38,343
def evaluate_marker(text, extra=None): try: marker = packaging.markers.Marker(text) return marker.evaluate() except packaging.markers.InvalidMarker as e: raise SyntaxError(e)
[ "def", "evaluate_marker", "(", "text", ",", "extra", "=", "None", ")", ":", "try", ":", "marker", "=", "packaging", ".", "markers", ".", "Marker", "(", "text", ")", "return", "marker", ".", "evaluate", "(", ")", "except", "packaging", ".", "markers", ".", "InvalidMarker", "as", "e", ":", "raise", "SyntaxError", "(", "e", ")" ]
evaluate a pep 508 environment marker .
train
true
38,344
def scheduled_sample(ground_truth_x, generated_x, batch_size, num_ground_truth): idx = tf.random_shuffle(tf.range(int(batch_size))) ground_truth_idx = tf.gather(idx, tf.range(num_ground_truth)) generated_idx = tf.gather(idx, tf.range(num_ground_truth, int(batch_size))) ground_truth_examps = tf.gather(ground_truth_x, ground_truth_idx) generated_examps = tf.gather(generated_x, generated_idx) return tf.dynamic_stitch([ground_truth_idx, generated_idx], [ground_truth_examps, generated_examps])
[ "def", "scheduled_sample", "(", "ground_truth_x", ",", "generated_x", ",", "batch_size", ",", "num_ground_truth", ")", ":", "idx", "=", "tf", ".", "random_shuffle", "(", "tf", ".", "range", "(", "int", "(", "batch_size", ")", ")", ")", "ground_truth_idx", "=", "tf", ".", "gather", "(", "idx", ",", "tf", ".", "range", "(", "num_ground_truth", ")", ")", "generated_idx", "=", "tf", ".", "gather", "(", "idx", ",", "tf", ".", "range", "(", "num_ground_truth", ",", "int", "(", "batch_size", ")", ")", ")", "ground_truth_examps", "=", "tf", ".", "gather", "(", "ground_truth_x", ",", "ground_truth_idx", ")", "generated_examps", "=", "tf", ".", "gather", "(", "generated_x", ",", "generated_idx", ")", "return", "tf", ".", "dynamic_stitch", "(", "[", "ground_truth_idx", ",", "generated_idx", "]", ",", "[", "ground_truth_examps", ",", "generated_examps", "]", ")" ]
sample batch with specified mix of ground truth and generated data points .
train
false
38,345
def unpause(path, service_names=None): project = __load_project(path) debug_ret = {} result = {} if isinstance(project, dict): return project else: try: project.unpause(service_names) if debug: for container in project.containers(): if ((service_names is None) or (container.get('Name')[1:] in service_names)): container.inspect_if_not_inspected() debug_ret[container.get('Name')] = container.inspect() result[container.get('Name')] = 'unpaused' except Exception as inst: return __handle_except(inst) return __standardize_result(True, 'Un-Pausing containers via docker-compose', result, debug_ret)
[ "def", "unpause", "(", "path", ",", "service_names", "=", "None", ")", ":", "project", "=", "__load_project", "(", "path", ")", "debug_ret", "=", "{", "}", "result", "=", "{", "}", "if", "isinstance", "(", "project", ",", "dict", ")", ":", "return", "project", "else", ":", "try", ":", "project", ".", "unpause", "(", "service_names", ")", "if", "debug", ":", "for", "container", "in", "project", ".", "containers", "(", ")", ":", "if", "(", "(", "service_names", "is", "None", ")", "or", "(", "container", ".", "get", "(", "'Name'", ")", "[", "1", ":", "]", "in", "service_names", ")", ")", ":", "container", ".", "inspect_if_not_inspected", "(", ")", "debug_ret", "[", "container", ".", "get", "(", "'Name'", ")", "]", "=", "container", ".", "inspect", "(", ")", "result", "[", "container", ".", "get", "(", "'Name'", ")", "]", "=", "'unpaused'", "except", "Exception", "as", "inst", ":", "return", "__handle_except", "(", "inst", ")", "return", "__standardize_result", "(", "True", ",", "'Un-Pausing containers via docker-compose'", ",", "result", ",", "debug_ret", ")" ]
unpauses a container name container name or id **return data** a dictionary will be returned .
train
true
38,347
def get_lang_from_cookie(request, supported): from django.conf import settings lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME) if (lang_code and (lang_code in supported)): return lang_code return None
[ "def", "get_lang_from_cookie", "(", "request", ",", "supported", ")", ":", "from", "django", ".", "conf", "import", "settings", "lang_code", "=", "request", ".", "COOKIES", ".", "get", "(", "settings", ".", "LANGUAGE_COOKIE_NAME", ")", "if", "(", "lang_code", "and", "(", "lang_code", "in", "supported", ")", ")", ":", "return", "lang_code", "return", "None" ]
see if the users browser sent a cookie with a preferred language .
train
false
38,348
def uniform_entropy_vec(loc, scale): if (isinstance(loc, float) or isinstance(loc, int)): return stats.uniform.entropy(loc, scale) else: return np.array([stats.uniform.entropy(loc_x, scale_x) for (loc_x, scale_x) in zip(loc, scale)])
[ "def", "uniform_entropy_vec", "(", "loc", ",", "scale", ")", ":", "if", "(", "isinstance", "(", "loc", ",", "float", ")", "or", "isinstance", "(", "loc", ",", "int", ")", ")", ":", "return", "stats", ".", "uniform", ".", "entropy", "(", "loc", ",", "scale", ")", "else", ":", "return", "np", ".", "array", "(", "[", "stats", ".", "uniform", ".", "entropy", "(", "loc_x", ",", "scale_x", ")", "for", "(", "loc_x", ",", "scale_x", ")", "in", "zip", "(", "loc", ",", "scale", ")", "]", ")" ]
vectorized version of stats .
train
false
38,349
def _get_changed_items(baselist, comparelist): return list((set(baselist) & set((set(baselist) ^ set(comparelist)))))
[ "def", "_get_changed_items", "(", "baselist", ",", "comparelist", ")", ":", "return", "list", "(", "(", "set", "(", "baselist", ")", "&", "set", "(", "(", "set", "(", "baselist", ")", "^", "set", "(", "comparelist", ")", ")", ")", ")", ")" ]
return changed items as they relate to baselist .
train
false
38,350
def get_display_profile(handle=None): if (sys.platform == 'win32'): from PIL import ImageWin if isinstance(handle, ImageWin.HDC): profile = core.get_display_profile_win32(handle, 1) else: profile = core.get_display_profile_win32((handle or 0)) else: try: get = _imagingcms.get_display_profile except AttributeError: return None else: profile = get() return ImageCmsProfile(profile)
[ "def", "get_display_profile", "(", "handle", "=", "None", ")", ":", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "from", "PIL", "import", "ImageWin", "if", "isinstance", "(", "handle", ",", "ImageWin", ".", "HDC", ")", ":", "profile", "=", "core", ".", "get_display_profile_win32", "(", "handle", ",", "1", ")", "else", ":", "profile", "=", "core", ".", "get_display_profile_win32", "(", "(", "handle", "or", "0", ")", ")", "else", ":", "try", ":", "get", "=", "_imagingcms", ".", "get_display_profile", "except", "AttributeError", ":", "return", "None", "else", ":", "profile", "=", "get", "(", ")", "return", "ImageCmsProfile", "(", "profile", ")" ]
fetches the profile for the current display device .
train
false
38,351
def forget_canvas(canvas): cc = [c() for c in canvasses if (c() is not None)] while (canvas in cc): cc.remove(canvas) canvasses[:] = [weakref.ref(c) for c in cc]
[ "def", "forget_canvas", "(", "canvas", ")", ":", "cc", "=", "[", "c", "(", ")", "for", "c", "in", "canvasses", "if", "(", "c", "(", ")", "is", "not", "None", ")", "]", "while", "(", "canvas", "in", "cc", ")", ":", "cc", ".", "remove", "(", "canvas", ")", "canvasses", "[", ":", "]", "=", "[", "weakref", ".", "ref", "(", "c", ")", "for", "c", "in", "cc", "]" ]
forget about the given canvas .
train
true
38,352
@ignore_warnings def test_classifier_results(): alpha = 0.1 n_features = 20 n_samples = 10 tol = 0.01 max_iter = 200 rng = np.random.RandomState(0) X = rng.normal(size=(n_samples, n_features)) w = rng.normal(size=n_features) y = np.dot(X, w) y = np.sign(y) clf1 = LogisticRegression(solver='sag', C=((1.0 / alpha) / n_samples), max_iter=max_iter, tol=tol, random_state=77) clf2 = clone(clf1) clf1.fit(X, y) clf2.fit(sp.csr_matrix(X), y) pred1 = clf1.predict(X) pred2 = clf2.predict(X) assert_almost_equal(pred1, y, decimal=12) assert_almost_equal(pred2, y, decimal=12)
[ "@", "ignore_warnings", "def", "test_classifier_results", "(", ")", ":", "alpha", "=", "0.1", "n_features", "=", "20", "n_samples", "=", "10", "tol", "=", "0.01", "max_iter", "=", "200", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "X", "=", "rng", ".", "normal", "(", "size", "=", "(", "n_samples", ",", "n_features", ")", ")", "w", "=", "rng", ".", "normal", "(", "size", "=", "n_features", ")", "y", "=", "np", ".", "dot", "(", "X", ",", "w", ")", "y", "=", "np", ".", "sign", "(", "y", ")", "clf1", "=", "LogisticRegression", "(", "solver", "=", "'sag'", ",", "C", "=", "(", "(", "1.0", "/", "alpha", ")", "/", "n_samples", ")", ",", "max_iter", "=", "max_iter", ",", "tol", "=", "tol", ",", "random_state", "=", "77", ")", "clf2", "=", "clone", "(", "clf1", ")", "clf1", ".", "fit", "(", "X", ",", "y", ")", "clf2", ".", "fit", "(", "sp", ".", "csr_matrix", "(", "X", ")", ",", "y", ")", "pred1", "=", "clf1", ".", "predict", "(", "X", ")", "pred2", "=", "clf2", ".", "predict", "(", "X", ")", "assert_almost_equal", "(", "pred1", ",", "y", ",", "decimal", "=", "12", ")", "assert_almost_equal", "(", "pred2", ",", "y", ",", "decimal", "=", "12", ")" ]
tests if classifier results match target .
train
false
38,353
def _rec_diff_eval(g, m, a, v, i, j, K): if (i == j): return dmp_eval(dmp_diff(g, m, v, K), a, v, K) (v, i) = ((v - 1), (i + 1)) return dmp_strip([_rec_diff_eval(c, m, a, v, i, j, K) for c in g], v)
[ "def", "_rec_diff_eval", "(", "g", ",", "m", ",", "a", ",", "v", ",", "i", ",", "j", ",", "K", ")", ":", "if", "(", "i", "==", "j", ")", ":", "return", "dmp_eval", "(", "dmp_diff", "(", "g", ",", "m", ",", "v", ",", "K", ")", ",", "a", ",", "v", ",", "K", ")", "(", "v", ",", "i", ")", "=", "(", "(", "v", "-", "1", ")", ",", "(", "i", "+", "1", ")", ")", "return", "dmp_strip", "(", "[", "_rec_diff_eval", "(", "c", ",", "m", ",", "a", ",", "v", ",", "i", ",", "j", ",", "K", ")", "for", "c", "in", "g", "]", ",", "v", ")" ]
recursive helper for :func:dmp_diff_eval .
train
false
38,355
def make_domains(lists): from six.moves import range domains = [] for iterable in lists: new_domain = iterable[:] for i in range(len(domains)): domains[i] = (domains[i] * len(iterable)) if domains: missing = ((len(domains[0]) - len(iterable)) / len(iterable)) i = 0 for j in range(len(iterable)): value = iterable[j] for dummy in range(missing): new_domain.insert(i, value) i += 1 i += 1 domains.append(new_domain) return domains
[ "def", "make_domains", "(", "lists", ")", ":", "from", "six", ".", "moves", "import", "range", "domains", "=", "[", "]", "for", "iterable", "in", "lists", ":", "new_domain", "=", "iterable", "[", ":", "]", "for", "i", "in", "range", "(", "len", "(", "domains", ")", ")", ":", "domains", "[", "i", "]", "=", "(", "domains", "[", "i", "]", "*", "len", "(", "iterable", ")", ")", "if", "domains", ":", "missing", "=", "(", "(", "len", "(", "domains", "[", "0", "]", ")", "-", "len", "(", "iterable", ")", ")", "/", "len", "(", "iterable", ")", ")", "i", "=", "0", "for", "j", "in", "range", "(", "len", "(", "iterable", ")", ")", ":", "value", "=", "iterable", "[", "j", "]", "for", "dummy", "in", "range", "(", "missing", ")", ":", "new_domain", ".", "insert", "(", "i", ",", "value", ")", "i", "+=", "1", "i", "+=", "1", "domains", ".", "append", "(", "new_domain", ")", "return", "domains" ]
given a list of lists .
train
false
38,356
def avgFoundAfter(decreasingTargetValues, listsOfActualValues, batchSize=1, useMedian=False): from scipy import sum numLists = len(listsOfActualValues) longest = max(list(map(len, listsOfActualValues))) res = [[0] for _ in range(numLists)] for tval in decreasingTargetValues: for (li, l) in enumerate(listsOfActualValues): lres = res[li] found = False for i in range(lres[(-1)], len(l)): if (l[i] <= tval): lres.append(i) found = True break if (not found): lres.append(longest) tmp = array(res) if useMedian: resx = median(tmp, axis=0)[1:] else: resx = (sum(tmp, axis=0)[1:] / float(numLists)) return (resx * batchSize)
[ "def", "avgFoundAfter", "(", "decreasingTargetValues", ",", "listsOfActualValues", ",", "batchSize", "=", "1", ",", "useMedian", "=", "False", ")", ":", "from", "scipy", "import", "sum", "numLists", "=", "len", "(", "listsOfActualValues", ")", "longest", "=", "max", "(", "list", "(", "map", "(", "len", ",", "listsOfActualValues", ")", ")", ")", "res", "=", "[", "[", "0", "]", "for", "_", "in", "range", "(", "numLists", ")", "]", "for", "tval", "in", "decreasingTargetValues", ":", "for", "(", "li", ",", "l", ")", "in", "enumerate", "(", "listsOfActualValues", ")", ":", "lres", "=", "res", "[", "li", "]", "found", "=", "False", "for", "i", "in", "range", "(", "lres", "[", "(", "-", "1", ")", "]", ",", "len", "(", "l", ")", ")", ":", "if", "(", "l", "[", "i", "]", "<=", "tval", ")", ":", "lres", ".", "append", "(", "i", ")", "found", "=", "True", "break", "if", "(", "not", "found", ")", ":", "lres", ".", "append", "(", "longest", ")", "tmp", "=", "array", "(", "res", ")", "if", "useMedian", ":", "resx", "=", "median", "(", "tmp", ",", "axis", "=", "0", ")", "[", "1", ":", "]", "else", ":", "resx", "=", "(", "sum", "(", "tmp", ",", "axis", "=", "0", ")", "[", "1", ":", "]", "/", "float", "(", "numLists", ")", ")", "return", "(", "resx", "*", "batchSize", ")" ]
determine the average number of steps to reach a certain value .
train
false
38,357
def rebuild_tree(doctype, parent_field): frappe.db.auto_commit_on_many_writes = 1 right = 1 result = frappe.db.sql((u"SELECT name FROM `tab%s` WHERE `%s`='' or `%s` IS NULL ORDER BY name ASC" % (doctype, parent_field, parent_field))) for r in result: right = rebuild_node(doctype, r[0], right, parent_field) frappe.db.auto_commit_on_many_writes = 0
[ "def", "rebuild_tree", "(", "doctype", ",", "parent_field", ")", ":", "frappe", ".", "db", ".", "auto_commit_on_many_writes", "=", "1", "right", "=", "1", "result", "=", "frappe", ".", "db", ".", "sql", "(", "(", "u\"SELECT name FROM `tab%s` WHERE `%s`='' or `%s` IS NULL ORDER BY name ASC\"", "%", "(", "doctype", ",", "parent_field", ",", "parent_field", ")", ")", ")", "for", "r", "in", "result", ":", "right", "=", "rebuild_node", "(", "doctype", ",", "r", "[", "0", "]", ",", "right", ",", "parent_field", ")", "frappe", ".", "db", ".", "auto_commit_on_many_writes", "=", "0" ]
call rebuild_node for all root nodes .
train
false
38,358
@treeio_login_required @handle_response_format def event_view(request, event_id, response_format='html'): event = get_object_or_404(Event, pk=event_id) if (not request.user.profile.has_permission(event)): return user_denied(request, message="You don't have access to this Event") return render_to_response('events/event_view', {'event': event}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "event_view", "(", "request", ",", "event_id", ",", "response_format", "=", "'html'", ")", ":", "event", "=", "get_object_or_404", "(", "Event", ",", "pk", "=", "event_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "event", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Event\"", ")", "return", "render_to_response", "(", "'events/event_view'", ",", "{", "'event'", ":", "event", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
event view .
train
false
38,359
def _verify_query_segregation(query, auth_project=None): auth_project = (auth_project or rbac.get_limited_to_project(pecan.request.headers)) if (not auth_project): return for q in query: if ((q.field in ('project', 'project_id')) and (auth_project != q.value)): raise base.ProjectNotAuthorized(q.value)
[ "def", "_verify_query_segregation", "(", "query", ",", "auth_project", "=", "None", ")", ":", "auth_project", "=", "(", "auth_project", "or", "rbac", ".", "get_limited_to_project", "(", "pecan", ".", "request", ".", "headers", ")", ")", "if", "(", "not", "auth_project", ")", ":", "return", "for", "q", "in", "query", ":", "if", "(", "(", "q", ".", "field", "in", "(", "'project'", ",", "'project_id'", ")", ")", "and", "(", "auth_project", "!=", "q", ".", "value", ")", ")", ":", "raise", "base", ".", "ProjectNotAuthorized", "(", "q", ".", "value", ")" ]
ensure non-admin queries are not constrained to another project .
train
false
38,361
def test_b_wien(): from .. import b_wien from ... import units as u t = (5778 * u.K) w = (b_wien / t).to(u.nm) assert (round(w.value) == 502)
[ "def", "test_b_wien", "(", ")", ":", "from", ".", ".", "import", "b_wien", "from", "...", "import", "units", "as", "u", "t", "=", "(", "5778", "*", "u", ".", "K", ")", "w", "=", "(", "b_wien", "/", "t", ")", ".", "to", "(", "u", ".", "nm", ")", "assert", "(", "round", "(", "w", ".", "value", ")", "==", "502", ")" ]
b_wien should give the correct peak wavelength for given blackbody temperature .
train
false
38,363
def verifyThirdPartyFile(url, checksum, fname): name = os.path.basename(fname) if os.path.exists(fname): print ('Using local copy of %s' % (name,)) else: print ('Did not find local copy of %s' % (name,)) print ('Downloading %s' % (name,)) downloadURL(url, fname) print ('Archive for %s stored as %s' % (name, fname)) if os.system(('MD5=$(openssl md5 %s) ; test "${MD5##*= }" = "%s"' % (shellQuote(fname), checksum))): fatal(('MD5 checksum mismatch for file %s' % fname))
[ "def", "verifyThirdPartyFile", "(", "url", ",", "checksum", ",", "fname", ")", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", "if", "os", ".", "path", ".", "exists", "(", "fname", ")", ":", "print", "(", "'Using local copy of %s'", "%", "(", "name", ",", ")", ")", "else", ":", "print", "(", "'Did not find local copy of %s'", "%", "(", "name", ",", ")", ")", "print", "(", "'Downloading %s'", "%", "(", "name", ",", ")", ")", "downloadURL", "(", "url", ",", "fname", ")", "print", "(", "'Archive for %s stored as %s'", "%", "(", "name", ",", "fname", ")", ")", "if", "os", ".", "system", "(", "(", "'MD5=$(openssl md5 %s) ; test \"${MD5##*= }\" = \"%s\"'", "%", "(", "shellQuote", "(", "fname", ")", ",", "checksum", ")", ")", ")", ":", "fatal", "(", "(", "'MD5 checksum mismatch for file %s'", "%", "fname", ")", ")" ]
download file from url to filename fname if it does not already exist .
train
false
38,364
@register.assignment_tag(takes_context=True) def assignment_params_and_context(context, arg): return ('assignment_params_and_context - Expected result (context value: %s): %s' % (context['value'], arg))
[ "@", "register", ".", "assignment_tag", "(", "takes_context", "=", "True", ")", "def", "assignment_params_and_context", "(", "context", ",", "arg", ")", ":", "return", "(", "'assignment_params_and_context - Expected result (context value: %s): %s'", "%", "(", "context", "[", "'value'", "]", ",", "arg", ")", ")" ]
expected assignment_params_and_context __doc__ .
train
false
38,365
def get_current_server_name(): server = _split_version_id(os.environ['CURRENT_VERSION_ID'])[0] if (not server): return 'default' return server
[ "def", "get_current_server_name", "(", ")", ":", "server", "=", "_split_version_id", "(", "os", ".", "environ", "[", "'CURRENT_VERSION_ID'", "]", ")", "[", "0", "]", "if", "(", "not", "server", ")", ":", "return", "'default'", "return", "server" ]
returns the server name of the current instance .
train
false
38,368
def search_upnp_device(): return defer.maybeDeferred(UPnPProtocol().search_device)
[ "def", "search_upnp_device", "(", ")", ":", "return", "defer", ".", "maybeDeferred", "(", "UPnPProtocol", "(", ")", ".", "search_device", ")" ]
check the network for an upnp device .
train
false
38,370
def get_object_info(env, app, path=None, swift_source=None): (version, account, container, obj) = split_path((path or env['PATH_INFO']), 4, 4, True) info = _get_object_info(app, env, account, container, obj, swift_source=swift_source) if info: info = deepcopy(info) else: info = headers_to_object_info({}, 0) for field in ('length',): if (info.get(field) is None): info[field] = 0 else: info[field] = int(info[field]) return info
[ "def", "get_object_info", "(", "env", ",", "app", ",", "path", "=", "None", ",", "swift_source", "=", "None", ")", ":", "(", "version", ",", "account", ",", "container", ",", "obj", ")", "=", "split_path", "(", "(", "path", "or", "env", "[", "'PATH_INFO'", "]", ")", ",", "4", ",", "4", ",", "True", ")", "info", "=", "_get_object_info", "(", "app", ",", "env", ",", "account", ",", "container", ",", "obj", ",", "swift_source", "=", "swift_source", ")", "if", "info", ":", "info", "=", "deepcopy", "(", "info", ")", "else", ":", "info", "=", "headers_to_object_info", "(", "{", "}", ",", "0", ")", "for", "field", "in", "(", "'length'", ",", ")", ":", "if", "(", "info", ".", "get", "(", "field", ")", "is", "None", ")", ":", "info", "[", "field", "]", "=", "0", "else", ":", "info", "[", "field", "]", "=", "int", "(", "info", "[", "field", "]", ")", "return", "info" ]
get the info structure for an object .
train
false
38,371
def test_swap_curly(): for str in ('foo', u('foo foo foo bar'), 'foo b\xc3\xa9\xc3\xa8 b\xc2\xa1\xc3\xb0/\xc4\xb3\xc9\x99\xcb\x98\xc2\xa9\xc3\xbe\xc3\x9f\xc2\xae~\xc2\xaf\xc3\xa6', u('foo b\xc3\xa9\xc3\xa8 b\xc2\xa1\xc3\xb0/\xc4\xb3\xc9\x99\xcb\x98\xc2\xa9\xc3\xbe\xc3\x9f\xc2\xae~\xc2\xaf\xc3\xa6')): assert (_swap_curly(str) == str) assert (_swap_curly('foo{bar}baz') == 'foo{{bar}}baz') assert (_swap_curly('foo{{bar}}baz') == 'foo{bar}baz') assert (_swap_curly('{foo}{{bar}}{baz}') == '{{foo}}{bar}{{baz}}') assert (_swap_curly('{foo}{{{bar}}}{baz}') == '{{foo}}{{{bar}}}{{baz}}') assert (_swap_curly('foo{ bar }baz') == 'foo{{ bar }}baz') assert (_swap_curly('foo{ bar}baz') == 'foo{{ bar}}baz') assert (_swap_curly('foo{bar }baz') == 'foo{{bar }}baz') assert (_swap_curly('foo{{ bar }}baz') == 'foo{bar}baz') assert (_swap_curly('foo{{bar }}baz') == 'foo{bar}baz') assert (_swap_curly('foo{{ bar}}baz') == 'foo{bar}baz')
[ "def", "test_swap_curly", "(", ")", ":", "for", "str", "in", "(", "'foo'", ",", "u", "(", "'foo foo foo bar'", ")", ",", "'foo b\\xc3\\xa9\\xc3\\xa8 b\\xc2\\xa1\\xc3\\xb0/\\xc4\\xb3\\xc9\\x99\\xcb\\x98\\xc2\\xa9\\xc3\\xbe\\xc3\\x9f\\xc2\\xae~\\xc2\\xaf\\xc3\\xa6'", ",", "u", "(", "'foo b\\xc3\\xa9\\xc3\\xa8 b\\xc2\\xa1\\xc3\\xb0/\\xc4\\xb3\\xc9\\x99\\xcb\\x98\\xc2\\xa9\\xc3\\xbe\\xc3\\x9f\\xc2\\xae~\\xc2\\xaf\\xc3\\xa6'", ")", ")", ":", "assert", "(", "_swap_curly", "(", "str", ")", "==", "str", ")", "assert", "(", "_swap_curly", "(", "'foo{bar}baz'", ")", "==", "'foo{{bar}}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{{bar}}baz'", ")", "==", "'foo{bar}baz'", ")", "assert", "(", "_swap_curly", "(", "'{foo}{{bar}}{baz}'", ")", "==", "'{{foo}}{bar}{{baz}}'", ")", "assert", "(", "_swap_curly", "(", "'{foo}{{{bar}}}{baz}'", ")", "==", "'{{foo}}{{{bar}}}{{baz}}'", ")", "assert", "(", "_swap_curly", "(", "'foo{ bar }baz'", ")", "==", "'foo{{ bar }}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{ bar}baz'", ")", "==", "'foo{{ bar}}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{bar }baz'", ")", "==", "'foo{{bar }}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{{ bar }}baz'", ")", "==", "'foo{bar}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{{bar }}baz'", ")", "==", "'foo{bar}baz'", ")", "assert", "(", "_swap_curly", "(", "'foo{{ bar}}baz'", ")", "==", "'foo{bar}baz'", ")" ]
test swap curly function .
train
false
38,372
def ParseKeyFilteredQuery(filters, orders): remaining_filters = [] key_range = ValueRange() key_prop = datastore_types.KEY_SPECIAL_PROPERTY for f in filters: op = f.op() if (not ((f.property_size() == 1) and (f.property(0).name() == key_prop) and (not ((op == datastore_pb.Query_Filter.IN) or (op == datastore_pb.Query_Filter.EXISTS))))): remaining_filters.append(f) continue val = f.property(0).value() Check(val.has_referencevalue(), '__key__ kind must be compared to a key') limit = datastore_types.FromReferenceProperty(val) key_range.Update(op, limit) remaining_orders = [] for o in orders: if (not ((o.direction() == datastore_pb.Query_Order.ASCENDING) and (o.property() == datastore_types.KEY_SPECIAL_PROPERTY))): remaining_orders.append(o) else: break Check((not remaining_filters), (('Only comparison filters on ' + key_prop) + ' supported')) Check((not remaining_orders), (('Only ascending order on ' + key_prop) + ' supported')) return key_range
[ "def", "ParseKeyFilteredQuery", "(", "filters", ",", "orders", ")", ":", "remaining_filters", "=", "[", "]", "key_range", "=", "ValueRange", "(", ")", "key_prop", "=", "datastore_types", ".", "KEY_SPECIAL_PROPERTY", "for", "f", "in", "filters", ":", "op", "=", "f", ".", "op", "(", ")", "if", "(", "not", "(", "(", "f", ".", "property_size", "(", ")", "==", "1", ")", "and", "(", "f", ".", "property", "(", "0", ")", ".", "name", "(", ")", "==", "key_prop", ")", "and", "(", "not", "(", "(", "op", "==", "datastore_pb", ".", "Query_Filter", ".", "IN", ")", "or", "(", "op", "==", "datastore_pb", ".", "Query_Filter", ".", "EXISTS", ")", ")", ")", ")", ")", ":", "remaining_filters", ".", "append", "(", "f", ")", "continue", "val", "=", "f", ".", "property", "(", "0", ")", ".", "value", "(", ")", "Check", "(", "val", ".", "has_referencevalue", "(", ")", ",", "'__key__ kind must be compared to a key'", ")", "limit", "=", "datastore_types", ".", "FromReferenceProperty", "(", "val", ")", "key_range", ".", "Update", "(", "op", ",", "limit", ")", "remaining_orders", "=", "[", "]", "for", "o", "in", "orders", ":", "if", "(", "not", "(", "(", "o", ".", "direction", "(", ")", "==", "datastore_pb", ".", "Query_Order", ".", "ASCENDING", ")", "and", "(", "o", ".", "property", "(", ")", "==", "datastore_types", ".", "KEY_SPECIAL_PROPERTY", ")", ")", ")", ":", "remaining_orders", ".", "append", "(", "o", ")", "else", ":", "break", "Check", "(", "(", "not", "remaining_filters", ")", ",", "(", "(", "'Only comparison filters on '", "+", "key_prop", ")", "+", "' supported'", ")", ")", "Check", "(", "(", "not", "remaining_orders", ")", ",", "(", "(", "'Only ascending order on '", "+", "key_prop", ")", "+", "' supported'", ")", ")", "return", "key_range" ]
parse queries which only allow filters and ascending-orders on __key__ .
train
false
38,373
def modelform_factory(model, form=ModelForm, fields=None, exclude=None, formfield_callback=None, widgets=None): attrs = {u'model': model} if (fields is not None): attrs[u'fields'] = fields if (exclude is not None): attrs[u'exclude'] = exclude if (widgets is not None): attrs[u'widgets'] = widgets parent = (object,) if hasattr(form, u'Meta'): parent = (form.Meta, object) Meta = type(str(u'Meta'), parent, attrs) class_name = (model.__name__ + str(u'Form')) form_class_attrs = {u'Meta': Meta, u'formfield_callback': formfield_callback} return type(form)(class_name, (form,), form_class_attrs)
[ "def", "modelform_factory", "(", "model", ",", "form", "=", "ModelForm", ",", "fields", "=", "None", ",", "exclude", "=", "None", ",", "formfield_callback", "=", "None", ",", "widgets", "=", "None", ")", ":", "attrs", "=", "{", "u'model'", ":", "model", "}", "if", "(", "fields", "is", "not", "None", ")", ":", "attrs", "[", "u'fields'", "]", "=", "fields", "if", "(", "exclude", "is", "not", "None", ")", ":", "attrs", "[", "u'exclude'", "]", "=", "exclude", "if", "(", "widgets", "is", "not", "None", ")", ":", "attrs", "[", "u'widgets'", "]", "=", "widgets", "parent", "=", "(", "object", ",", ")", "if", "hasattr", "(", "form", ",", "u'Meta'", ")", ":", "parent", "=", "(", "form", ".", "Meta", ",", "object", ")", "Meta", "=", "type", "(", "str", "(", "u'Meta'", ")", ",", "parent", ",", "attrs", ")", "class_name", "=", "(", "model", ".", "__name__", "+", "str", "(", "u'Form'", ")", ")", "form_class_attrs", "=", "{", "u'Meta'", ":", "Meta", ",", "u'formfield_callback'", ":", "formfield_callback", "}", "return", "type", "(", "form", ")", "(", "class_name", ",", "(", "form", ",", ")", ",", "form_class_attrs", ")" ]
returns a modelform containing form fields for the given model .
train
false
38,374
def getMembership(config, user): seen = set() for member_of in _getMembership(config, user, seen): (yield member_of) (yield 'all')
[ "def", "getMembership", "(", "config", ",", "user", ")", ":", "seen", "=", "set", "(", ")", "for", "member_of", "in", "_getMembership", "(", "config", ",", "user", ",", "seen", ")", ":", "(", "yield", "member_of", ")", "(", "yield", "'all'", ")" ]
generate groups user is member of .
train
false
38,375
def execute_with_nm_v3(*cmd, **kwargs): funcs = {test_data.sensor_status_cmd: get_sensor_status_init, test_data.init_sensor_cmd: init_sensor_agent, test_data.sdr_dump_cmd: sdr_dump, test_data.nm_version_cmd: get_nm_version_v3} return _execute(funcs, *cmd, **kwargs)
[ "def", "execute_with_nm_v3", "(", "*", "cmd", ",", "**", "kwargs", ")", ":", "funcs", "=", "{", "test_data", ".", "sensor_status_cmd", ":", "get_sensor_status_init", ",", "test_data", ".", "init_sensor_cmd", ":", "init_sensor_agent", ",", "test_data", ".", "sdr_dump_cmd", ":", "sdr_dump", ",", "test_data", ".", "nm_version_cmd", ":", "get_nm_version_v3", "}", "return", "_execute", "(", "funcs", ",", "*", "cmd", ",", "**", "kwargs", ")" ]
test version of execute on node manager v3 .
train
false
38,377
def html_docs(): paths_to_try = ((u'html', u'index.html'), (u'_build', u'html', u'index.html')) for paths in paths_to_try: docdir = doc(*paths) if core.exists(docdir): return docdir return doc(u'git-cola.rst')
[ "def", "html_docs", "(", ")", ":", "paths_to_try", "=", "(", "(", "u'html'", ",", "u'index.html'", ")", ",", "(", "u'_build'", ",", "u'html'", ",", "u'index.html'", ")", ")", "for", "paths", "in", "paths_to_try", ":", "docdir", "=", "doc", "(", "*", "paths", ")", "if", "core", ".", "exists", "(", "docdir", ")", ":", "return", "docdir", "return", "doc", "(", "u'git-cola.rst'", ")" ]
return the path to the cola html documentation .
train
false
38,378
def Record(name, object): import gencache object = gencache.EnsureDispatch(object) module = sys.modules[object.__class__.__module__] package = gencache.GetModuleForTypelib(module.CLSID, module.LCID, module.MajorVersion, module.MinorVersion) try: struct_guid = package.RecordMap[name] except KeyError: raise ValueError(("The structure '%s' is not defined in module '%s'" % (name, package))) return pythoncom.GetRecordFromGuids(module.CLSID, module.MajorVersion, module.MinorVersion, module.LCID, struct_guid)
[ "def", "Record", "(", "name", ",", "object", ")", ":", "import", "gencache", "object", "=", "gencache", ".", "EnsureDispatch", "(", "object", ")", "module", "=", "sys", ".", "modules", "[", "object", ".", "__class__", ".", "__module__", "]", "package", "=", "gencache", ".", "GetModuleForTypelib", "(", "module", ".", "CLSID", ",", "module", ".", "LCID", ",", "module", ".", "MajorVersion", ",", "module", ".", "MinorVersion", ")", "try", ":", "struct_guid", "=", "package", ".", "RecordMap", "[", "name", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "\"The structure '%s' is not defined in module '%s'\"", "%", "(", "name", ",", "package", ")", ")", ")", "return", "pythoncom", ".", "GetRecordFromGuids", "(", "module", ".", "CLSID", ",", "module", ".", "MajorVersion", ",", "module", ".", "MinorVersion", ",", "module", ".", "LCID", ",", "struct_guid", ")" ]
creates a new record object .
train
false
38,380
def CollectObjectIDs(ids, obj): if (id(obj) in ids): return ids.add(id(obj)) if isinstance(obj, (list, tuple, set, frozenset)): for e in obj: CollectObjectIDs(ids, e) elif isinstance(obj, dict): for (k, v) in obj.items(): CollectObjectIDs(ids, k) CollectObjectIDs(ids, v) return len(ids)
[ "def", "CollectObjectIDs", "(", "ids", ",", "obj", ")", ":", "if", "(", "id", "(", "obj", ")", "in", "ids", ")", ":", "return", "ids", ".", "add", "(", "id", "(", "obj", ")", ")", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ",", "set", ",", "frozenset", ")", ")", ":", "for", "e", "in", "obj", ":", "CollectObjectIDs", "(", "ids", ",", "e", ")", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "for", "(", "k", ",", "v", ")", "in", "obj", ".", "items", "(", ")", ":", "CollectObjectIDs", "(", "ids", ",", "k", ")", "CollectObjectIDs", "(", "ids", ",", "v", ")", "return", "len", "(", "ids", ")" ]
collect object ids seen in a structure .
train
false
38,381
def _priority_key(pep8_result): priority = [u'e701', u'e702', u'e225', u'e231', u'e201', u'e262'] middle_index = 10000 lowest_priority = [u'e501'] key = pep8_result[u'id'].lower() try: return priority.index(key) except ValueError: try: return ((middle_index + lowest_priority.index(key)) + 1) except ValueError: return middle_index
[ "def", "_priority_key", "(", "pep8_result", ")", ":", "priority", "=", "[", "u'e701'", ",", "u'e702'", ",", "u'e225'", ",", "u'e231'", ",", "u'e201'", ",", "u'e262'", "]", "middle_index", "=", "10000", "lowest_priority", "=", "[", "u'e501'", "]", "key", "=", "pep8_result", "[", "u'id'", "]", ".", "lower", "(", ")", "try", ":", "return", "priority", ".", "index", "(", "key", ")", "except", "ValueError", ":", "try", ":", "return", "(", "(", "middle_index", "+", "lowest_priority", ".", "index", "(", "key", ")", ")", "+", "1", ")", "except", "ValueError", ":", "return", "middle_index" ]
key for sorting pep8 results .
train
false
38,382
def get_program_types(user=None): catalog_integration = CatalogIntegration.current() if catalog_integration.enabled: user = _get_service_user(user, catalog_integration.service_username) if (not user): return [] api = create_catalog_api_client(user, catalog_integration) cache_key = '{base}.program_types'.format(base=catalog_integration.CACHE_KEY) return get_edx_api_data(catalog_integration, user, 'program_types', cache_key=(cache_key if catalog_integration.is_cache_enabled else None), api=api) else: return []
[ "def", "get_program_types", "(", "user", "=", "None", ")", ":", "catalog_integration", "=", "CatalogIntegration", ".", "current", "(", ")", "if", "catalog_integration", ".", "enabled", ":", "user", "=", "_get_service_user", "(", "user", ",", "catalog_integration", ".", "service_username", ")", "if", "(", "not", "user", ")", ":", "return", "[", "]", "api", "=", "create_catalog_api_client", "(", "user", ",", "catalog_integration", ")", "cache_key", "=", "'{base}.program_types'", ".", "format", "(", "base", "=", "catalog_integration", ".", "CACHE_KEY", ")", "return", "get_edx_api_data", "(", "catalog_integration", ",", "user", ",", "'program_types'", ",", "cache_key", "=", "(", "cache_key", "if", "catalog_integration", ".", "is_cache_enabled", "else", "None", ")", ",", "api", "=", "api", ")", "else", ":", "return", "[", "]" ]
retrieve all program types from the catalog service .
train
false
38,383
def parse_updates(rule): rules = shlex.split(rule) rules.pop(0) if (len(rules) > 0): return {'url': rules[0]} else: return True
[ "def", "parse_updates", "(", "rule", ")", ":", "rules", "=", "shlex", ".", "split", "(", "rule", ")", "rules", ".", "pop", "(", "0", ")", "if", "(", "len", "(", "rules", ")", ">", "0", ")", ":", "return", "{", "'url'", ":", "rules", "[", "0", "]", "}", "else", ":", "return", "True" ]
parse the updates line .
train
true
38,384
def s3_role_required(): T = current.T gtable = current.auth.settings.table_group represent = S3Represent(lookup='auth_group', fields=['role']) f = S3ReusableField('role_required', gtable, sortby='role', requires=IS_EMPTY_OR(IS_ONE_OF(current.db, 'auth_group.id', represent, zero=T('Public'))), represent=represent, label=T('Role Required'), comment=DIV(_class='tooltip', _title=('%s|%s' % (T('Role Required'), T('If this record should be restricted then select which role is required to access the record here.')))), ondelete='RESTRICT') return f()
[ "def", "s3_role_required", "(", ")", ":", "T", "=", "current", ".", "T", "gtable", "=", "current", ".", "auth", ".", "settings", ".", "table_group", "represent", "=", "S3Represent", "(", "lookup", "=", "'auth_group'", ",", "fields", "=", "[", "'role'", "]", ")", "f", "=", "S3ReusableField", "(", "'role_required'", ",", "gtable", ",", "sortby", "=", "'role'", ",", "requires", "=", "IS_EMPTY_OR", "(", "IS_ONE_OF", "(", "current", ".", "db", ",", "'auth_group.id'", ",", "represent", ",", "zero", "=", "T", "(", "'Public'", ")", ")", ")", ",", "represent", "=", "represent", ",", "label", "=", "T", "(", "'Role Required'", ")", ",", "comment", "=", "DIV", "(", "_class", "=", "'tooltip'", ",", "_title", "=", "(", "'%s|%s'", "%", "(", "T", "(", "'Role Required'", ")", ",", "T", "(", "'If this record should be restricted then select which role is required to access the record here.'", ")", ")", ")", ")", ",", "ondelete", "=", "'RESTRICT'", ")", "return", "f", "(", ")" ]
role required to access a resource - used by gis for map layer permissions management .
train
false
38,385
@pytest.mark.parametrize('env_id', test_envs) def test_smoke(env_id): gym.undo_logger_setup() logging.getLogger().setLevel(logging.INFO) env = gym.make(env_id) env = wrappers.Unvectorize(env) if os.environ.get('FORCE_LATEST_UNIVERSE_DOCKER_RUNTIMES'): configure_with_latest_docker_runtime_tag(env) else: env.configure(remotes=1) env.reset() _rollout(env, timestep_limit=(60 * 30))
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'env_id'", ",", "test_envs", ")", "def", "test_smoke", "(", "env_id", ")", ":", "gym", ".", "undo_logger_setup", "(", ")", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "logging", ".", "INFO", ")", "env", "=", "gym", ".", "make", "(", "env_id", ")", "env", "=", "wrappers", ".", "Unvectorize", "(", "env", ")", "if", "os", ".", "environ", ".", "get", "(", "'FORCE_LATEST_UNIVERSE_DOCKER_RUNTIMES'", ")", ":", "configure_with_latest_docker_runtime_tag", "(", "env", ")", "else", ":", "env", ".", "configure", "(", "remotes", "=", "1", ")", "env", ".", "reset", "(", ")", "_rollout", "(", "env", ",", "timestep_limit", "=", "(", "60", "*", "30", ")", ")" ]
check that environments start up without errors and that we can extract rewards and observations .
train
false
38,386
def fbthrift_library_config(append=None, **kwargs): blade_config.update_config('fbthrift_config', append, kwargs)
[ "def", "fbthrift_library_config", "(", "append", "=", "None", ",", "**", "kwargs", ")", ":", "blade_config", ".", "update_config", "(", "'fbthrift_config'", ",", "append", ",", "kwargs", ")" ]
fbthrift config .
train
false
38,387
def ThrottleRate(throttle_dict, max_count, time_period): now = GetCurrentTimestamp() if ((not throttle_dict) or (now >= (throttle_dict['start_time'] + time_period))): throttle_dict = {'start_time': now, 'count': 0} else: throttle_dict = {'start_time': throttle_dict['start_time'], 'count': throttle_dict['count']} if (throttle_dict['count'] >= max_count): return (throttle_dict, True) throttle_dict['count'] += 1 return (throttle_dict, False)
[ "def", "ThrottleRate", "(", "throttle_dict", ",", "max_count", ",", "time_period", ")", ":", "now", "=", "GetCurrentTimestamp", "(", ")", "if", "(", "(", "not", "throttle_dict", ")", "or", "(", "now", ">=", "(", "throttle_dict", "[", "'start_time'", "]", "+", "time_period", ")", ")", ")", ":", "throttle_dict", "=", "{", "'start_time'", ":", "now", ",", "'count'", ":", "0", "}", "else", ":", "throttle_dict", "=", "{", "'start_time'", ":", "throttle_dict", "[", "'start_time'", "]", ",", "'count'", ":", "throttle_dict", "[", "'count'", "]", "}", "if", "(", "throttle_dict", "[", "'count'", "]", ">=", "max_count", ")", ":", "return", "(", "throttle_dict", ",", "True", ")", "throttle_dict", "[", "'count'", "]", "+=", "1", "return", "(", "throttle_dict", ",", "False", ")" ]
throttle the rate at which occurrences of some event can occur .
train
false
38,388
def _seticon(object_alias, icondata): finder = _getfinder() args = {} attrs = {} aeobj_00 = aetypes.ObjectSpecifier(want=aetypes.Type('cobj'), form='alis', seld=object_alias, fr=None) aeobj_01 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('iimg'), fr=aeobj_00) args['----'] = aeobj_01 args['data'] = icondata (_reply, args, attrs) = finder.send('core', 'setd', args, attrs) if args.has_key('errn'): raise Error, aetools.decodeerror(args) if args.has_key('----'): return args['----'].data
[ "def", "_seticon", "(", "object_alias", ",", "icondata", ")", ":", "finder", "=", "_getfinder", "(", ")", "args", "=", "{", "}", "attrs", "=", "{", "}", "aeobj_00", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'cobj'", ")", ",", "form", "=", "'alis'", ",", "seld", "=", "object_alias", ",", "fr", "=", "None", ")", "aeobj_01", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'prop'", ")", ",", "form", "=", "'prop'", ",", "seld", "=", "aetypes", ".", "Type", "(", "'iimg'", ")", ",", "fr", "=", "aeobj_00", ")", "args", "[", "'----'", "]", "=", "aeobj_01", "args", "[", "'data'", "]", "=", "icondata", "(", "_reply", ",", "args", ",", "attrs", ")", "=", "finder", ".", "send", "(", "'core'", ",", "'setd'", ",", "args", ",", "attrs", ")", "if", "args", ".", "has_key", "(", "'errn'", ")", ":", "raise", "Error", ",", "aetools", ".", "decodeerror", "(", "args", ")", "if", "args", ".", "has_key", "(", "'----'", ")", ":", "return", "args", "[", "'----'", "]", ".", "data" ]
set the icondata for object .
train
false
38,389
def get_disk_size(path, format=None): size = images.qemu_img_info(path, format).virtual_size return int(size)
[ "def", "get_disk_size", "(", "path", ",", "format", "=", "None", ")", ":", "size", "=", "images", ".", "qemu_img_info", "(", "path", ",", "format", ")", ".", "virtual_size", "return", "int", "(", "size", ")" ]
get the size of a disk image .
train
false
38,390
def test_auto_close_us_after_open_task_is_deleted(data): data.task1.status = data.task_closed_status data.task1.save() data.task2.status = data.task_closed_status data.task2.save() assert (data.user_story1.is_closed is False) data.task3.delete() data.user_story1 = UserStory.objects.get(pk=data.user_story1.pk) assert (data.user_story1.is_closed is True)
[ "def", "test_auto_close_us_after_open_task_is_deleted", "(", "data", ")", ":", "data", ".", "task1", ".", "status", "=", "data", ".", "task_closed_status", "data", ".", "task1", ".", "save", "(", ")", "data", ".", "task2", ".", "status", "=", "data", ".", "task_closed_status", "data", ".", "task2", ".", "save", "(", ")", "assert", "(", "data", ".", "user_story1", ".", "is_closed", "is", "False", ")", "data", ".", "task3", ".", "delete", "(", ")", "data", ".", "user_story1", "=", "UserStory", ".", "objects", ".", "get", "(", "pk", "=", "data", ".", "user_story1", ".", "pk", ")", "assert", "(", "data", ".", "user_story1", ".", "is_closed", "is", "True", ")" ]
user story should be in closed state after delete the unique open task .
train
false
38,391
def add_data_sharing_consent_field(request, form_desc): enterprise_customer = get_enterprise_customer_for_request(request) required = data_sharing_consent_required_at_login(request) if (not data_sharing_consent_requested(request)): return label = _('I agree to allow {platform_name} to share data about my enrollment, completion and performance in all {platform_name} courses and programs where my enrollment is sponsored by {ec_name}.').format(platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME), ec_name=enterprise_customer.name) error_msg = _('To link your account with {ec_name}, you are required to consent to data sharing.').format(ec_name=enterprise_customer.name) form_desc.add_field('data_sharing_consent', label=label, field_type='checkbox', default=False, required=required, error_messages={'required': error_msg})
[ "def", "add_data_sharing_consent_field", "(", "request", ",", "form_desc", ")", ":", "enterprise_customer", "=", "get_enterprise_customer_for_request", "(", "request", ")", "required", "=", "data_sharing_consent_required_at_login", "(", "request", ")", "if", "(", "not", "data_sharing_consent_requested", "(", "request", ")", ")", ":", "return", "label", "=", "_", "(", "'I agree to allow {platform_name} to share data about my enrollment, completion and performance in all {platform_name} courses and programs where my enrollment is sponsored by {ec_name}.'", ")", ".", "format", "(", "platform_name", "=", "configuration_helpers", ".", "get_value", "(", "'PLATFORM_NAME'", ",", "settings", ".", "PLATFORM_NAME", ")", ",", "ec_name", "=", "enterprise_customer", ".", "name", ")", "error_msg", "=", "_", "(", "'To link your account with {ec_name}, you are required to consent to data sharing.'", ")", ".", "format", "(", "ec_name", "=", "enterprise_customer", ".", "name", ")", "form_desc", ".", "add_field", "(", "'data_sharing_consent'", ",", "label", "=", "label", ",", "field_type", "=", "'checkbox'", ",", "default", "=", "False", ",", "required", "=", "required", ",", "error_messages", "=", "{", "'required'", ":", "error_msg", "}", ")" ]
adds a checkbox field to be selected if the user consents to share data with the enterprisecustomer attached to the sso provider with which theyre authenticating .
train
false
38,392
def clone_via_serialize(obj): s = cPickle.dumps(obj, get_pickle_protocol()) return cPickle.loads(s)
[ "def", "clone_via_serialize", "(", "obj", ")", ":", "s", "=", "cPickle", ".", "dumps", "(", "obj", ",", "get_pickle_protocol", "(", ")", ")", "return", "cPickle", ".", "loads", "(", "s", ")" ]
makes a "deep copy" of an object by serializing it and then deserializing it .
train
true
38,393
def _ispow(e): return (isinstance(e, Expr) and (e.is_Pow or (e.func is exp)))
[ "def", "_ispow", "(", "e", ")", ":", "return", "(", "isinstance", "(", "e", ",", "Expr", ")", "and", "(", "e", ".", "is_Pow", "or", "(", "e", ".", "func", "is", "exp", ")", ")", ")" ]
return true if e is a pow or is exp .
train
false
38,394
def require_params(method, *required_keys): def decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): if (method == 'GET'): params = self.get_params elif (method == 'POST'): params = self.post_dict else: raise ValueError("Unsupported method '{method}'".format(method=method)) missing = [] for key in required_keys: if (params.get(key) is None): missing.append(key) if (len(missing) > 0): msg = 'Missing required key(s) {keys}'.format(keys=','.join(missing)) self.send_response(400, content=msg, headers={'Content-type': 'text/plain'}) else: return func(self, *args, **kwargs) return wrapper return decorator
[ "def", "require_params", "(", "method", ",", "*", "required_keys", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "method", "==", "'GET'", ")", ":", "params", "=", "self", ".", "get_params", "elif", "(", "method", "==", "'POST'", ")", ":", "params", "=", "self", ".", "post_dict", "else", ":", "raise", "ValueError", "(", "\"Unsupported method '{method}'\"", ".", "format", "(", "method", "=", "method", ")", ")", "missing", "=", "[", "]", "for", "key", "in", "required_keys", ":", "if", "(", "params", ".", "get", "(", "key", ")", "is", "None", ")", ":", "missing", ".", "append", "(", "key", ")", "if", "(", "len", "(", "missing", ")", ">", "0", ")", ":", "msg", "=", "'Missing required key(s) {keys}'", ".", "format", "(", "keys", "=", "','", ".", "join", "(", "missing", ")", ")", "self", ".", "send_response", "(", "400", ",", "content", "=", "msg", ",", "headers", "=", "{", "'Content-type'", ":", "'text/plain'", "}", ")", "else", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
ensures that the request contains all required parameters .
train
false
38,395
def get_header_from_yaml(lines): try: import yaml except ImportError: raise ImportError('`import yaml` failed, PyYAML package is required for ECSV format') from ..io.misc.yaml import AstropyLoader class TableLoader(AstropyLoader, ): '\n Custom Loader that constructs OrderedDict from an !!omap object.\n This does nothing but provide a namespace for adding the\n custom odict constructor.\n ' TableLoader.add_constructor(u'tag:yaml.org,2002:omap', _construct_odict) header_yaml = textwrap.dedent('\n'.join(lines)) try: header = yaml.load(header_yaml, Loader=TableLoader) except Exception as err: raise YamlParseError(str(err)) return header
[ "def", "get_header_from_yaml", "(", "lines", ")", ":", "try", ":", "import", "yaml", "except", "ImportError", ":", "raise", "ImportError", "(", "'`import yaml` failed, PyYAML package is required for ECSV format'", ")", "from", ".", ".", "io", ".", "misc", ".", "yaml", "import", "AstropyLoader", "class", "TableLoader", "(", "AstropyLoader", ",", ")", ":", "TableLoader", ".", "add_constructor", "(", "u'tag:yaml.org,2002:omap'", ",", "_construct_odict", ")", "header_yaml", "=", "textwrap", ".", "dedent", "(", "'\\n'", ".", "join", "(", "lines", ")", ")", "try", ":", "header", "=", "yaml", ".", "load", "(", "header_yaml", ",", "Loader", "=", "TableLoader", ")", "except", "Exception", "as", "err", ":", "raise", "YamlParseError", "(", "str", "(", "err", ")", ")", "return", "header" ]
get a header dict from input lines which should be valid yaml in the ecsv meta format .
train
false
38,396
def getInradiusByPaths(elementNode): return (0.5 * getExtentByPaths(elementNode))
[ "def", "getInradiusByPaths", "(", "elementNode", ")", ":", "return", "(", "0.5", "*", "getExtentByPaths", "(", "elementNode", ")", ")" ]
get inradius of the transformed paths of the xmlobject of the elementnode .
train
false
38,397
def validate_list_of_strict(object_list, object_types): for object_ in object_list: validate_type_strict(object_, object_types)
[ "def", "validate_list_of_strict", "(", "object_list", ",", "object_types", ")", ":", "for", "object_", "in", "object_list", ":", "validate_type_strict", "(", "object_", ",", "object_types", ")" ]
verifies that the items in object_list are of type object__type .
train
false
38,398
def _execute_types_in_stmt(evaluator, stmt): definitions = evaluator.eval_element(stmt) return chain.from_iterable((_execute_array_values(evaluator, d) for d in definitions))
[ "def", "_execute_types_in_stmt", "(", "evaluator", ",", "stmt", ")", ":", "definitions", "=", "evaluator", ".", "eval_element", "(", "stmt", ")", "return", "chain", ".", "from_iterable", "(", "(", "_execute_array_values", "(", "evaluator", ",", "d", ")", "for", "d", "in", "definitions", ")", ")" ]
executing all types or general elements that we find in a statement .
train
false
38,399
def safe_py_code(code): bads = ('import', ';', 'subprocess', 'eval', 'open', 'file', 'exec', 'input') for bad in bads: if code.count(bad): return False return True
[ "def", "safe_py_code", "(", "code", ")", ":", "bads", "=", "(", "'import'", ",", "';'", ",", "'subprocess'", ",", "'eval'", ",", "'open'", ",", "'file'", ",", "'exec'", ",", "'input'", ")", "for", "bad", "in", "bads", ":", "if", "code", ".", "count", "(", "bad", ")", ":", "return", "False", "return", "True" ]
check a string to see if it has any potentially unsafe routines which could be executed via python .
train
true
38,404
def _detect_global_scope(node, frame, defframe): def_scope = scope = None if (frame and frame.parent): scope = frame.parent.scope() if (defframe and defframe.parent): def_scope = defframe.parent.scope() if isinstance(frame, astroid.Function): if (not isinstance(node.parent, (astroid.Function, astroid.Arguments))): return False elif any(((not isinstance(f, (astroid.Class, astroid.Module))) for f in (frame, defframe))): return False break_scopes = [] for s in (scope, def_scope): parent_scope = s while parent_scope: if (not isinstance(parent_scope, (astroid.Class, astroid.Module))): break_scopes.append(parent_scope) break if parent_scope.parent: parent_scope = parent_scope.parent.scope() else: break if (break_scopes and (len(set(break_scopes)) != 1)): return False return (frame.lineno < defframe.lineno)
[ "def", "_detect_global_scope", "(", "node", ",", "frame", ",", "defframe", ")", ":", "def_scope", "=", "scope", "=", "None", "if", "(", "frame", "and", "frame", ".", "parent", ")", ":", "scope", "=", "frame", ".", "parent", ".", "scope", "(", ")", "if", "(", "defframe", "and", "defframe", ".", "parent", ")", ":", "def_scope", "=", "defframe", ".", "parent", ".", "scope", "(", ")", "if", "isinstance", "(", "frame", ",", "astroid", ".", "Function", ")", ":", "if", "(", "not", "isinstance", "(", "node", ".", "parent", ",", "(", "astroid", ".", "Function", ",", "astroid", ".", "Arguments", ")", ")", ")", ":", "return", "False", "elif", "any", "(", "(", "(", "not", "isinstance", "(", "f", ",", "(", "astroid", ".", "Class", ",", "astroid", ".", "Module", ")", ")", ")", "for", "f", "in", "(", "frame", ",", "defframe", ")", ")", ")", ":", "return", "False", "break_scopes", "=", "[", "]", "for", "s", "in", "(", "scope", ",", "def_scope", ")", ":", "parent_scope", "=", "s", "while", "parent_scope", ":", "if", "(", "not", "isinstance", "(", "parent_scope", ",", "(", "astroid", ".", "Class", ",", "astroid", ".", "Module", ")", ")", ")", ":", "break_scopes", ".", "append", "(", "parent_scope", ")", "break", "if", "parent_scope", ".", "parent", ":", "parent_scope", "=", "parent_scope", ".", "parent", ".", "scope", "(", ")", "else", ":", "break", "if", "(", "break_scopes", "and", "(", "len", "(", "set", "(", "break_scopes", ")", ")", "!=", "1", ")", ")", ":", "return", "False", "return", "(", "frame", ".", "lineno", "<", "defframe", ".", "lineno", ")" ]
detect that the given frames shares a global scope .
train
true
38,405
def ensure_utc(time, tz='UTC'): if (not time.tzinfo): time = time.replace(tzinfo=pytz.timezone(tz)) return time.replace(tzinfo=pytz.utc)
[ "def", "ensure_utc", "(", "time", ",", "tz", "=", "'UTC'", ")", ":", "if", "(", "not", "time", ".", "tzinfo", ")", ":", "time", "=", "time", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "timezone", "(", "tz", ")", ")", "return", "time", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")" ]
normalize a time .
train
true
38,407
def get_in_process_course_actions(request): return [course for course in CourseRerunState.objects.find_all(exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED}, should_display=True) if has_studio_read_access(request.user, course.course_key)]
[ "def", "get_in_process_course_actions", "(", "request", ")", ":", "return", "[", "course", "for", "course", "in", "CourseRerunState", ".", "objects", ".", "find_all", "(", "exclude_args", "=", "{", "'state'", ":", "CourseRerunUIStateManager", ".", "State", ".", "SUCCEEDED", "}", ",", "should_display", "=", "True", ")", "if", "has_studio_read_access", "(", "request", ".", "user", ",", "course", ".", "course_key", ")", "]" ]
get all in-process course actions .
train
false
38,409
def eval_location(pymodule, offset): return eval_location2(pymodule, offset)[1]
[ "def", "eval_location", "(", "pymodule", ",", "offset", ")", ":", "return", "eval_location2", "(", "pymodule", ",", "offset", ")", "[", "1", "]" ]
find the pyname at the offset .
train
false
38,410
def _find_id3v1(fileobj): extra_read = 'APETAGEX'.index('TAG') try: fileobj.seek(((-128) - extra_read), 2) except IOError as e: if (e.errno == errno.EINVAL): fileobj.seek(0, 0) else: raise data = fileobj.read((128 + extra_read)) try: idx = data.index('TAG') except ValueError: return (None, 0) else: try: ape_idx = data.index('APETAGEX') except ValueError: pass else: if (idx == (ape_idx + extra_read)): return (None, 0) tag = ParseID3v1(data[idx:]) if (tag is None): return (None, 0) offset = (idx - len(data)) return (tag, offset)
[ "def", "_find_id3v1", "(", "fileobj", ")", ":", "extra_read", "=", "'APETAGEX'", ".", "index", "(", "'TAG'", ")", "try", ":", "fileobj", ".", "seek", "(", "(", "(", "-", "128", ")", "-", "extra_read", ")", ",", "2", ")", "except", "IOError", "as", "e", ":", "if", "(", "e", ".", "errno", "==", "errno", ".", "EINVAL", ")", ":", "fileobj", ".", "seek", "(", "0", ",", "0", ")", "else", ":", "raise", "data", "=", "fileobj", ".", "read", "(", "(", "128", "+", "extra_read", ")", ")", "try", ":", "idx", "=", "data", ".", "index", "(", "'TAG'", ")", "except", "ValueError", ":", "return", "(", "None", ",", "0", ")", "else", ":", "try", ":", "ape_idx", "=", "data", ".", "index", "(", "'APETAGEX'", ")", "except", "ValueError", ":", "pass", "else", ":", "if", "(", "idx", "==", "(", "ape_idx", "+", "extra_read", ")", ")", ":", "return", "(", "None", ",", "0", ")", "tag", "=", "ParseID3v1", "(", "data", "[", "idx", ":", "]", ")", "if", "(", "tag", "is", "None", ")", ":", "return", "(", "None", ",", "0", ")", "offset", "=", "(", "idx", "-", "len", "(", "data", ")", ")", "return", "(", "tag", ",", "offset", ")" ]
returns a tuple of or offset mainly because we used to write too short tags in some cases and we need the offset to delete them .
train
false
38,411
def parse_float4(text): texts = [x for x in text.split(' ') if (x.strip() != '')] value = list(map(parse_float, texts)) if (len(value) < 1): raise Exception(('Invalid float4 format: %s' % text)) elif (len(value) == 1): return [value[0] for x in range(4)] elif (len(value) == 2): return [value[0], value[1], value[0], value[1]] elif (len(value) == 3): return [value[0], value[1], value[0], value[2]] elif (len(value) > 4): raise Exception(('Too many values in %s' % text)) return value
[ "def", "parse_float4", "(", "text", ")", ":", "texts", "=", "[", "x", "for", "x", "in", "text", ".", "split", "(", "' '", ")", "if", "(", "x", ".", "strip", "(", ")", "!=", "''", ")", "]", "value", "=", "list", "(", "map", "(", "parse_float", ",", "texts", ")", ")", "if", "(", "len", "(", "value", ")", "<", "1", ")", ":", "raise", "Exception", "(", "(", "'Invalid float4 format: %s'", "%", "text", ")", ")", "elif", "(", "len", "(", "value", ")", "==", "1", ")", ":", "return", "[", "value", "[", "0", "]", "for", "x", "in", "range", "(", "4", ")", "]", "elif", "(", "len", "(", "value", ")", "==", "2", ")", ":", "return", "[", "value", "[", "0", "]", ",", "value", "[", "1", "]", ",", "value", "[", "0", "]", ",", "value", "[", "1", "]", "]", "elif", "(", "len", "(", "value", ")", "==", "3", ")", ":", "return", "[", "value", "[", "0", "]", ",", "value", "[", "1", "]", ",", "value", "[", "0", "]", ",", "value", "[", "2", "]", "]", "elif", "(", "len", "(", "value", ")", ">", "4", ")", ":", "raise", "Exception", "(", "(", "'Too many values in %s'", "%", "text", ")", ")", "return", "value" ]
parse a string to a list of exactly 4 floats .
train
false
38,412
def getGCodeFilesWhichAreNotLogFiles(): return archive.getFilesWithFileTypeWithoutWords('gcode', ['_log'])
[ "def", "getGCodeFilesWhichAreNotLogFiles", "(", ")", ":", "return", "archive", ".", "getFilesWithFileTypeWithoutWords", "(", "'gcode'", ",", "[", "'_log'", "]", ")" ]
get gcode files which are not log files .
train
false
38,413
def strip_newsgroup_quoting(text): good_lines = [line for line in text.split('\n') if (not _QUOTE_RE.search(line))] return '\n'.join(good_lines)
[ "def", "strip_newsgroup_quoting", "(", "text", ")", ":", "good_lines", "=", "[", "line", "for", "line", "in", "text", ".", "split", "(", "'\\n'", ")", "if", "(", "not", "_QUOTE_RE", ".", "search", "(", "line", ")", ")", "]", "return", "'\\n'", ".", "join", "(", "good_lines", ")" ]
given text in "news" format .
train
false
38,414
def _PRE(text): return ('<pre>%s</pre>' % (escape(text),))
[ "def", "_PRE", "(", "text", ")", ":", "return", "(", "'<pre>%s</pre>'", "%", "(", "escape", "(", "text", ")", ",", ")", ")" ]
wraps <pre> tags around some text and html-escape it .
train
false
38,415
def set_system_time(newtime, utc_offset=None): fmts = ['%I:%M:%S %p', '%I:%M %p', '%H:%M:%S', '%H:%M'] dt_obj = _try_parse_datetime(newtime, fmts) if (dt_obj is None): return False return set_system_date_time(hours=dt_obj.hour, minutes=dt_obj.minute, seconds=dt_obj.second, utc_offset=utc_offset)
[ "def", "set_system_time", "(", "newtime", ",", "utc_offset", "=", "None", ")", ":", "fmts", "=", "[", "'%I:%M:%S %p'", ",", "'%I:%M %p'", ",", "'%H:%M:%S'", ",", "'%H:%M'", "]", "dt_obj", "=", "_try_parse_datetime", "(", "newtime", ",", "fmts", ")", "if", "(", "dt_obj", "is", "None", ")", ":", "return", "False", "return", "set_system_date_time", "(", "hours", "=", "dt_obj", ".", "hour", ",", "minutes", "=", "dt_obj", ".", "minute", ",", "seconds", "=", "dt_obj", ".", "second", ",", "utc_offset", "=", "utc_offset", ")" ]
set the system time .
train
true
38,416
def mkSuccess(endpoint, q): signed_list = [('openid.' + k) for k in q.keys()] return SuccessResponse(endpoint, Message.fromOpenIDArgs(q), signed_list)
[ "def", "mkSuccess", "(", "endpoint", ",", "q", ")", ":", "signed_list", "=", "[", "(", "'openid.'", "+", "k", ")", "for", "k", "in", "q", ".", "keys", "(", ")", "]", "return", "SuccessResponse", "(", "endpoint", ",", "Message", ".", "fromOpenIDArgs", "(", "q", ")", ",", "signed_list", ")" ]
convenience function to create a successresponse with the given arguments .
train
false
38,417
def save_upload(uploaded, filename, raw_data): try: from io import FileIO, BufferedWriter with BufferedWriter(FileIO(filename, 'wb')) as dest: if raw_data: if isinstance(uploaded, basestring): dest.write(uploaded) else: foo = uploaded.read(1024) while foo: dest.write(foo) foo = uploaded.read(1024) else: for c in uploaded.chunks(): dest.write(c) return True except IOError: pass return False
[ "def", "save_upload", "(", "uploaded", ",", "filename", ",", "raw_data", ")", ":", "try", ":", "from", "io", "import", "FileIO", ",", "BufferedWriter", "with", "BufferedWriter", "(", "FileIO", "(", "filename", ",", "'wb'", ")", ")", "as", "dest", ":", "if", "raw_data", ":", "if", "isinstance", "(", "uploaded", ",", "basestring", ")", ":", "dest", ".", "write", "(", "uploaded", ")", "else", ":", "foo", "=", "uploaded", ".", "read", "(", "1024", ")", "while", "foo", ":", "dest", ".", "write", "(", "foo", ")", "foo", "=", "uploaded", ".", "read", "(", "1024", ")", "else", ":", "for", "c", "in", "uploaded", ".", "chunks", "(", ")", ":", "dest", ".", "write", "(", "c", ")", "return", "True", "except", "IOError", ":", "pass", "return", "False" ]
raw_data: if true .
train
false
38,419
def process_autosummary_toc(app, doctree): env = app.builder.env crawled = {} def crawl_toc(node, depth=1): crawled[node] = True for (j, subnode) in enumerate(node): try: if (isinstance(subnode, autosummary_toc) and isinstance(subnode[0], addnodes.toctree)): env.note_toctree(env.docname, subnode[0]) continue except IndexError: continue if (not isinstance(subnode, nodes.section)): continue if (subnode not in crawled): crawl_toc(subnode, (depth + 1)) crawl_toc(doctree)
[ "def", "process_autosummary_toc", "(", "app", ",", "doctree", ")", ":", "env", "=", "app", ".", "builder", ".", "env", "crawled", "=", "{", "}", "def", "crawl_toc", "(", "node", ",", "depth", "=", "1", ")", ":", "crawled", "[", "node", "]", "=", "True", "for", "(", "j", ",", "subnode", ")", "in", "enumerate", "(", "node", ")", ":", "try", ":", "if", "(", "isinstance", "(", "subnode", ",", "autosummary_toc", ")", "and", "isinstance", "(", "subnode", "[", "0", "]", ",", "addnodes", ".", "toctree", ")", ")", ":", "env", ".", "note_toctree", "(", "env", ".", "docname", ",", "subnode", "[", "0", "]", ")", "continue", "except", "IndexError", ":", "continue", "if", "(", "not", "isinstance", "(", "subnode", ",", "nodes", ".", "section", ")", ")", ":", "continue", "if", "(", "subnode", "not", "in", "crawled", ")", ":", "crawl_toc", "(", "subnode", ",", "(", "depth", "+", "1", ")", ")", "crawl_toc", "(", "doctree", ")" ]
insert items described in autosummary:: to the toc tree .
train
true
38,420
def is_entrance_exams_enabled(): return settings.FEATURES.get('ENTRANCE_EXAMS')
[ "def", "is_entrance_exams_enabled", "(", ")", ":", "return", "settings", ".", "FEATURES", ".", "get", "(", "'ENTRANCE_EXAMS'", ")" ]
checks to see if the entrance exams feature is enabled use this operation instead of checking the feature flag all over the place .
train
false
38,421
@map_wrap def transfer_part(s3server, mp_id, mp_keyname, mp_bucketname, i, part): mp = mp_from_ids(s3server, mp_id, mp_keyname, mp_bucketname) with open(part) as t_handle: mp.upload_part_from_file(t_handle, (i + 1)) os.remove(part)
[ "@", "map_wrap", "def", "transfer_part", "(", "s3server", ",", "mp_id", ",", "mp_keyname", ",", "mp_bucketname", ",", "i", ",", "part", ")", ":", "mp", "=", "mp_from_ids", "(", "s3server", ",", "mp_id", ",", "mp_keyname", ",", "mp_bucketname", ")", "with", "open", "(", "part", ")", "as", "t_handle", ":", "mp", ".", "upload_part_from_file", "(", "t_handle", ",", "(", "i", "+", "1", ")", ")", "os", ".", "remove", "(", "part", ")" ]
transfer a part of a multipart upload .
train
false
38,422
def _script_names(dist, script_name, is_gui): if dist_in_usersite(dist): bin_dir = bin_user else: bin_dir = bin_py exe_name = os.path.join(bin_dir, script_name) paths_to_remove = [exe_name] if WINDOWS: paths_to_remove.append((exe_name + '.exe')) paths_to_remove.append((exe_name + '.exe.manifest')) if is_gui: paths_to_remove.append((exe_name + '-script.pyw')) else: paths_to_remove.append((exe_name + '-script.py')) return paths_to_remove
[ "def", "_script_names", "(", "dist", ",", "script_name", ",", "is_gui", ")", ":", "if", "dist_in_usersite", "(", "dist", ")", ":", "bin_dir", "=", "bin_user", "else", ":", "bin_dir", "=", "bin_py", "exe_name", "=", "os", ".", "path", ".", "join", "(", "bin_dir", ",", "script_name", ")", "paths_to_remove", "=", "[", "exe_name", "]", "if", "WINDOWS", ":", "paths_to_remove", ".", "append", "(", "(", "exe_name", "+", "'.exe'", ")", ")", "paths_to_remove", ".", "append", "(", "(", "exe_name", "+", "'.exe.manifest'", ")", ")", "if", "is_gui", ":", "paths_to_remove", ".", "append", "(", "(", "exe_name", "+", "'-script.pyw'", ")", ")", "else", ":", "paths_to_remove", ".", "append", "(", "(", "exe_name", "+", "'-script.py'", ")", ")", "return", "paths_to_remove" ]
create the fully qualified name of the files created by {console .
train
true
38,424
def get_expr_params(operator): if (operator.type == lo.PARAM): return operator.data.parameters() else: params = [] for arg in operator.args: params += get_expr_params(arg) if isinstance(operator.data, lo.LinOp): params += get_expr_params(operator.data) return params
[ "def", "get_expr_params", "(", "operator", ")", ":", "if", "(", "operator", ".", "type", "==", "lo", ".", "PARAM", ")", ":", "return", "operator", ".", "data", ".", "parameters", "(", ")", "else", ":", "params", "=", "[", "]", "for", "arg", "in", "operator", ".", "args", ":", "params", "+=", "get_expr_params", "(", "arg", ")", "if", "isinstance", "(", "operator", ".", "data", ",", "lo", ".", "LinOp", ")", ":", "params", "+=", "get_expr_params", "(", "operator", ".", "data", ")", "return", "params" ]
get a list of the parameters in the operator .
train
false
38,425
def cardOkay(): return True
[ "def", "cardOkay", "(", ")", ":", "return", "True" ]
not implemented: idea = returns string: okay .
train
false
38,427
def direct_get_suffix_hashes(node, part, suffixes, conn_timeout=5, response_timeout=15, headers=None): if (headers is None): headers = {} path = ('/%s' % '-'.join(suffixes)) with Timeout(conn_timeout): conn = http_connect(node['replication_ip'], node['replication_port'], node['device'], part, 'REPLICATE', path, headers=gen_headers(headers)) with Timeout(response_timeout): resp = conn.getresponse() if (not is_success(resp.status)): raise DirectClientException('Object', 'REPLICATE', node, part, path, resp, host={'ip': node['replication_ip'], 'port': node['replication_port']}) return pickle.loads(resp.read())
[ "def", "direct_get_suffix_hashes", "(", "node", ",", "part", ",", "suffixes", ",", "conn_timeout", "=", "5", ",", "response_timeout", "=", "15", ",", "headers", "=", "None", ")", ":", "if", "(", "headers", "is", "None", ")", ":", "headers", "=", "{", "}", "path", "=", "(", "'/%s'", "%", "'-'", ".", "join", "(", "suffixes", ")", ")", "with", "Timeout", "(", "conn_timeout", ")", ":", "conn", "=", "http_connect", "(", "node", "[", "'replication_ip'", "]", ",", "node", "[", "'replication_port'", "]", ",", "node", "[", "'device'", "]", ",", "part", ",", "'REPLICATE'", ",", "path", ",", "headers", "=", "gen_headers", "(", "headers", ")", ")", "with", "Timeout", "(", "response_timeout", ")", ":", "resp", "=", "conn", ".", "getresponse", "(", ")", "if", "(", "not", "is_success", "(", "resp", ".", "status", ")", ")", ":", "raise", "DirectClientException", "(", "'Object'", ",", "'REPLICATE'", ",", "node", ",", "part", ",", "path", ",", "resp", ",", "host", "=", "{", "'ip'", ":", "node", "[", "'replication_ip'", "]", ",", "'port'", ":", "node", "[", "'replication_port'", "]", "}", ")", "return", "pickle", ".", "loads", "(", "resp", ".", "read", "(", ")", ")" ]
get suffix hashes directly from the object server .
train
false
38,429
def test_deprecated_class_with_custom_metaclass(): with catch_warnings(AstropyDeprecationWarning) as w: TB() assert (len(w) == 1) assert (type(TB) is TMeta) assert (TB.metaclass_attr == 1)
[ "def", "test_deprecated_class_with_custom_metaclass", "(", ")", ":", "with", "catch_warnings", "(", "AstropyDeprecationWarning", ")", "as", "w", ":", "TB", "(", ")", "assert", "(", "len", "(", "w", ")", "==", "1", ")", "assert", "(", "type", "(", "TB", ")", "is", "TMeta", ")", "assert", "(", "TB", ".", "metaclass_attr", "==", "1", ")" ]
regression test for an issue where deprecating a class with a metaclass other than type did not restore the metaclass properly .
train
false
38,430
def get_datetime(index_timestamp, timestring): if ('%W' in timestring): timestring += '%w' index_timestamp += '1' elif ('%U' in timestring): timestring += '%w' index_timestamp += '1' elif ('%m' in timestring): if (not ('%d' in timestring)): timestring += '%d' index_timestamp += '1' return datetime.strptime(index_timestamp, timestring)
[ "def", "get_datetime", "(", "index_timestamp", ",", "timestring", ")", ":", "if", "(", "'%W'", "in", "timestring", ")", ":", "timestring", "+=", "'%w'", "index_timestamp", "+=", "'1'", "elif", "(", "'%U'", "in", "timestring", ")", ":", "timestring", "+=", "'%w'", "index_timestamp", "+=", "'1'", "elif", "(", "'%m'", "in", "timestring", ")", ":", "if", "(", "not", "(", "'%d'", "in", "timestring", ")", ")", ":", "timestring", "+=", "'%d'", "index_timestamp", "+=", "'1'", "return", "datetime", ".", "strptime", "(", "index_timestamp", ",", "timestring", ")" ]
return a datetime object .
train
false
38,431
def test_binary(method, prec, exp_range, restricted_range, itr, stat): if (method in BinaryRestricted): exp_range = restricted_range for op in all_binary(prec, exp_range, itr): t = TestSet(method, op) try: if (not convert(t)): continue callfuncs(t) verify(t, stat) except VerifyError as err: log(err) if (not method.startswith('__')): for op in binary_optarg(prec, exp_range, itr): t = TestSet(method, op) try: if (not convert(t)): continue callfuncs(t) verify(t, stat) except VerifyError as err: log(err)
[ "def", "test_binary", "(", "method", ",", "prec", ",", "exp_range", ",", "restricted_range", ",", "itr", ",", "stat", ")", ":", "if", "(", "method", "in", "BinaryRestricted", ")", ":", "exp_range", "=", "restricted_range", "for", "op", "in", "all_binary", "(", "prec", ",", "exp_range", ",", "itr", ")", ":", "t", "=", "TestSet", "(", "method", ",", "op", ")", "try", ":", "if", "(", "not", "convert", "(", "t", ")", ")", ":", "continue", "callfuncs", "(", "t", ")", "verify", "(", "t", ",", "stat", ")", "except", "VerifyError", "as", "err", ":", "log", "(", "err", ")", "if", "(", "not", "method", ".", "startswith", "(", "'__'", ")", ")", ":", "for", "op", "in", "binary_optarg", "(", "prec", ",", "exp_range", ",", "itr", ")", ":", "t", "=", "TestSet", "(", "method", ",", "op", ")", "try", ":", "if", "(", "not", "convert", "(", "t", ")", ")", ":", "continue", "callfuncs", "(", "t", ")", "verify", "(", "t", ",", "stat", ")", "except", "VerifyError", "as", "err", ":", "log", "(", "err", ")" ]
iterate a binary function through many test cases .
train
false
38,434
def list_roots(): ret = {} for saltenv in __opts__['pillar_roots']: ret[saltenv] = [] ret[saltenv].append(list_env(saltenv)) return ret
[ "def", "list_roots", "(", ")", ":", "ret", "=", "{", "}", "for", "saltenv", "in", "__opts__", "[", "'pillar_roots'", "]", ":", "ret", "[", "saltenv", "]", "=", "[", "]", "ret", "[", "saltenv", "]", ".", "append", "(", "list_env", "(", "saltenv", ")", ")", "return", "ret" ]
return all of the files names in all available environments .
train
true
38,436
@docfiller def convolve1d(input, weights, axis=(-1), output=None, mode='reflect', cval=0.0, origin=0): weights = weights[::(-1)] origin = (- origin) if (not (len(weights) & 1)): origin -= 1 return correlate1d(input, weights, axis, output, mode, cval, origin)
[ "@", "docfiller", "def", "convolve1d", "(", "input", ",", "weights", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ",", "origin", "=", "0", ")", ":", "weights", "=", "weights", "[", ":", ":", "(", "-", "1", ")", "]", "origin", "=", "(", "-", "origin", ")", "if", "(", "not", "(", "len", "(", "weights", ")", "&", "1", ")", ")", ":", "origin", "-=", "1", "return", "correlate1d", "(", "input", ",", "weights", ",", "axis", ",", "output", ",", "mode", ",", "cval", ",", "origin", ")" ]
calculate a one-dimensional convolution along the given axis .
train
false
38,438
@handle_response_format @treeio_login_required def field_delete(request, field_id, response_format='html'): field = get_object_or_404(ItemField, pk=field_id) if (not request.user.profile.has_permission(field, mode='w')): return user_denied(request, message="You don't have write access to this ItemField") if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): field.trash = True field.save() else: field.delete() return HttpResponseRedirect(reverse('infrastructure_settings_view')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('infrastructure_field_view', args=[field.id])) context = _get_default_context(request) context.update({'field': field}) return render_to_response('infrastructure/field_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "field_delete", "(", "request", ",", "field_id", ",", "response_format", "=", "'html'", ")", ":", "field", "=", "get_object_or_404", "(", "ItemField", ",", "pk", "=", "field_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "field", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have write access to this ItemField\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "field", ".", "trash", "=", "True", "field", ".", "save", "(", ")", "else", ":", "field", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'infrastructure_settings_view'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'infrastructure_field_view'", ",", "args", "=", "[", "field", ".", "id", "]", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'field'", ":", "field", "}", ")", "return", "render_to_response", "(", "'infrastructure/field_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
itemfield delete page .
train
false
38,439
def getOpenIDStore(filestore_path, table_prefix): if (not settings.DATABASE_ENGINE): return FileOpenIDStore(filestore_path) connection.cursor() tablenames = {'associations_table': (table_prefix + 'openid_associations'), 'nonces_table': (table_prefix + 'openid_nonces')} types = {'postgresql': sqlstore.PostgreSQLStore, 'mysql': sqlstore.MySQLStore, 'sqlite3': sqlstore.SQLiteStore} try: s = types[settings.DATABASE_ENGINE](connection.connection, **tablenames) except KeyError: raise ImproperlyConfigured, ('Database engine %s not supported by OpenID library' % (settings.DATABASE_ENGINE,)) try: s.createTables() except (SystemExit, KeyboardInterrupt, MemoryError) as e: raise except: pass return s
[ "def", "getOpenIDStore", "(", "filestore_path", ",", "table_prefix", ")", ":", "if", "(", "not", "settings", ".", "DATABASE_ENGINE", ")", ":", "return", "FileOpenIDStore", "(", "filestore_path", ")", "connection", ".", "cursor", "(", ")", "tablenames", "=", "{", "'associations_table'", ":", "(", "table_prefix", "+", "'openid_associations'", ")", ",", "'nonces_table'", ":", "(", "table_prefix", "+", "'openid_nonces'", ")", "}", "types", "=", "{", "'postgresql'", ":", "sqlstore", ".", "PostgreSQLStore", ",", "'mysql'", ":", "sqlstore", ".", "MySQLStore", ",", "'sqlite3'", ":", "sqlstore", ".", "SQLiteStore", "}", "try", ":", "s", "=", "types", "[", "settings", ".", "DATABASE_ENGINE", "]", "(", "connection", ".", "connection", ",", "**", "tablenames", ")", "except", "KeyError", ":", "raise", "ImproperlyConfigured", ",", "(", "'Database engine %s not supported by OpenID library'", "%", "(", "settings", ".", "DATABASE_ENGINE", ",", ")", ")", "try", ":", "s", ".", "createTables", "(", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ",", "MemoryError", ")", "as", "e", ":", "raise", "except", ":", "pass", "return", "s" ]
return an openid store object fit for the currently-chosen database backend .
train
true
38,441
def mock_data_row(dim=1000, prob_nnz=0.5, lam=1.0): nnz = np.random.uniform(size=(dim,)) data = [(i, float((np.random.poisson(lam=lam) + 1.0))) for i in xrange(dim) if (nnz[i] < prob_nnz)] return data
[ "def", "mock_data_row", "(", "dim", "=", "1000", ",", "prob_nnz", "=", "0.5", ",", "lam", "=", "1.0", ")", ":", "nnz", "=", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "dim", ",", ")", ")", "data", "=", "[", "(", "i", ",", "float", "(", "(", "np", ".", "random", ".", "poisson", "(", "lam", "=", "lam", ")", "+", "1.0", ")", ")", ")", "for", "i", "in", "xrange", "(", "dim", ")", "if", "(", "nnz", "[", "i", "]", "<", "prob_nnz", ")", "]", "return", "data" ]
create a random gensim sparse vector .
train
false
38,442
def parse_gml(lines, label='label', destringizer=None):
    """Parse a GML graph from a string or an iterable of lines.

    :param lines: GML data, either a single string or an iterable of lines.
    :param label: node attribute used for relabeling (default ``'label'``).
    :param destringizer: optional callable converting string values back to
        Python objects.
    :return: the parsed graph.
    :raises NetworkXError: if the input is not ASCII-encoded or a line
        contains an embedded newline.
    """
    def decode_line(line):
        # Reject non-ASCII byte input and normalize everything to str.
        if isinstance(line, bytes):
            try:
                line.decode('ascii')
            except UnicodeDecodeError:
                raise NetworkXError('input is not ASCII-encoded')
        if not isinstance(line, str):
            line = str(line)
        return line

    def filter_lines(lines):
        if isinstance(lines, (str, unicode)):
            # A single big string: split it into individual lines up front.
            for line in decode_line(lines).splitlines():
                yield line
        else:
            for line in lines:
                line = decode_line(line)
                if line.endswith('\n'):
                    line = line[:-1]
                if '\n' in line:
                    raise NetworkXError('input line contains newline')
                yield line

    return parse_gml_lines(filter_lines(lines), label, destringizer)
[ "def", "parse_gml", "(", "lines", ",", "label", "=", "'label'", ",", "destringizer", "=", "None", ")", ":", "def", "decode_line", "(", "line", ")", ":", "if", "isinstance", "(", "line", ",", "bytes", ")", ":", "try", ":", "line", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "raise", "NetworkXError", "(", "'input is not ASCII-encoded'", ")", "if", "(", "not", "isinstance", "(", "line", ",", "str", ")", ")", ":", "line", "=", "str", "(", "line", ")", "return", "line", "def", "filter_lines", "(", "lines", ")", ":", "if", "isinstance", "(", "lines", ",", "(", "str", ",", "unicode", ")", ")", ":", "lines", "=", "decode_line", "(", "lines", ")", "lines", "=", "lines", ".", "splitlines", "(", ")", "for", "line", "in", "lines", ":", "(", "yield", "line", ")", "else", ":", "for", "line", "in", "lines", ":", "line", "=", "decode_line", "(", "line", ")", "if", "(", "line", "and", "(", "line", "[", "(", "-", "1", ")", "]", "==", "'\\n'", ")", ")", ":", "line", "=", "line", "[", ":", "(", "-", "1", ")", "]", "if", "(", "line", ".", "find", "(", "'\\n'", ")", "!=", "(", "-", "1", ")", ")", ":", "raise", "NetworkXError", "(", "'input line contains newline'", ")", "(", "yield", "line", ")", "G", "=", "parse_gml_lines", "(", "filter_lines", "(", "lines", ")", ",", "label", ",", "destringizer", ")", "return", "G" ]
parse gml graph from a string or iterable .
train
false
38,443
def test_construction():
    """Smoke test: constructing a Deleter must not raise."""
    s3_deleter.Deleter()
[ "def", "test_construction", "(", ")", ":", "s3_deleter", ".", "Deleter", "(", ")" ]
the constructor basically works .
train
false
38,445
@click.command('set-ssl-key')
@click.argument('site')
@click.argument('ssl-certificate-key-path')
def set_ssl_certificate_key(site, ssl_certificate_key_path):
    """Set the SSL certificate private-key path for a site."""
    # Alias the helper so it does not shadow this command's own name.
    from bench.config.site_config import set_ssl_certificate_key as _set_key
    _set_key(site, ssl_certificate_key_path)
[ "@", "click", ".", "command", "(", "'set-ssl-key'", ")", "@", "click", ".", "argument", "(", "'site'", ")", "@", "click", ".", "argument", "(", "'ssl-certificate-key-path'", ")", "def", "set_ssl_certificate_key", "(", "site", ",", "ssl_certificate_key_path", ")", ":", "from", "bench", ".", "config", ".", "site_config", "import", "set_ssl_certificate_key", "set_ssl_certificate_key", "(", "site", ",", "ssl_certificate_key_path", ")" ]
set ssl certificate private key path for site .
train
false
38,446
def example_decorator(name, function):
    """Decorator (used from utils) that records each call in CALLED_FUNCTION.

    :param name: tag appended to the module-level ``CALLED_FUNCTION`` list on
        every invocation of the wrapped function.
    :param function: the callable to wrap.
    :return: a wrapper forwarding all arguments and the return value.
    """
    from functools import wraps

    # wraps() preserves the wrapped function's __name__/__doc__, which the
    # original wrapper clobbered with "wrapped_func".
    @wraps(function)
    def wrapped_func(*args, **kwargs):
        CALLED_FUNCTION.append(name)
        return function(*args, **kwargs)
    return wrapped_func
[ "def", "example_decorator", "(", "name", ",", "function", ")", ":", "def", "wrapped_func", "(", "*", "args", ",", "**", "kwarg", ")", ":", "CALLED_FUNCTION", ".", "append", "(", "name", ")", "return", "function", "(", "*", "args", ",", "**", "kwarg", ")", "return", "wrapped_func" ]
decorator for notify which is used from utils .
train
false
38,448
def relative_distance_boolean(a_str, b_str, threshold=0.6):
    """Tell whether two strings are similar enough.

    :param a_str: first string.
    :param b_str: second string.
    :param threshold: similarity cutoff in [0, 1]; 0 always matches, 1.0
        requires exact equality.
    :return: True if the similarity ratio reaches ``threshold``.
    """
    # Trivial thresholds short-circuit the (expensive) similarity check.
    if threshold == 0:
        return True
    if threshold == 1.0:
        return a_str == b_str

    # Order the operands so a_str is never the longer one.
    if len(b_str) < len(a_str):
        a_str, b_str = b_str, a_str
    alen = len(a_str)
    blen = len(b_str)

    if blen == 0 or alen == 0:
        # Similar only when both strings are empty.
        return alen == blen
    if blen == alen and a_str == b_str and threshold <= 1.0:
        return True

    # Cheap upper bound first; fall back to difflib's quick ratio.
    if threshold > upper_bound_similarity(a_str, b_str):
        return False
    return threshold <= difflib.SequenceMatcher(None, a_str, b_str).quick_ratio()
[ "def", "relative_distance_boolean", "(", "a_str", ",", "b_str", ",", "threshold", "=", "0.6", ")", ":", "if", "(", "threshold", "==", "0", ")", ":", "return", "True", "elif", "(", "threshold", "==", "1.0", ")", ":", "return", "(", "a_str", "==", "b_str", ")", "if", "(", "len", "(", "b_str", ")", "<", "len", "(", "a_str", ")", ")", ":", "(", "a_str", ",", "b_str", ")", "=", "(", "b_str", ",", "a_str", ")", "alen", "=", "len", "(", "a_str", ")", "blen", "=", "len", "(", "b_str", ")", "if", "(", "(", "blen", "==", "0", ")", "or", "(", "alen", "==", "0", ")", ")", ":", "return", "(", "alen", "==", "blen", ")", "if", "(", "(", "blen", "==", "alen", ")", "and", "(", "a_str", "==", "b_str", ")", "and", "(", "threshold", "<=", "1.0", ")", ")", ":", "return", "True", "if", "(", "threshold", ">", "upper_bound_similarity", "(", "a_str", ",", "b_str", ")", ")", ":", "return", "False", "else", ":", "return", "(", "threshold", "<=", "difflib", ".", "SequenceMatcher", "(", "None", ",", "a_str", ",", "b_str", ")", ".", "quick_ratio", "(", ")", ")" ]
indicates if the strings to compare are similar enough .
train
false
38,449
def fcontext_apply_policy(name, recursive=False):
    """Apply SELinux file-context policies to *name* via ``restorecon -F``.

    :param name: filespec (path) to relabel.
    :param recursive: also relabel everything below ``name`` (adds ``-R``).
    :return: the ``cmd.run_all`` result dict, extended with a ``'changes'``
        mapping of ``{filespec: {'old': ..., 'new': ...}}`` for every context
        restorecon reset, with unchanged context components removed.
    """
    ret = {}
    # NOTE(review): assumes fcontext_policy_is_applied returns restorecon's
    # dry-run output text describing pending resets -- confirm.
    changes_text = fcontext_policy_is_applied(name, recursive)
    cmd = 'restorecon -v -F '
    if recursive:
        cmd += '-R '
    # Escape the path so shell metacharacters in it are not interpreted.
    cmd += re.escape(name)
    apply_ret = __salt__['cmd.run_all'](cmd)
    ret.update(apply_ret)
    if (apply_ret['retcode'] == 0):
        # Each match: (filespec, old context string, new context string).
        changes_list = re.findall('restorecon reset (.*) context (.*)->(.*)$', changes_text, re.M)
        if (len(changes_list) > 0):
            ret.update({'changes': {}})
            for item in changes_list:
                filespec = item[0]
                old = _context_string_to_dict(item[1])
                new = _context_string_to_dict(item[2])
                # Collect context components that did not change ...
                intersect = {}
                for (key, value) in old.iteritems():
                    if (new.get(key) == value):
                        intersect.update({key: value})
                # ... and drop them from both sides so only the diff remains.
                for key in intersect.keys():
                    del old[key]
                    del new[key]
                ret['changes'].update({filespec: {'old': old, 'new': new}})
    return ret
[ "def", "fcontext_apply_policy", "(", "name", ",", "recursive", "=", "False", ")", ":", "ret", "=", "{", "}", "changes_text", "=", "fcontext_policy_is_applied", "(", "name", ",", "recursive", ")", "cmd", "=", "'restorecon -v -F '", "if", "recursive", ":", "cmd", "+=", "'-R '", "cmd", "+=", "re", ".", "escape", "(", "name", ")", "apply_ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "ret", ".", "update", "(", "apply_ret", ")", "if", "(", "apply_ret", "[", "'retcode'", "]", "==", "0", ")", ":", "changes_list", "=", "re", ".", "findall", "(", "'restorecon reset (.*) context (.*)->(.*)$'", ",", "changes_text", ",", "re", ".", "M", ")", "if", "(", "len", "(", "changes_list", ")", ">", "0", ")", ":", "ret", ".", "update", "(", "{", "'changes'", ":", "{", "}", "}", ")", "for", "item", "in", "changes_list", ":", "filespec", "=", "item", "[", "0", "]", "old", "=", "_context_string_to_dict", "(", "item", "[", "1", "]", ")", "new", "=", "_context_string_to_dict", "(", "item", "[", "2", "]", ")", "intersect", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "old", ".", "iteritems", "(", ")", ":", "if", "(", "new", ".", "get", "(", "key", ")", "==", "value", ")", ":", "intersect", ".", "update", "(", "{", "key", ":", "value", "}", ")", "for", "key", "in", "intersect", ".", "keys", "(", ")", ":", "del", "old", "[", "key", "]", "del", "new", "[", "key", "]", "ret", "[", "'changes'", "]", ".", "update", "(", "{", "filespec", ":", "{", "'old'", ":", "old", ",", "'new'", ":", "new", "}", "}", ")", "return", "ret" ]
applies selinux policies to filespec using restorecon [-r] filespec .
train
true
38,450
def reload_(name):
    """Reload the named service via the s6 CLI.

    NOTE(review): delegates to term(); confirm that term() sends the signal
    intended for a reload (HUP) rather than a plain terminate.
    """
    term(name)
[ "def", "reload_", "(", "name", ")", ":", "term", "(", "name", ")" ]
send a HUP to the service via the s6 CLI .
train
false
38,453
def acl_group_add_users(id, users):
    """Add users to an ACL group.

    :param id: ACL group identifier, resolved through ``smart_get``.
    :param users: identifiers of the users to add, resolved in bulk.
    """
    acl_group = models.AclGroup.smart_get(id)
    acl_group.check_for_acl_violation_acl_group()
    user_objs = models.User.smart_get_bulk(users)
    acl_group.users.add(*user_objs)
[ "def", "acl_group_add_users", "(", "id", ",", "users", ")", ":", "group", "=", "models", ".", "AclGroup", ".", "smart_get", "(", "id", ")", "group", ".", "check_for_acl_violation_acl_group", "(", ")", "users", "=", "models", ".", "User", ".", "smart_get_bulk", "(", "users", ")", "group", ".", "users", ".", "add", "(", "*", "users", ")" ]
add users to an acl group .
train
false
38,454
def _DecodeUrlSafe(urlsafe):
    """Decode a URL-safe base64-encoded string, tolerating missing padding.

    :param urlsafe: URL-safe base64 text (str or unicode); '-' and '_' stand
        for '+' and '/', and trailing '=' padding may have been stripped.
    :return: the decoded byte string.
    :raises TypeError: if urlsafe is not a string.
    """
    if not isinstance(urlsafe, basestring):
        raise TypeError('urlsafe must be a string; received %r' % urlsafe)
    if isinstance(urlsafe, unicode):
        urlsafe = urlsafe.encode('utf8')
    # Restore the '=' padding stripped for URL safety; -len % 4 yields the
    # exact number of pad characters (0..3) in one expression.
    urlsafe += '=' * (-len(urlsafe) % 4)
    # urlsafe_b64decode performs the '-'/'_' -> '+'/'/' translation itself,
    # replacing the original hand-rolled replace() calls.
    return base64.urlsafe_b64decode(urlsafe)
[ "def", "_DecodeUrlSafe", "(", "urlsafe", ")", ":", "if", "(", "not", "isinstance", "(", "urlsafe", ",", "basestring", ")", ")", ":", "raise", "TypeError", "(", "(", "'urlsafe must be a string; received %r'", "%", "urlsafe", ")", ")", "if", "isinstance", "(", "urlsafe", ",", "unicode", ")", ":", "urlsafe", "=", "urlsafe", ".", "encode", "(", "'utf8'", ")", "mod", "=", "(", "len", "(", "urlsafe", ")", "%", "4", ")", "if", "mod", ":", "urlsafe", "+=", "(", "'='", "*", "(", "4", "-", "mod", ")", ")", "return", "base64", ".", "b64decode", "(", "urlsafe", ".", "replace", "(", "'-'", ",", "'+'", ")", ".", "replace", "(", "'_'", ",", "'/'", ")", ")" ]
decode a url-safe base64-encoded string .
train
true
38,455
@contextlib.contextmanager
def silence():
    """Context manager that silences sys.stdout and sys.stderr.

    Both streams are replaced with _DummyFile instances for the duration of
    the with-block and restored afterwards.
    """
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = _DummyFile()
    sys.stderr = _DummyFile()
    try:
        yield
    finally:
        # Restore the real streams even when the managed block raises; the
        # original version leaked the dummy streams on exceptions.
        sys.stdout = old_stdout
        sys.stderr = old_stderr
[ "@", "contextlib", ".", "contextmanager", "def", "silence", "(", ")", ":", "old_stdout", "=", "sys", ".", "stdout", "old_stderr", "=", "sys", ".", "stderr", "sys", ".", "stdout", "=", "_DummyFile", "(", ")", "sys", ".", "stderr", "=", "_DummyFile", "(", ")", "(", "yield", ")", "sys", ".", "stdout", "=", "old_stdout", "sys", ".", "stderr", "=", "old_stderr" ]
a context manager that silences sys.stdout and sys.stderr .
train
false