column                type           range / values
id_within_dataset     int64          1 to 55.5k
snippet               stringlengths  19 to 14.2k
tokens                listlengths    6 to 1.63k
nl                    stringlengths  6 to 352
split_within_dataset  stringclasses  1 value
is_duplicated         bool           2 classes
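The rows that follow are raw records in the six-column layout above. As a minimal, hypothetical sketch of how such a dump might be consumed — the file name records.jsonl and the one-JSON-object-per-line layout are my assumptions, not something this dump specifies:

import json

# Hypothetical loader: assumes each record was exported as one JSON object
# per line, carrying the six fields listed in the schema above.
def iter_records(path='records.jsonl'):
    with open(path, encoding='utf-8') as handle:
        for line in handle:
            yield json.loads(line)

# Usage sketch: print each record's id and natural-language description.
# for rec in iter_records():
#     print(rec['id_within_dataset'], rec['nl'])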
54,282
def make_dependence_cmp():
    depends = make_depends()

    def dependence(a, b):
        '''
        A cmp function for nodes in a graph - does a depend on b?

        Returns
        -------
        int
            Positive number if a depends on b, negative number
            if b depends on a, 0 otherwise.
        '''
        if depends((a, b)):
            return 1
        if depends((b, a)):
            return (-1)
        return 0
    return dependence
[ "def", "make_dependence_cmp", "(", ")", ":", "depends", "=", "make_depends", "(", ")", "def", "dependence", "(", "a", ",", "b", ")", ":", "if", "depends", "(", "(", "a", ",", "b", ")", ")", ":", "return", "1", "if", "depends", "(", "(", "b", ",", "a", ")", ")", ":", "return", "(", "-", "1", ")", "return", "0", "return", "dependence" ]
create a comparator to represent the dependence of nodes in a graph .
train
false
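In each record the tokens column holds the lexer-level tokenization of the snippet, so space-joining the list approximates the code with its newlines collapsed. A small illustration (the token list here is shortened by hand, not a full record):

# Space-joining the `tokens` column roughly reconstructs the flattened snippet.
tokens = ['def', 'make_dependence_cmp', '(', ')', ':', 'depends', '=', 'make_depends', '(', ')']
flattened = ' '.join(tokens)
print(flattened)  # def make_dependence_cmp ( ) : depends = make_depends ( )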
54,283
def render_record(response, record, file_url=None, extra_data=None):
    data = {}
    if (extra_data is not None):
        data.update(extra_data)
    if (record is None):
        if (extra_data is None):
            response.set_status(404)
        response.out.write(render('details.html', data))
        return
    data.update(get_details_data(record, file_url))
    response.out.write(render('details.html', data))
[ "def", "render_record", "(", "response", ",", "record", ",", "file_url", "=", "None", ",", "extra_data", "=", "None", ")", ":", "data", "=", "{", "}", "if", "(", "extra_data", "is", "not", "None", ")", ":", "data", ".", "update", "(", "extra_data", ")", "if", "(", "record", "is", "None", ")", ":", "if", "(", "extra_data", "is", "None", ")", ":", "response", ".", "set_status", "(", "404", ")", "response", ".", "out", ".", "write", "(", "render", "(", "'details.html'", ",", "data", ")", ")", "return", "data", ".", "update", "(", "get_details_data", "(", "record", ",", "file_url", ")", ")", "response", ".", "out", ".", "write", "(", "render", "(", "'details.html'", ",", "data", ")", ")" ]
render an appstats record in detail .
train
false
54,285
def test_raw_unicode_escape_dashes():
    ok = True
    try:
        unicode('hey', 'raw_unicode-escape')
    except LookupError:
        ok = False
    Assert(ok, 'dashes and underscores should be interchangable')
[ "def", "test_raw_unicode_escape_dashes", "(", ")", ":", "ok", "=", "True", "try", ":", "unicode", "(", "'hey'", ",", "'raw_unicode-escape'", ")", "except", "LookupError", ":", "ok", "=", "False", "Assert", "(", "ok", ",", "'dashes and underscores should be interchangable'", ")" ]
make sure that either dashes or underscores work in raw encoding name .
train
false
54,287
def test_missing_data_pandas():
    X = np.random.random((10, 5))
    X[(1, 2)] = np.nan
    df = pandas.DataFrame(X)
    (vals, cnames, rnames) = data.interpret_data(df)
    np.testing.assert_equal(rnames.tolist(), [0, 2, 3, 4, 5, 6, 7, 8, 9])
[ "def", "test_missing_data_pandas", "(", ")", ":", "X", "=", "np", ".", "random", ".", "random", "(", "(", "10", ",", "5", ")", ")", "X", "[", "(", "1", ",", "2", ")", "]", "=", "np", ".", "nan", "df", "=", "pandas", ".", "DataFrame", "(", "X", ")", "(", "vals", ",", "cnames", ",", "rnames", ")", "=", "data", ".", "interpret_data", "(", "df", ")", "np", ".", "testing", ".", "assert_equal", "(", "rnames", ".", "tolist", "(", ")", ",", "[", "0", ",", "2", ",", "3", ",", "4", ",", "5", ",", "6", ",", "7", ",", "8", ",", "9", "]", ")" ]
fixes gh: #144 .
train
false
54,288
def residue_reduce_to_basic(H, DE, z):
    i = Dummy('i')
    s = list(zip(reversed(DE.T), reversed([f(DE.x) for f in DE.Tfuncs])))
    return sum((RootSum(a[0].as_poly(z), Lambda(i, (i * log(a[1].as_expr()).subs({z: i}).subs(s)))) for a in H))
[ "def", "residue_reduce_to_basic", "(", "H", ",", "DE", ",", "z", ")", ":", "i", "=", "Dummy", "(", "'i'", ")", "s", "=", "list", "(", "zip", "(", "reversed", "(", "DE", ".", "T", ")", ",", "reversed", "(", "[", "f", "(", "DE", ".", "x", ")", "for", "f", "in", "DE", ".", "Tfuncs", "]", ")", ")", ")", "return", "sum", "(", "(", "RootSum", "(", "a", "[", "0", "]", ".", "as_poly", "(", "z", ")", ",", "Lambda", "(", "i", ",", "(", "i", "*", "log", "(", "a", "[", "1", "]", ".", "as_expr", "(", ")", ")", ".", "subs", "(", "{", "z", ":", "i", "}", ")", ".", "subs", "(", "s", ")", ")", ")", ")", "for", "a", "in", "H", ")", ")" ]
converts the tuple returned by residue_reduce() into a basic expression .
train
false
54,289
def get_bs_object_for_view(request, view, user, object=None):
    with override_provides('service_provider_admin_form', [
            'shuup.testing.service_forms.PseudoPaymentProcessorForm',
            'shuup.admin.modules.service_providers.forms:CustomCarrierForm',
            'shuup.admin.modules.service_providers.forms:CustomPaymentProcessorForm']):
        request = apply_all_middleware(request, user=user)
        response = view(request, pk=(object.pk if object else None))
        if hasattr(response, 'render'):
            response.render()
        assert (response.status_code in [200, 302])
        return BeautifulSoup(response.content)
[ "def", "get_bs_object_for_view", "(", "request", ",", "view", ",", "user", ",", "object", "=", "None", ")", ":", "with", "override_provides", "(", "'service_provider_admin_form'", ",", "[", "'shuup.testing.service_forms.PseudoPaymentProcessorForm'", ",", "'shuup.admin.modules.service_providers.forms:CustomCarrierForm'", ",", "'shuup.admin.modules.service_providers.forms:CustomPaymentProcessorForm'", "]", ")", ":", "request", "=", "apply_all_middleware", "(", "request", ",", "user", "=", "user", ")", "response", "=", "view", "(", "request", ",", "pk", "=", "(", "object", ".", "pk", "if", "object", "else", "None", ")", ")", "if", "hasattr", "(", "response", ",", "'render'", ")", ":", "response", ".", "render", "(", ")", "assert", "(", "response", ".", "status_code", "in", "[", "200", ",", "302", "]", ")", "return", "BeautifulSoup", "(", "response", ".", "content", ")" ]
help function to get beautifulsoup object from the html rendered by the edit view .
train
false
54,291
@register.inclusion_tag('inclusion.html')
def inclusion_two_params(one, two):
    return {'result': ('inclusion_two_params - Expected result: %s, %s' % (one, two))}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ")", "def", "inclusion_two_params", "(", "one", ",", "two", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_two_params - Expected result: %s, %s'", "%", "(", "one", ",", "two", ")", ")", "}" ]
expected inclusion_two_params __doc__ .
train
false
54,292
def call_command(name, *args, **options):
    try:
        app_name = get_commands()[name]
        if isinstance(app_name, BaseCommand):
            klass = app_name
        else:
            klass = load_command_class(app_name, name)
    except KeyError:
        raise CommandError(('Unknown command: %r' % name))
    defaults = dict([(o.dest, o.default) for o in klass.option_list if (o.default is not NO_DEFAULT)])
    defaults.update(options)
    return klass.execute(*args, **defaults)
[ "def", "call_command", "(", "name", ",", "*", "args", ",", "**", "options", ")", ":", "try", ":", "app_name", "=", "get_commands", "(", ")", "[", "name", "]", "if", "isinstance", "(", "app_name", ",", "BaseCommand", ")", ":", "klass", "=", "app_name", "else", ":", "klass", "=", "load_command_class", "(", "app_name", ",", "name", ")", "except", "KeyError", ":", "raise", "CommandError", "(", "(", "'Unknown command: %r'", "%", "name", ")", ")", "defaults", "=", "dict", "(", "[", "(", "o", ".", "dest", ",", "o", ".", "default", ")", "for", "o", "in", "klass", ".", "option_list", "if", "(", "o", ".", "default", "is", "not", "NO_DEFAULT", ")", "]", ")", "defaults", ".", "update", "(", "options", ")", "return", "klass", ".", "execute", "(", "*", "args", ",", "**", "defaults", ")" ]
calls the given command .
train
false
54,293
def HostAPI(*args, **kwargs):
    compute_api_class_name = _get_compute_api_class_name()
    compute_api_class = importutils.import_class(compute_api_class_name)
    class_name = (compute_api_class.__module__ + '.HostAPI')
    return importutils.import_object(class_name, *args, **kwargs)
[ "def", "HostAPI", "(", "*", "args", ",", "**", "kwargs", ")", ":", "compute_api_class_name", "=", "_get_compute_api_class_name", "(", ")", "compute_api_class", "=", "importutils", ".", "import_class", "(", "compute_api_class_name", ")", "class_name", "=", "(", "compute_api_class", ".", "__module__", "+", "'.HostAPI'", ")", "return", "importutils", ".", "import_object", "(", "class_name", ",", "*", "args", ",", "**", "kwargs", ")" ]
returns the hostapi class from the same module as the configured compute api .
train
false
54,294
def run_script_with_context(script_path, cwd, context):
    (_, extension) = os.path.splitext(script_path)
    contents = io.open(script_path, 'r', encoding='utf-8').read()
    with tempfile.NamedTemporaryFile(delete=False, mode='wb', suffix=extension) as temp:
        env = StrictEnvironment(context=context, keep_trailing_newline=True)
        template = env.from_string(contents)
        output = template.render(**context)
        temp.write(output.encode('utf-8'))
    run_script(temp.name, cwd)
[ "def", "run_script_with_context", "(", "script_path", ",", "cwd", ",", "context", ")", ":", "(", "_", ",", "extension", ")", "=", "os", ".", "path", ".", "splitext", "(", "script_path", ")", "contents", "=", "io", ".", "open", "(", "script_path", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", ".", "read", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "delete", "=", "False", ",", "mode", "=", "'wb'", ",", "suffix", "=", "extension", ")", "as", "temp", ":", "env", "=", "StrictEnvironment", "(", "context", "=", "context", ",", "keep_trailing_newline", "=", "True", ")", "template", "=", "env", ".", "from_string", "(", "contents", ")", "output", "=", "template", ".", "render", "(", "**", "context", ")", "temp", ".", "write", "(", "output", ".", "encode", "(", "'utf-8'", ")", ")", "run_script", "(", "temp", ".", "name", ",", "cwd", ")" ]
execute a script after rendering it with jinja .
train
true
54,295
def _filter_ipv4(addresses):
    return [address for address in addresses if is_valid_ip_address(address=address, family=socket.AF_INET)]
[ "def", "_filter_ipv4", "(", "addresses", ")", ":", "return", "[", "address", "for", "address", "in", "addresses", "if", "is_valid_ip_address", "(", "address", "=", "address", ",", "family", "=", "socket", ".", "AF_INET", ")", "]" ]
select ipv4 addresses from the list of ip addresses .
train
false
54,298
def bokeh_tree(name, rawtext, text, lineno, inliner, options=None, content=None):
    app = inliner.document.settings.env.app
    tag = app.env.config['version']
    if ('-' in tag):
        tag = 'master'
    url = ('%s/tree/%s/%s' % (BOKEH_GH, tag, text))
    _try_url(app, url, 'tree')
    options = (options or {})
    set_classes(options)
    node = nodes.reference(rawtext, text, refuri=url, **options)
    return ([node], [])
[ "def", "bokeh_tree", "(", "name", ",", "rawtext", ",", "text", ",", "lineno", ",", "inliner", ",", "options", "=", "None", ",", "content", "=", "None", ")", ":", "app", "=", "inliner", ".", "document", ".", "settings", ".", "env", ".", "app", "tag", "=", "app", ".", "env", ".", "config", "[", "'version'", "]", "if", "(", "'-'", "in", "tag", ")", ":", "tag", "=", "'master'", "url", "=", "(", "'%s/tree/%s/%s'", "%", "(", "BOKEH_GH", ",", "tag", ",", "text", ")", ")", "_try_url", "(", "app", ",", "url", ",", "'tree'", ")", "options", "=", "(", "options", "or", "{", "}", ")", "set_classes", "(", "options", ")", "node", "=", "nodes", ".", "reference", "(", "rawtext", ",", "text", ",", "refuri", "=", "url", ",", "**", "options", ")", "return", "(", "[", "node", "]", ",", "[", "]", ")" ]
link to a url in the bokeh github tree .
train
true
54,299
def toptexts(query, texts, index, n=10):
    sims = index[query]
    sims = sorted(enumerate(sims), key=(lambda item: (- item[1])))
    result = []
    for (topid, topcosine) in sims[:n]:
        result.append((topid, topcosine, texts[topid]))
    return result
[ "def", "toptexts", "(", "query", ",", "texts", ",", "index", ",", "n", "=", "10", ")", ":", "sims", "=", "index", "[", "query", "]", "sims", "=", "sorted", "(", "enumerate", "(", "sims", ")", ",", "key", "=", "(", "lambda", "item", ":", "(", "-", "item", "[", "1", "]", ")", ")", ")", "result", "=", "[", "]", "for", "(", "topid", ",", "topcosine", ")", "in", "sims", "[", ":", "n", "]", ":", "result", ".", "append", "(", "(", "topid", ",", "topcosine", ",", "texts", "[", "topid", "]", ")", ")", "return", "result" ]
debug fnc to help inspect the top n most similar documents .
train
false
54,300
def KernelVersion():
    rtl_osversioninfoexw = RtlOSVersionInfoExw()
    try:
        RtlGetVersion(rtl_osversioninfoexw)
    except exceptions.WindowsError:
        return 'unknown'
    return ('%d.%d.%d' % (rtl_osversioninfoexw.dwMajorVersion, rtl_osversioninfoexw.dwMinorVersion, rtl_osversioninfoexw.dwBuildNumber))
[ "def", "KernelVersion", "(", ")", ":", "rtl_osversioninfoexw", "=", "RtlOSVersionInfoExw", "(", ")", "try", ":", "RtlGetVersion", "(", "rtl_osversioninfoexw", ")", "except", "exceptions", ".", "WindowsError", ":", "return", "'unknown'", "return", "(", "'%d.%d.%d'", "%", "(", "rtl_osversioninfoexw", ".", "dwMajorVersion", ",", "rtl_osversioninfoexw", ".", "dwMinorVersion", ",", "rtl_osversioninfoexw", ".", "dwBuildNumber", ")", ")" ]
gets the kernel version as string .
train
false
54,301
@tasklets.tasklet
def fetch_data_async(blob, start_index, end_index, **options):
    if isinstance(blob, BlobInfo):
        blob = blob.key()
    rpc = blobstore.create_rpc(**options)
    rpc = blobstore.fetch_data_async(blob, start_index, end_index, rpc=rpc)
    result = (yield rpc)
    raise tasklets.Return(result)
[ "@", "tasklets", ".", "tasklet", "def", "fetch_data_async", "(", "blob", ",", "start_index", ",", "end_index", ",", "**", "options", ")", ":", "if", "isinstance", "(", "blob", ",", "BlobInfo", ")", ":", "blob", "=", "blob", ".", "key", "(", ")", "rpc", "=", "blobstore", ".", "create_rpc", "(", "**", "options", ")", "rpc", "=", "blobstore", ".", "fetch_data_async", "(", "blob", ",", "start_index", ",", "end_index", ",", "rpc", "=", "rpc", ")", "result", "=", "(", "yield", "rpc", ")", "raise", "tasklets", ".", "Return", "(", "result", ")" ]
async version of fetch_data() .
train
true
54,302
def csd(x, y, fs=1.0, window='hann', nperseg=None, noverlap=None, nfft=None, detrend='constant', return_onesided=True, scaling='density', axis=(-1)):
    (freqs, _, Pxy) = _spectral_helper(x, y, fs, window, nperseg, noverlap, nfft, detrend, return_onesided, scaling, axis, mode='psd')
    if ((len(Pxy.shape) >= 2) and (Pxy.size > 0)):
        if (Pxy.shape[(-1)] > 1):
            Pxy = Pxy.mean(axis=(-1))
        else:
            Pxy = np.reshape(Pxy, Pxy.shape[:(-1)])
    return (freqs, Pxy)
[ "def", "csd", "(", "x", ",", "y", ",", "fs", "=", "1.0", ",", "window", "=", "'hann'", ",", "nperseg", "=", "None", ",", "noverlap", "=", "None", ",", "nfft", "=", "None", ",", "detrend", "=", "'constant'", ",", "return_onesided", "=", "True", ",", "scaling", "=", "'density'", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "(", "freqs", ",", "_", ",", "Pxy", ")", "=", "_spectral_helper", "(", "x", ",", "y", ",", "fs", ",", "window", ",", "nperseg", ",", "noverlap", ",", "nfft", ",", "detrend", ",", "return_onesided", ",", "scaling", ",", "axis", ",", "mode", "=", "'psd'", ")", "if", "(", "(", "len", "(", "Pxy", ".", "shape", ")", ">=", "2", ")", "and", "(", "Pxy", ".", "size", ">", "0", ")", ")", ":", "if", "(", "Pxy", ".", "shape", "[", "(", "-", "1", ")", "]", ">", "1", ")", ":", "Pxy", "=", "Pxy", ".", "mean", "(", "axis", "=", "(", "-", "1", ")", ")", "else", ":", "Pxy", "=", "np", ".", "reshape", "(", "Pxy", ",", "Pxy", ".", "shape", "[", ":", "(", "-", "1", ")", "]", ")", "return", "(", "freqs", ",", "Pxy", ")" ]
estimate the cross power spectral density .
train
false
54,304
def run_pyreverse():
    from pylint.pyreverse.main import Run
    Run(sys.argv[1:])
[ "def", "run_pyreverse", "(", ")", ":", "from", "pylint", ".", "pyreverse", ".", "main", "import", "Run", "Run", "(", "sys", ".", "argv", "[", "1", ":", "]", ")" ]
run pyreverse .
train
false
54,305
def getIndexedLoopFromIndexedGrid(indexedGrid):
    indexedLoop = indexedGrid[0][:]
    for row in indexedGrid[1:(-1)]:
        indexedLoop.append(row[(-1)])
    indexedLoop += indexedGrid[(-1)][::(-1)]
    for row in indexedGrid[(len(indexedGrid) - 2):0:(-1)]:
        indexedLoop.append(row[0])
    return indexedLoop
[ "def", "getIndexedLoopFromIndexedGrid", "(", "indexedGrid", ")", ":", "indexedLoop", "=", "indexedGrid", "[", "0", "]", "[", ":", "]", "for", "row", "in", "indexedGrid", "[", "1", ":", "(", "-", "1", ")", "]", ":", "indexedLoop", ".", "append", "(", "row", "[", "(", "-", "1", ")", "]", ")", "indexedLoop", "+=", "indexedGrid", "[", "(", "-", "1", ")", "]", "[", ":", ":", "(", "-", "1", ")", "]", "for", "row", "in", "indexedGrid", "[", "(", "len", "(", "indexedGrid", ")", "-", "2", ")", ":", "0", ":", "(", "-", "1", ")", "]", ":", "indexedLoop", ".", "append", "(", "row", "[", "0", "]", ")", "return", "indexedLoop" ]
get indexed loop from around the indexed grid .
train
false
54,309
@_api_version(1.21)
@_client_version('1.5.0')
def connect_container_to_network(container, network_id):
    response = _client_wrapper('connect_container_to_network', container, network_id)
    _clear_context()
    return response
[ "@", "_api_version", "(", "1.21", ")", "@", "_client_version", "(", "'1.5.0'", ")", "def", "connect_container_to_network", "(", "container", ",", "network_id", ")", ":", "response", "=", "_client_wrapper", "(", "'connect_container_to_network'", ",", "container", ",", "network_id", ")", "_clear_context", "(", ")", "return", "response" ]
connect container to network .
train
false
54,310
def addListToListTable(elementList, key, listTable):
    if (key in listTable):
        listTable[key] += elementList
    else:
        listTable[key] = elementList
[ "def", "addListToListTable", "(", "elementList", ",", "key", ",", "listTable", ")", ":", "if", "(", "key", "in", "listTable", ")", ":", "listTable", "[", "key", "]", "+=", "elementList", "else", ":", "listTable", "[", "key", "]", "=", "elementList" ]
add a list to the list table .
train
false
54,311
def test_isotonic_regression_ties_secondary_():
    x = [8, 8, 8, 10, 10, 10, 12, 12, 12, 14, 14]
    y = [21, 23.5, 23, 24, 21, 25, 21.5, 22, 19, 23.5, 25]
    y_true = [22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 24.25, 24.25]
    ir = IsotonicRegression()
    ir.fit(x, y)
    assert_array_almost_equal(ir.transform(x), y_true, 4)
    assert_array_almost_equal(ir.fit_transform(x, y), y_true, 4)
[ "def", "test_isotonic_regression_ties_secondary_", "(", ")", ":", "x", "=", "[", "8", ",", "8", ",", "8", ",", "10", ",", "10", ",", "10", ",", "12", ",", "12", ",", "12", ",", "14", ",", "14", "]", "y", "=", "[", "21", ",", "23.5", ",", "23", ",", "24", ",", "21", ",", "25", ",", "21.5", ",", "22", ",", "19", ",", "23.5", ",", "25", "]", "y_true", "=", "[", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "22.22222", ",", "24.25", ",", "24.25", "]", "ir", "=", "IsotonicRegression", "(", ")", "ir", ".", "fit", "(", "x", ",", "y", ")", "assert_array_almost_equal", "(", "ir", ".", "transform", "(", "x", ")", ",", "y_true", ",", "4", ")", "assert_array_almost_equal", "(", "ir", ".", "fit_transform", "(", "x", ",", "y", ")", ",", "y_true", ",", "4", ")" ]
test isotonic regression fit .
train
false
54,312
def publish_badges(request):
    return render(request, 'ecosystem/publish_badges.html', {'page': 'badges', 'category': 'publish'})
[ "def", "publish_badges", "(", "request", ")", ":", "return", "render", "(", "request", ",", "'ecosystem/publish_badges.html'", ",", "{", "'page'", ":", "'badges'", ",", "'category'", ":", "'publish'", "}", ")" ]
publish - marketplace badges .
train
false
54,316
def native(s):
    if (not isinstance(s, (binary_type, text_type))):
        raise TypeError(('%r is neither bytes nor unicode' % s))
    if PY3:
        if isinstance(s, binary_type):
            return s.decode('utf-8')
    elif isinstance(s, text_type):
        return s.encode('utf-8')
    return s
[ "def", "native", "(", "s", ")", ":", "if", "(", "not", "isinstance", "(", "s", ",", "(", "binary_type", ",", "text_type", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'%r is neither bytes nor unicode'", "%", "s", ")", ")", "if", "PY3", ":", "if", "isinstance", "(", "s", ",", "binary_type", ")", ":", "return", "s", ".", "decode", "(", "'utf-8'", ")", "elif", "isinstance", "(", "s", ",", "text_type", ")", ":", "return", "s", ".", "encode", "(", "'utf-8'", ")", "return", "s" ]
convert :py:class:bytes or :py:class:unicode to the native :py:class:str type .
train
true
54,317
def test_init():
    ratio = 1.0
    cc = ClusterCentroids(ratio=ratio, random_state=RND_SEED)
    assert_equal(cc.ratio, ratio)
[ "def", "test_init", "(", ")", ":", "ratio", "=", "1.0", "cc", "=", "ClusterCentroids", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ")", "assert_equal", "(", "cc", ".", "ratio", ",", "ratio", ")" ]
test the initialisation of the object .
train
false
54,318
def test_ncr_fit_sample():
    ncr = NeighbourhoodCleaningRule(random_state=RND_SEED)
    (X_resampled, y_resampled) = ncr.fit_sample(X, Y)
    X_gt = np.array([[(-1.20809175), (-1.49917302)], [(-0.60497017), (-0.66630228)], [(-0.91735824), 0.93110278], [(-0.20413357), 0.64628718], [0.35967591, 2.61186964], [(-1.55581933), 1.09609604], [1.55157493, (-1.6981518)]])
    y_gt = np.array([0, 0, 1, 1, 2, 1, 2])
    assert_array_equal(X_resampled, X_gt)
    assert_array_equal(y_resampled, y_gt)
[ "def", "test_ncr_fit_sample", "(", ")", ":", "ncr", "=", "NeighbourhoodCleaningRule", "(", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ")", "=", "ncr", ".", "fit_sample", "(", "X", ",", "Y", ")", "X_gt", "=", "np", ".", "array", "(", "[", "[", "(", "-", "1.20809175", ")", ",", "(", "-", "1.49917302", ")", "]", ",", "[", "(", "-", "0.60497017", ")", ",", "(", "-", "0.66630228", ")", "]", ",", "[", "(", "-", "0.91735824", ")", ",", "0.93110278", "]", ",", "[", "(", "-", "0.20413357", ")", ",", "0.64628718", "]", ",", "[", "0.35967591", ",", "2.61186964", "]", ",", "[", "(", "-", "1.55581933", ")", ",", "1.09609604", "]", ",", "[", "1.55157493", ",", "(", "-", "1.6981518", ")", "]", "]", ")", "y_gt", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "1", ",", "1", ",", "2", ",", "1", ",", "2", "]", ")", "assert_array_equal", "(", "X_resampled", ",", "X_gt", ")", "assert_array_equal", "(", "y_resampled", ",", "y_gt", ")" ]
test the fit sample routine .
train
false
54,320
def _get_or_default(mylist, i, default=None):
    if (i >= len(mylist)):
        return default
    else:
        return mylist[i]
[ "def", "_get_or_default", "(", "mylist", ",", "i", ",", "default", "=", "None", ")", ":", "if", "(", "i", ">=", "len", "(", "mylist", ")", ")", ":", "return", "default", "else", ":", "return", "mylist", "[", "i", "]" ]
return list item number .
train
true
54,321
def pseudo_quoteattr(value):
    return ('"%s"' % value)
[ "def", "pseudo_quoteattr", "(", "value", ")", ":", "return", "(", "'\"%s\"'", "%", "value", ")" ]
quote attributes for pseudo-xml .
train
false
54,322
def getFillOfSurroundings(surroundingLoops):
    fillOfSurroundings = []
    for surroundingLoop in surroundingLoops:
        fillOfSurroundings += surroundingLoop.getFillLoops()
    return fillOfSurroundings
[ "def", "getFillOfSurroundings", "(", "surroundingLoops", ")", ":", "fillOfSurroundings", "=", "[", "]", "for", "surroundingLoop", "in", "surroundingLoops", ":", "fillOfSurroundings", "+=", "surroundingLoop", ".", "getFillLoops", "(", ")", "return", "fillOfSurroundings" ]
get extra fill loops of surrounding loops .
train
false
54,324
def update_exploration_summary(exploration_id, contributor_id_to_add):
    exploration = get_exploration_by_id(exploration_id)
    exp_summary = compute_summary_of_exploration(exploration, contributor_id_to_add)
    save_exploration_summary(exp_summary)
[ "def", "update_exploration_summary", "(", "exploration_id", ",", "contributor_id_to_add", ")", ":", "exploration", "=", "get_exploration_by_id", "(", "exploration_id", ")", "exp_summary", "=", "compute_summary_of_exploration", "(", "exploration", ",", "contributor_id_to_add", ")", "save_exploration_summary", "(", "exp_summary", ")" ]
update the summary of an exploration .
train
false
54,325
def clean_fuzzable_request(fuzzable_request, dc_handler=clean_data_container):
    res = ('(%s)-' % fuzzable_request.get_method().upper())
    res += clean_url(fuzzable_request.get_uri(), dc_handler=dc_handler)
    raw_data = fuzzable_request.get_raw_data()
    if raw_data:
        res += ('!' + dc_handler(raw_data))
    return res
[ "def", "clean_fuzzable_request", "(", "fuzzable_request", ",", "dc_handler", "=", "clean_data_container", ")", ":", "res", "=", "(", "'(%s)-'", "%", "fuzzable_request", ".", "get_method", "(", ")", ".", "upper", "(", ")", ")", "res", "+=", "clean_url", "(", "fuzzable_request", ".", "get_uri", "(", ")", ",", "dc_handler", "=", "dc_handler", ")", "raw_data", "=", "fuzzable_request", ".", "get_raw_data", "(", ")", "if", "raw_data", ":", "res", "+=", "(", "'!'", "+", "dc_handler", "(", "raw_data", ")", ")", "return", "res" ]
we receive a fuzzable request and output includes the http method and any parameters which might be sent over http post-data in the request are appended to the result as query string params .
train
false
54,326
@register.simple_tag
def absolute_asset_url(module, path):
    return absolute_uri(get_asset_url(module, path))
[ "@", "register", ".", "simple_tag", "def", "absolute_asset_url", "(", "module", ",", "path", ")", ":", "return", "absolute_uri", "(", "get_asset_url", "(", "module", ",", "path", ")", ")" ]
returns a versioned absolute asset url .
train
false
54,330
@pytest.fixture
def keyhint(qtbot, config_stub, key_config_stub):
    config_stub.data = {'colors': {'keyhint.fg': 'white', 'keyhint.fg.suffix': 'yellow', 'keyhint.bg': 'black'}, 'fonts': {'keyhint': 'Comic Sans'}, 'ui': {'keyhint-blacklist': '', 'status-position': 'bottom'}}
    keyhint = KeyHintView(0, None)
    qtbot.add_widget(keyhint)
    assert (keyhint.text() == '')
    return keyhint
[ "@", "pytest", ".", "fixture", "def", "keyhint", "(", "qtbot", ",", "config_stub", ",", "key_config_stub", ")", ":", "config_stub", ".", "data", "=", "{", "'colors'", ":", "{", "'keyhint.fg'", ":", "'white'", ",", "'keyhint.fg.suffix'", ":", "'yellow'", ",", "'keyhint.bg'", ":", "'black'", "}", ",", "'fonts'", ":", "{", "'keyhint'", ":", "'Comic Sans'", "}", ",", "'ui'", ":", "{", "'keyhint-blacklist'", ":", "''", ",", "'status-position'", ":", "'bottom'", "}", "}", "keyhint", "=", "KeyHintView", "(", "0", ",", "None", ")", "qtbot", ".", "add_widget", "(", "keyhint", ")", "assert", "(", "keyhint", ".", "text", "(", ")", "==", "''", ")", "return", "keyhint" ]
fixture to initialize a keyhintview .
train
false
54,331
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
    IMPL.aggregate_metadata_add(context, aggregate_id, metadata, set_delete)
[ "def", "aggregate_metadata_add", "(", "context", ",", "aggregate_id", ",", "metadata", ",", "set_delete", "=", "False", ")", ":", "IMPL", ".", "aggregate_metadata_add", "(", "context", ",", "aggregate_id", ",", "metadata", ",", "set_delete", ")" ]
add/update metadata .
train
false
54,332
def ensure_distance(dist):
    try:
        km = dist.km
    except AttributeError:
        raise SpatialError((u"'%s' does not appear to be a 'Distance' object." % dist))
    return dist
[ "def", "ensure_distance", "(", "dist", ")", ":", "try", ":", "km", "=", "dist", ".", "km", "except", "AttributeError", ":", "raise", "SpatialError", "(", "(", "u\"'%s' does not appear to be a 'Distance' object.\"", "%", "dist", ")", ")", "return", "dist" ]
makes sure the parameter passed in is a distance object .
train
false
54,334
def iter_style_files(style_dir):
    for path in os.listdir(style_dir):
        filename = os.path.basename(path)
        if is_style_file(filename):
            match = STYLE_FILE_PATTERN.match(filename)
            path = os.path.abspath(os.path.join(style_dir, path))
            (yield (path, match.groups()[0]))
[ "def", "iter_style_files", "(", "style_dir", ")", ":", "for", "path", "in", "os", ".", "listdir", "(", "style_dir", ")", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "if", "is_style_file", "(", "filename", ")", ":", "match", "=", "STYLE_FILE_PATTERN", ".", "match", "(", "filename", ")", "path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "style_dir", ",", "path", ")", ")", "(", "yield", "(", "path", ",", "match", ".", "groups", "(", ")", "[", "0", "]", ")", ")" ]
yield file path and name of styles in the given directory .
train
false
54,336
def is_trunk_service_port(port_name):
    return (is_trunk_bridge(port_name) or (port_name[:2] in (tman.TrunkParentPort.DEV_PREFIX, tman.SubPort.DEV_PREFIX)))
[ "def", "is_trunk_service_port", "(", "port_name", ")", ":", "return", "(", "is_trunk_bridge", "(", "port_name", ")", "or", "(", "port_name", "[", ":", "2", "]", "in", "(", "tman", ".", "TrunkParentPort", ".", "DEV_PREFIX", ",", "tman", ".", "SubPort", ".", "DEV_PREFIX", ")", ")", ")" ]
true if the port is any of the ports used to realize a trunk .
train
false
54,338
def spatial_dist_connectivity(src, dist, verbose=None):
    return spatio_temporal_dist_connectivity(src, 1, dist)
[ "def", "spatial_dist_connectivity", "(", "src", ",", "dist", ",", "verbose", "=", "None", ")", ":", "return", "spatio_temporal_dist_connectivity", "(", "src", ",", "1", ",", "dist", ")" ]
compute connectivity from distances in a source space .
train
false
54,339
def get_can_enable_dns():
    if has_module(u'DNS'):
        return (True, None)
    else:
        return (False, _(u'PyDNS, which is required to find the domain controller, is not installed.'))
[ "def", "get_can_enable_dns", "(", ")", ":", "if", "has_module", "(", "u'DNS'", ")", ":", "return", "(", "True", ",", "None", ")", "else", ":", "return", "(", "False", ",", "_", "(", "u'PyDNS, which is required to find the domain controller, is not installed.'", ")", ")" ]
check whether we can query dns to find the domain controller to use .
train
false
54,341
def add_check(module, check):
    if ((not check.name) and (not service_id)):
        module.fail_json(msg='a check name is required for a node level check, one not attached to a service')
    consul_api = get_consul_api(module)
    check.register(consul_api)
    module.exit_json(changed=True, check_id=check.check_id, check_name=check.name, script=check.script, interval=check.interval, ttl=check.ttl, http=check.http, timeout=check.timeout, service_id=check.service_id)
[ "def", "add_check", "(", "module", ",", "check", ")", ":", "if", "(", "(", "not", "check", ".", "name", ")", "and", "(", "not", "service_id", ")", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "'a check name is required for a node level check, one not attached to a service'", ")", "consul_api", "=", "get_consul_api", "(", "module", ")", "check", ".", "register", "(", "consul_api", ")", "module", ".", "exit_json", "(", "changed", "=", "True", ",", "check_id", "=", "check", ".", "check_id", ",", "check_name", "=", "check", ".", "name", ",", "script", "=", "check", ".", "script", ",", "interval", "=", "check", ".", "interval", ",", "ttl", "=", "check", ".", "ttl", ",", "http", "=", "check", ".", "http", ",", "timeout", "=", "check", ".", "timeout", ",", "service_id", "=", "check", ".", "service_id", ")" ]
registers a check with the given agent .
train
false
54,343
def make_scorer(score_func, greater_is_better=True, needs_proba=False, needs_threshold=False, **kwargs):
    sign = (1 if greater_is_better else (-1))
    if (needs_proba and needs_threshold):
        raise ValueError('Set either needs_proba or needs_threshold to True, but not both.')
    if needs_proba:
        cls = _ProbaScorer
    elif needs_threshold:
        cls = _ThresholdScorer
    else:
        cls = _PredictScorer
    return cls(score_func, sign, kwargs)
[ "def", "make_scorer", "(", "score_func", ",", "greater_is_better", "=", "True", ",", "needs_proba", "=", "False", ",", "needs_threshold", "=", "False", ",", "**", "kwargs", ")", ":", "sign", "=", "(", "1", "if", "greater_is_better", "else", "(", "-", "1", ")", ")", "if", "(", "needs_proba", "and", "needs_threshold", ")", ":", "raise", "ValueError", "(", "'Set either needs_proba or needs_threshold to True, but not both.'", ")", "if", "needs_proba", ":", "cls", "=", "_ProbaScorer", "elif", "needs_threshold", ":", "cls", "=", "_ThresholdScorer", "else", ":", "cls", "=", "_PredictScorer", "return", "cls", "(", "score_func", ",", "sign", ",", "kwargs", ")" ]
make a scorer from a performance metric or loss function .
train
false
54,344
def splitByCommas(str):
    out = []
    if (not str):
        return out
    current = ''
    openQuotes = '"\'<({'
    closeQuotes = '"\'>)}'
    matchingQuote = ''
    insideQuotes = False
    nextIsLiteral = False
    for char in str:
        if nextIsLiteral:
            current += char
            nextIsLiteral = False
        elif insideQuotes:
            if (char == '\\'):
                nextIsLiteral = True
            else:
                current += char
                if (char == matchingQuote):
                    insideQuotes = False
        elif (char == ','):
            out.append(current.strip())
            current = ''
        else:
            current += char
            quoteIndex = openQuotes.find(char)
            if (quoteIndex > (-1)):
                matchingQuote = closeQuotes[quoteIndex]
                insideQuotes = True
    out.append(current.strip())
    return out
[ "def", "splitByCommas", "(", "str", ")", ":", "out", "=", "[", "]", "if", "(", "not", "str", ")", ":", "return", "out", "current", "=", "''", "openQuotes", "=", "'\"\\'<({'", "closeQuotes", "=", "'\"\\'>)}'", "matchingQuote", "=", "''", "insideQuotes", "=", "False", "nextIsLiteral", "=", "False", "for", "char", "in", "str", ":", "if", "nextIsLiteral", ":", "current", "+=", "char", "nextIsLiteral", "=", "False", "elif", "insideQuotes", ":", "if", "(", "char", "==", "'\\\\'", ")", ":", "nextIsLiteral", "=", "True", "else", ":", "current", "+=", "char", "if", "(", "char", "==", "matchingQuote", ")", ":", "insideQuotes", "=", "False", "elif", "(", "char", "==", "','", ")", ":", "out", ".", "append", "(", "current", ".", "strip", "(", ")", ")", "current", "=", "''", "else", ":", "current", "+=", "char", "quoteIndex", "=", "openQuotes", ".", "find", "(", "char", ")", "if", "(", "quoteIndex", ">", "(", "-", "1", ")", ")", ":", "matchingQuote", "=", "closeQuotes", "[", "quoteIndex", "]", "insideQuotes", "=", "True", "out", ".", "append", "(", "current", ".", "strip", "(", ")", ")", "return", "out" ]
split a string by unenclosed commas: that is .
train
false
54,345
def read_style_directory(style_dir):
    styles = dict()
    for (path, name) in iter_style_files(style_dir):
        with warnings.catch_warnings(record=True) as warns:
            styles[name] = rc_params_from_file(path, use_default_template=False)
        for w in warns:
            message = (u'In %s: %s' % (path, w.message))
            warnings.warn(message)
    return styles
[ "def", "read_style_directory", "(", "style_dir", ")", ":", "styles", "=", "dict", "(", ")", "for", "(", "path", ",", "name", ")", "in", "iter_style_files", "(", "style_dir", ")", ":", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "warns", ":", "styles", "[", "name", "]", "=", "rc_params_from_file", "(", "path", ",", "use_default_template", "=", "False", ")", "for", "w", "in", "warns", ":", "message", "=", "(", "u'In %s: %s'", "%", "(", "path", ",", "w", ".", "message", ")", ")", "warnings", ".", "warn", "(", "message", ")", "return", "styles" ]
return dictionary of styles defined in style_dir .
train
false
54,347
def render_home(framework_name):
    template = env.get_template('index.html')
    return template.render(providers=ASSEMBLED_CONFIG, oauth2_providers=OAUTH2_PROVIDERS, oauth1_providers=OAUTH1_PROVIDERS, openid_providers=OPENID_PROVIDERS, framework_name=framework_name)
[ "def", "render_home", "(", "framework_name", ")", ":", "template", "=", "env", ".", "get_template", "(", "'index.html'", ")", "return", "template", ".", "render", "(", "providers", "=", "ASSEMBLED_CONFIG", ",", "oauth2_providers", "=", "OAUTH2_PROVIDERS", ",", "oauth1_providers", "=", "OAUTH1_PROVIDERS", ",", "openid_providers", "=", "OPENID_PROVIDERS", ",", "framework_name", "=", "framework_name", ")" ]
renders the homepage .
train
false
54,348
def educateDashesOldSchoolInverted(str):
    str = re.sub('---', '&#8211;', str)
    str = re.sub('--', '&#8212;', str)
    return str
[ "def", "educateDashesOldSchoolInverted", "(", "str", ")", ":", "str", "=", "re", ".", "sub", "(", "'---'", ",", "'&#8211;'", ",", "str", ")", "str", "=", "re", ".", "sub", "(", "'--'", ",", "'&#8212;'", ",", "str", ")", "return", "str" ]
parameter: string .
train
false
54,349
def get_optional_parameters(dictionary):
    return {key: dictionary[key] for key in OPTIONAL_PARAMETERS if (key in dictionary)}
[ "def", "get_optional_parameters", "(", "dictionary", ")", ":", "return", "{", "key", ":", "dictionary", "[", "key", "]", "for", "key", "in", "OPTIONAL_PARAMETERS", "if", "(", "key", "in", "dictionary", ")", "}" ]
extract all optional lti parameters from a dictionary .
train
false
54,350
def retention_action(client, stream_name, retention_period=24, action='increase', check_mode=False):
    success = False
    err_msg = ''
    params = {'StreamName': stream_name}
    try:
        if (not check_mode):
            if (action == 'increase'):
                params['RetentionPeriodHours'] = retention_period
                client.increase_stream_retention_period(**params)
                success = True
                err_msg = 'Retention Period increased successfully to {0}'.format(retention_period)
            elif (action == 'decrease'):
                params['RetentionPeriodHours'] = retention_period
                client.decrease_stream_retention_period(**params)
                success = True
                err_msg = 'Retention Period decreased successfully to {0}'.format(retention_period)
            else:
                err_msg = 'Invalid action {0}'.format(action)
        elif (action == 'increase'):
            success = True
        elif (action == 'decrease'):
            success = True
        else:
            err_msg = 'Invalid action {0}'.format(action)
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return (success, err_msg)
[ "def", "retention_action", "(", "client", ",", "stream_name", ",", "retention_period", "=", "24", ",", "action", "=", "'increase'", ",", "check_mode", "=", "False", ")", ":", "success", "=", "False", "err_msg", "=", "''", "params", "=", "{", "'StreamName'", ":", "stream_name", "}", "try", ":", "if", "(", "not", "check_mode", ")", ":", "if", "(", "action", "==", "'increase'", ")", ":", "params", "[", "'RetentionPeriodHours'", "]", "=", "retention_period", "client", ".", "increase_stream_retention_period", "(", "**", "params", ")", "success", "=", "True", "err_msg", "=", "'Retention Period increased successfully to {0}'", ".", "format", "(", "retention_period", ")", "elif", "(", "action", "==", "'decrease'", ")", ":", "params", "[", "'RetentionPeriodHours'", "]", "=", "retention_period", "client", ".", "decrease_stream_retention_period", "(", "**", "params", ")", "success", "=", "True", "err_msg", "=", "'Retention Period decreased successfully to {0}'", ".", "format", "(", "retention_period", ")", "else", ":", "err_msg", "=", "'Invalid action {0}'", ".", "format", "(", "action", ")", "elif", "(", "action", "==", "'increase'", ")", ":", "success", "=", "True", "elif", "(", "action", "==", "'decrease'", ")", ":", "success", "=", "True", "else", ":", "err_msg", "=", "'Invalid action {0}'", ".", "format", "(", "action", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "e", ":", "err_msg", "=", "str", "(", "e", ")", "return", "(", "success", ",", "err_msg", ")" ]
increase or decreaste the retention of messages in the kinesis stream .
train
false
54,351
def test_javascript_url():
    s = 'javascript:document.vulnerable'
    eq_(s, linkify(s))
[ "def", "test_javascript_url", "(", ")", ":", "s", "=", "'javascript:document.vulnerable'", "eq_", "(", "s", ",", "linkify", "(", "s", ")", ")" ]
javascript: urls should never be linkified .
train
false
54,352
def getSquareLoopWiddershins(beginComplex, endComplex):
    loop = [beginComplex, complex(endComplex.real, beginComplex.imag), endComplex]
    loop.append(complex(beginComplex.real, endComplex.imag))
    return loop
[ "def", "getSquareLoopWiddershins", "(", "beginComplex", ",", "endComplex", ")", ":", "loop", "=", "[", "beginComplex", ",", "complex", "(", "endComplex", ".", "real", ",", "beginComplex", ".", "imag", ")", ",", "endComplex", "]", "loop", ".", "append", "(", "complex", "(", "beginComplex", ".", "real", ",", "endComplex", ".", "imag", ")", ")", "return", "loop" ]
get a square loop from the beginning to the end and back .
train
false
54,353
def ipython_display(clip, filetype=None, maxduration=60, t=None, fps=None, rd_kwargs=None, center=True, **html_kwargs):
    if (not ipython_available):
        raise ImportError('Only works inside an IPython Notebook')
    if (rd_kwargs is None):
        rd_kwargs = {}
    if (fps is not None):
        rd_kwargs['fps'] = fps
    if (t is not None):
        clip = clip.to_ImageClip(t)
    return HTML2(html_embed(clip, filetype=filetype, maxduration=maxduration, center=center, rd_kwargs=rd_kwargs, **html_kwargs))
[ "def", "ipython_display", "(", "clip", ",", "filetype", "=", "None", ",", "maxduration", "=", "60", ",", "t", "=", "None", ",", "fps", "=", "None", ",", "rd_kwargs", "=", "None", ",", "center", "=", "True", ",", "**", "html_kwargs", ")", ":", "if", "(", "not", "ipython_available", ")", ":", "raise", "ImportError", "(", "'Only works inside an IPython Notebook'", ")", "if", "(", "rd_kwargs", "is", "None", ")", ":", "rd_kwargs", "=", "{", "}", "if", "(", "fps", "is", "not", "None", ")", ":", "rd_kwargs", "[", "'fps'", "]", "=", "fps", "if", "(", "t", "is", "not", "None", ")", ":", "clip", "=", "clip", ".", "to_ImageClip", "(", "t", ")", "return", "HTML2", "(", "html_embed", "(", "clip", ",", "filetype", "=", "filetype", ",", "maxduration", "=", "maxduration", ",", "center", "=", "center", ",", "rd_kwargs", "=", "rd_kwargs", ",", "**", "html_kwargs", ")", ")" ]
clip either the name of a file .
train
false
54,354
def load_csr(*names):
    loader = _guess_loader(names[(-1)], OpenSSL.crypto.FILETYPE_PEM, OpenSSL.crypto.FILETYPE_ASN1)
    return OpenSSL.crypto.load_certificate_request(loader, load_vector(*names))
[ "def", "load_csr", "(", "*", "names", ")", ":", "loader", "=", "_guess_loader", "(", "names", "[", "(", "-", "1", ")", "]", ",", "OpenSSL", ".", "crypto", ".", "FILETYPE_PEM", ",", "OpenSSL", ".", "crypto", ".", "FILETYPE_ASN1", ")", "return", "OpenSSL", ".", "crypto", ".", "load_certificate_request", "(", "loader", ",", "load_vector", "(", "*", "names", ")", ")" ]
load certificate request .
train
false
54,355
def auto_openstack_logging(interface, original):
    return interface_decorator('auto_openstack_logging', interface, _openstack_logged_method, original)
[ "def", "auto_openstack_logging", "(", "interface", ",", "original", ")", ":", "return", "interface_decorator", "(", "'auto_openstack_logging'", ",", "interface", ",", "_openstack_logged_method", ",", "original", ")" ]
create a class decorator which will add openstack-specific exception logging versions versions of all of the methods on interface .
train
false
54,356
def join_options(options):
    rv = []
    any_prefix_is_slash = False
    for opt in options:
        prefix = split_opt(opt)[0]
        if (prefix == '/'):
            any_prefix_is_slash = True
        rv.append((len(prefix), opt))
    rv.sort(key=(lambda x: x[0]))
    rv = ', '.join((x[1] for x in rv))
    return (rv, any_prefix_is_slash)
[ "def", "join_options", "(", "options", ")", ":", "rv", "=", "[", "]", "any_prefix_is_slash", "=", "False", "for", "opt", "in", "options", ":", "prefix", "=", "split_opt", "(", "opt", ")", "[", "0", "]", "if", "(", "prefix", "==", "'/'", ")", ":", "any_prefix_is_slash", "=", "True", "rv", ".", "append", "(", "(", "len", "(", "prefix", ")", ",", "opt", ")", ")", "rv", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", "[", "0", "]", ")", ")", "rv", "=", "', '", ".", "join", "(", "(", "x", "[", "1", "]", "for", "x", "in", "rv", ")", ")", "return", "(", "rv", ",", "any_prefix_is_slash", ")" ]
given a list of option strings this joins them in the most appropriate way and returns them in the form where the second item in the tuple is a flag that indicates if any of the option prefixes was a slash .
train
true
54,357
def fake_view(request):
    return HttpResponse(json.dumps(request.META))
[ "def", "fake_view", "(", "request", ")", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "request", ".", "META", ")", ")" ]
fake view that returns the request meta as a json-encoded string .
train
false
54,358
def user_field(user, field, *args):
    if (field and hasattr(user, field)):
        if args:
            v = args[0]
            if v:
                User = get_user_model()
                v = v[0:User._meta.get_field(field).max_length]
            setattr(user, field, v)
        else:
            return getattr(user, field)
[ "def", "user_field", "(", "user", ",", "field", ",", "*", "args", ")", ":", "if", "(", "field", "and", "hasattr", "(", "user", ",", "field", ")", ")", ":", "if", "args", ":", "v", "=", "args", "[", "0", "]", "if", "v", ":", "User", "=", "get_user_model", "(", ")", "v", "=", "v", "[", "0", ":", "User", ".", "_meta", ".", "get_field", "(", "field", ")", ".", "max_length", "]", "setattr", "(", "user", ",", "field", ",", "v", ")", "else", ":", "return", "getattr", "(", "user", ",", "field", ")" ]
gets or sets user model fields .
train
true
54,359
def set_marker(folder):
    name = cfg.marker_file()
    if name:
        path = os.path.join(folder, name)
        logging.debug('Create marker file %s', path)
        try:
            fp = open(path, 'w')
            fp.close()
        except:
            logging.info('Cannot create marker file %s', path)
            logging.info('Traceback: ', exc_info=True)
            name = None
    return name
[ "def", "set_marker", "(", "folder", ")", ":", "name", "=", "cfg", ".", "marker_file", "(", ")", "if", "name", ":", "path", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "name", ")", "logging", ".", "debug", "(", "'Create marker file %s'", ",", "path", ")", "try", ":", "fp", "=", "open", "(", "path", ",", "'w'", ")", "fp", ".", "close", "(", ")", "except", ":", "logging", ".", "info", "(", "'Cannot create marker file %s'", ",", "path", ")", "logging", ".", "info", "(", "'Traceback: '", ",", "exc_info", "=", "True", ")", "name", "=", "None", "return", "name" ]
set marker file and return name .
train
false
54,361
def commit_transaction(hostname, username, password, label):
    bigip_session = _build_session(username, password)
    trans_id = __salt__['grains.get']('bigip_f5_trans:{label}'.format(label=label))
    if trans_id:
        payload = {}
        payload['state'] = 'VALIDATING'
        try:
            response = bigip_session.patch((BIG_IP_URL_BASE.format(host=hostname) + '/transaction/{trans_id}'.format(trans_id=trans_id)), data=json.dumps(payload))
            return _load_response(response)
        except requests.exceptions.ConnectionError as e:
            return _load_connection_error(hostname, e)
    else:
        return 'Error: the label for this transaction was not defined as a grain. Begin a new transaction using the bigip.start_transaction function'
[ "def", "commit_transaction", "(", "hostname", ",", "username", ",", "password", ",", "label", ")", ":", "bigip_session", "=", "_build_session", "(", "username", ",", "password", ")", "trans_id", "=", "__salt__", "[", "'grains.get'", "]", "(", "'bigip_f5_trans:{label}'", ".", "format", "(", "label", "=", "label", ")", ")", "if", "trans_id", ":", "payload", "=", "{", "}", "payload", "[", "'state'", "]", "=", "'VALIDATING'", "try", ":", "response", "=", "bigip_session", ".", "patch", "(", "(", "BIG_IP_URL_BASE", ".", "format", "(", "host", "=", "hostname", ")", "+", "'/transaction/{trans_id}'", ".", "format", "(", "trans_id", "=", "trans_id", ")", ")", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ")", "return", "_load_response", "(", "response", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", "as", "e", ":", "return", "_load_connection_error", "(", "hostname", ",", "e", ")", "else", ":", "return", "'Error: the label for this transaction was not defined as a grain. Begin a new transaction using the bigip.start_transaction function'" ]
a function to connect to a bigip device and commit an existing transaction .
train
true
54,362
@register_stabilize
@register_specialize
@register_canonicalize
@gof.local_optimizer([T.Elemwise])
def local_expm1(node):
    if (isinstance(node.op, T.Elemwise) and isinstance(node.op.scalar_op, theano.scalar.basic.Sub)):
        (in1, in2) = node.inputs
        out = node.outputs[0]
        if (in1.owner and isinstance(in1.owner.op, T.Elemwise) and isinstance(in1.owner.op.scalar_op, theano.scalar.basic.Exp) and (T.extract_constant(in2, only_process_constants=False) == 1)):
            in11 = in1.owner.inputs[0]
            new_out = T.expm1(in11)
            if (new_out.dtype != out.dtype):
                new_out = T.cast(new_out, dtype=out.dtype)
            if (new_out.type != out.type):
                return
            return [new_out]
[ "@", "register_stabilize", "@", "register_specialize", "@", "register_canonicalize", "@", "gof", ".", "local_optimizer", "(", "[", "T", ".", "Elemwise", "]", ")", "def", "local_expm1", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "T", ".", "Elemwise", ")", "and", "isinstance", "(", "node", ".", "op", ".", "scalar_op", ",", "theano", ".", "scalar", ".", "basic", ".", "Sub", ")", ")", ":", "(", "in1", ",", "in2", ")", "=", "node", ".", "inputs", "out", "=", "node", ".", "outputs", "[", "0", "]", "if", "(", "in1", ".", "owner", "and", "isinstance", "(", "in1", ".", "owner", ".", "op", ",", "T", ".", "Elemwise", ")", "and", "isinstance", "(", "in1", ".", "owner", ".", "op", ".", "scalar_op", ",", "theano", ".", "scalar", ".", "basic", ".", "Exp", ")", "and", "(", "T", ".", "extract_constant", "(", "in2", ",", "only_process_constants", "=", "False", ")", "==", "1", ")", ")", ":", "in11", "=", "in1", ".", "owner", ".", "inputs", "[", "0", "]", "new_out", "=", "T", ".", "expm1", "(", "in11", ")", "if", "(", "new_out", ".", "dtype", "!=", "out", ".", "dtype", ")", ":", "new_out", "=", "T", ".", "cast", "(", "new_out", ",", "dtype", "=", "out", ".", "dtype", ")", "if", "(", "new_out", ".", "type", "!=", "out", ".", "type", ")", ":", "return", "return", "[", "new_out", "]" ]
this optimization detects exp(a)-1 and converts this to expm1(a) .
train
false
54,363
def firstof(parser, token):
    bits = token.split_contents()[1:]
    if (len(bits) < 1):
        raise TemplateSyntaxError("'firstof' statement requires at least one argument")
    return FirstOfNode([parser.compile_filter(bit) for bit in bits])
[ "def", "firstof", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "[", "1", ":", "]", "if", "(", "len", "(", "bits", ")", "<", "1", ")", ":", "raise", "TemplateSyntaxError", "(", "\"'firstof' statement requires at least one argument\"", ")", "return", "FirstOfNode", "(", "[", "parser", ".", "compile_filter", "(", "bit", ")", "for", "bit", "in", "bits", "]", ")" ]
this is the future version of firstof with auto-escaping .
train
false
54,364
def _list_interface_private_addrs(eni_desc):
    primary = eni_desc.get('privateIpAddress')
    if (not primary):
        return None
    addresses = [primary]
    lst = eni_desc.get('privateIpAddressesSet', {}).get('item', [])
    if (not isinstance(lst, list)):
        return addresses
    for entry in lst:
        if (entry.get('primary') == 'true'):
            continue
        if entry.get('privateIpAddress'):
            addresses.append(entry.get('privateIpAddress'))
    return addresses
[ "def", "_list_interface_private_addrs", "(", "eni_desc", ")", ":", "primary", "=", "eni_desc", ".", "get", "(", "'privateIpAddress'", ")", "if", "(", "not", "primary", ")", ":", "return", "None", "addresses", "=", "[", "primary", "]", "lst", "=", "eni_desc", ".", "get", "(", "'privateIpAddressesSet'", ",", "{", "}", ")", ".", "get", "(", "'item'", ",", "[", "]", ")", "if", "(", "not", "isinstance", "(", "lst", ",", "list", ")", ")", ":", "return", "addresses", "for", "entry", "in", "lst", ":", "if", "(", "entry", ".", "get", "(", "'primary'", ")", "==", "'true'", ")", ":", "continue", "if", "entry", ".", "get", "(", "'privateIpAddress'", ")", ":", "addresses", ".", "append", "(", "entry", ".", "get", "(", "'privateIpAddress'", ")", ")", "return", "addresses" ]
returns a list of all of the private ip addresses attached to a network interface .
train
true
54,365
def _detect_gce_environment():
    http = transport.get_http_object(timeout=GCE_METADATA_TIMEOUT)
    try:
        (response, _) = transport.request(http, _GCE_METADATA_URI, headers=_GCE_HEADERS)
        return ((response.status == http_client.OK) and (response.get(_METADATA_FLAVOR_HEADER) == _DESIRED_METADATA_FLAVOR))
    except socket.error:
        logger.info('Timeout attempting to reach GCE metadata service.')
        return False
[ "def", "_detect_gce_environment", "(", ")", ":", "http", "=", "transport", ".", "get_http_object", "(", "timeout", "=", "GCE_METADATA_TIMEOUT", ")", "try", ":", "(", "response", ",", "_", ")", "=", "transport", ".", "request", "(", "http", ",", "_GCE_METADATA_URI", ",", "headers", "=", "_GCE_HEADERS", ")", "return", "(", "(", "response", ".", "status", "==", "http_client", ".", "OK", ")", "and", "(", "response", ".", "get", "(", "_METADATA_FLAVOR_HEADER", ")", "==", "_DESIRED_METADATA_FLAVOR", ")", ")", "except", "socket", ".", "error", ":", "logger", ".", "info", "(", "'Timeout attempting to reach GCE metadata service.'", ")", "return", "False" ]
determine if the current environment is compute engine .
train
true
54,366
def test_cache_remove_data(config_stub, tmpdir):
    config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': False}}
    url = 'http://qutebrowser.org'
    disk_cache = cache.DiskCache(str(tmpdir))
    preload_cache(disk_cache, url)
    assert (disk_cache.cacheSize() > 0)
    assert disk_cache.remove(QUrl(url))
    assert (disk_cache.cacheSize() == 0)
[ "def", "test_cache_remove_data", "(", "config_stub", ",", "tmpdir", ")", ":", "config_stub", ".", "data", "=", "{", "'storage'", ":", "{", "'cache-size'", ":", "1024", "}", ",", "'general'", ":", "{", "'private-browsing'", ":", "False", "}", "}", "url", "=", "'http://qutebrowser.org'", "disk_cache", "=", "cache", ".", "DiskCache", "(", "str", "(", "tmpdir", ")", ")", "preload_cache", "(", "disk_cache", ",", "url", ")", "assert", "(", "disk_cache", ".", "cacheSize", "(", ")", ">", "0", ")", "assert", "disk_cache", ".", "remove", "(", "QUrl", "(", "url", ")", ")", "assert", "(", "disk_cache", ".", "cacheSize", "(", ")", "==", "0", ")" ]
test if a previously inserted entry can be removed from the cache .
train
false
54,367
def _executeExternalCmdAndReapStdout(args):
    _debugOut(('_executeExternalCmdAndReapStdout: Starting...\n<%s>' % (args,)))
    p = subprocess.Popen(args, env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    _debugOut(('Process started for <%s>' % (args,)))
    (stdoutData, stderrData) = p.communicate()
    _debugOut(((('Process completed for <%s>: exit status=%s, ' + 'stdoutDataType=%s, stdoutData=<%s>, stderrData=<%s>') % (args, p.returncode, type(stdoutData), stdoutData, stderrData))))
    result = dict(exitStatus=p.returncode, stdoutData=stdoutData, stderrData=stderrData)
    _debugOut(('_executeExternalCmdAndReapStdout for <%s>: result=\n%s' % (args, pprint.pformat(result, indent=4))))
    return result
[ "def", "_executeExternalCmdAndReapStdout", "(", "args", ")", ":", "_debugOut", "(", "(", "'_executeExternalCmdAndReapStdout: Starting...\\n<%s>'", "%", "(", "args", ",", ")", ")", ")", "p", "=", "subprocess", ".", "Popen", "(", "args", ",", "env", "=", "os", ".", "environ", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "_debugOut", "(", "(", "'Process started for <%s>'", "%", "(", "args", ",", ")", ")", ")", "(", "stdoutData", ",", "stderrData", ")", "=", "p", ".", "communicate", "(", ")", "_debugOut", "(", "(", "(", "'Process completed for <%s>: exit status=%s, '", "+", "'stdoutDataType=%s, stdoutData=<%s>, stderrData=<%s>'", ")", "%", "(", "args", ",", "p", ".", "returncode", ",", "type", "(", "stdoutData", ")", ",", "stdoutData", ",", "stderrData", ")", ")", ")", "result", "=", "dict", "(", "exitStatus", "=", "p", ".", "returncode", ",", "stdoutData", "=", "stdoutData", ",", "stderrData", "=", "stderrData", ")", "_debugOut", "(", "(", "'_executeExternalCmdAndReapStdout for <%s>: result=\\n%s'", "%", "(", "args", ",", "pprint", ".", "pformat", "(", "result", ",", "indent", "=", "4", ")", ")", ")", ")", "return", "result" ]
args: args list as defined for the args parameter in subprocess .
train
false
54,368
def sfv_check(sfv_path):
    failed = []
    try:
        fp = open(sfv_path, 'r')
    except:
        logging.info('Cannot open SFV file %s', sfv_path)
        failed.append(unicoder(sfv_path))
        return failed
    root = os.path.split(sfv_path)[0]
    for line in fp:
        line = line.strip('\n\r ')
        if (line and (line[0] != ';')):
            x = line.rfind(' ')
            if (x > 0):
                filename = platform_encode(line[:x].strip())
                checksum = line[x:].strip()
                path = os.path.join(root, filename)
                if os.path.exists(path):
                    if crc_check(path, checksum):
                        logging.debug('File %s passed SFV check', path)
                    else:
                        logging.info('File %s did not pass SFV check', path)
                        failed.append(unicoder(filename))
                else:
                    logging.info('File %s missing in SFV check', path)
                    failed.append(unicoder(filename))
    fp.close()
    return failed
[ "def", "sfv_check", "(", "sfv_path", ")", ":", "failed", "=", "[", "]", "try", ":", "fp", "=", "open", "(", "sfv_path", ",", "'r'", ")", "except", ":", "logging", ".", "info", "(", "'Cannot open SFV file %s'", ",", "sfv_path", ")", "failed", ".", "append", "(", "unicoder", "(", "sfv_path", ")", ")", "return", "failed", "root", "=", "os", ".", "path", ".", "split", "(", "sfv_path", ")", "[", "0", "]", "for", "line", "in", "fp", ":", "line", "=", "line", ".", "strip", "(", "'\\n\\r '", ")", "if", "(", "line", "and", "(", "line", "[", "0", "]", "!=", "';'", ")", ")", ":", "x", "=", "line", ".", "rfind", "(", "' '", ")", "if", "(", "x", ">", "0", ")", ":", "filename", "=", "platform_encode", "(", "line", "[", ":", "x", "]", ".", "strip", "(", ")", ")", "checksum", "=", "line", "[", "x", ":", "]", ".", "strip", "(", ")", "path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "if", "crc_check", "(", "path", ",", "checksum", ")", ":", "logging", ".", "debug", "(", "'File %s passed SFV check'", ",", "path", ")", "else", ":", "logging", ".", "info", "(", "'File %s did not pass SFV check'", ",", "path", ")", "failed", ".", "append", "(", "unicoder", "(", "filename", ")", ")", "else", ":", "logging", ".", "info", "(", "'File %s missing in SFV check'", ",", "path", ")", "failed", ".", "append", "(", "unicoder", "(", "filename", ")", ")", "fp", ".", "close", "(", ")", "return", "failed" ]
verify files using sfv file .
train
false
54,369
def home_link(): return {'title': 'home', 'href': '/'}
[ "def", "home_link", "(", ")", ":", "return", "{", "'title'", ":", "'home'", ",", "'href'", ":", "'/'", "}" ]
returns a link to the api entry point/home page .
train
false
54,370
def assert_array_less(x, y, err_msg='', verbose=True): numpy.testing.assert_array_less(cupy.asnumpy(x), cupy.asnumpy(y), err_msg=err_msg, verbose=verbose)
[ "def", "assert_array_less", "(", "x", ",", "y", ",", "err_msg", "=", "''", ",", "verbose", "=", "True", ")", ":", "numpy", ".", "testing", ".", "assert_array_less", "(", "cupy", ".", "asnumpy", "(", "x", ")", ",", "cupy", ".", "asnumpy", "(", "y", ")", ",", "err_msg", "=", "err_msg", ",", "verbose", "=", "verbose", ")" ]
raises an assertionerror if array_like objects are not ordered by less than .
train
false
54,371
def load_token(session_file): if (not os.path.exists(session_file)): return with open(session_file) as handle: data = json.load(handle) for (mx_id, token) in data.items(): AUTH_TOKENS[mx_id] = token
[ "def", "load_token", "(", "session_file", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "session_file", ")", ")", ":", "return", "with", "open", "(", "session_file", ")", "as", "handle", ":", "data", "=", "json", ".", "load", "(", "handle", ")", "for", "(", "mx_id", ",", "token", ")", "in", "data", ".", "items", "(", ")", ":", "AUTH_TOKENS", "[", "mx_id", "]", "=", "token" ]
load authentication tokens from persistent storage .
train
false
54,372
@testing.requires_testing_data def test_make_field_map_eeg(): evoked = read_evokeds(evoked_fname, condition='Left Auditory') evoked.info['bads'] = ['MEG 2443', 'EEG 053'] surf = get_head_surf('sample', subjects_dir=subjects_dir) assert_raises(ValueError, _make_surface_mapping, evoked.info, surf, 'eeg') evoked.pick_types(meg=False, eeg=True) fmd = make_field_map(evoked, trans_fname, subject='sample', subjects_dir=subjects_dir) assert_raises(RuntimeError, make_field_map, evoked, None, subject='sample', subjects_dir=subjects_dir) fmd = make_field_map(evoked, trans_fname, subject='sample', subjects_dir=subjects_dir) assert_true((len(fmd) == 1)) assert_array_equal(fmd[0]['data'].shape, (642, 59)) assert_true(len(fmd[0]['ch_names']), 59)
[ "@", "testing", ".", "requires_testing_data", "def", "test_make_field_map_eeg", "(", ")", ":", "evoked", "=", "read_evokeds", "(", "evoked_fname", ",", "condition", "=", "'Left Auditory'", ")", "evoked", ".", "info", "[", "'bads'", "]", "=", "[", "'MEG 2443'", ",", "'EEG 053'", "]", "surf", "=", "get_head_surf", "(", "'sample'", ",", "subjects_dir", "=", "subjects_dir", ")", "assert_raises", "(", "ValueError", ",", "_make_surface_mapping", ",", "evoked", ".", "info", ",", "surf", ",", "'eeg'", ")", "evoked", ".", "pick_types", "(", "meg", "=", "False", ",", "eeg", "=", "True", ")", "fmd", "=", "make_field_map", "(", "evoked", ",", "trans_fname", ",", "subject", "=", "'sample'", ",", "subjects_dir", "=", "subjects_dir", ")", "assert_raises", "(", "RuntimeError", ",", "make_field_map", ",", "evoked", ",", "None", ",", "subject", "=", "'sample'", ",", "subjects_dir", "=", "subjects_dir", ")", "fmd", "=", "make_field_map", "(", "evoked", ",", "trans_fname", ",", "subject", "=", "'sample'", ",", "subjects_dir", "=", "subjects_dir", ")", "assert_true", "(", "(", "len", "(", "fmd", ")", "==", "1", ")", ")", "assert_array_equal", "(", "fmd", "[", "0", "]", "[", "'data'", "]", ".", "shape", ",", "(", "642", ",", "59", ")", ")", "assert_true", "(", "len", "(", "fmd", "[", "0", "]", "[", "'ch_names'", "]", ")", ",", "59", ")" ]
test interpolation of eeg field onto head .
train
false
54,375
def get_pending_file_rename(): vnames = ('PendingFileRenameOperations', 'PendingFileRenameOperations2') key = 'SYSTEM\\CurrentControlSet\\Control\\Session Manager' for vname in vnames: reg_ret = __salt__['reg.read_value']('HKLM', key, vname) if reg_ret['success']: log.debug('Found key: %s', key) if (reg_ret['vdata'] and (reg_ret['vdata'] != '(value not set)')): return True else: log.debug('Unable to access key: %s', key) return False
[ "def", "get_pending_file_rename", "(", ")", ":", "vnames", "=", "(", "'PendingFileRenameOperations'", ",", "'PendingFileRenameOperations2'", ")", "key", "=", "'SYSTEM\\\\CurrentControlSet\\\\Control\\\\Session Manager'", "for", "vname", "in", "vnames", ":", "reg_ret", "=", "__salt__", "[", "'reg.read_value'", "]", "(", "'HKLM'", ",", "key", ",", "vname", ")", "if", "reg_ret", "[", "'success'", "]", ":", "log", ".", "debug", "(", "'Found key: %s'", ",", "key", ")", "if", "(", "reg_ret", "[", "'vdata'", "]", "and", "(", "reg_ret", "[", "'vdata'", "]", "!=", "'(value not set)'", ")", ")", ":", "return", "True", "else", ":", "log", ".", "debug", "(", "'Unable to access key: %s'", ",", "key", ")", "return", "False" ]
determine whether there are pending file rename operations that require a reboot .
train
false
54,376
def fix_html(container, raw): root = container.parse_xhtml(raw) return serialize(root, u'text/html')
[ "def", "fix_html", "(", "container", ",", "raw", ")", ":", "root", "=", "container", ".", "parse_xhtml", "(", "raw", ")", "return", "serialize", "(", "root", ",", "u'text/html'", ")" ]
fix any parsing errors in the html represented as a string in raw .
train
false
54,377
def bivariate_normal(X, Y, sigmax=1.0, sigmay=1.0, mux=0.0, muy=0.0, sigmaxy=0.0): Xmu = (X - mux) Ymu = (Y - muy) rho = (sigmaxy / (sigmax * sigmay)) z = ((((Xmu ** 2) / (sigmax ** 2)) + ((Ymu ** 2) / (sigmay ** 2))) - ((((2 * rho) * Xmu) * Ymu) / (sigmax * sigmay))) denom = ((((2 * np.pi) * sigmax) * sigmay) * np.sqrt((1 - (rho ** 2)))) return (np.exp(((- z) / (2 * (1 - (rho ** 2))))) / denom)
[ "def", "bivariate_normal", "(", "X", ",", "Y", ",", "sigmax", "=", "1.0", ",", "sigmay", "=", "1.0", ",", "mux", "=", "0.0", ",", "muy", "=", "0.0", ",", "sigmaxy", "=", "0.0", ")", ":", "Xmu", "=", "(", "X", "-", "mux", ")", "Ymu", "=", "(", "Y", "-", "muy", ")", "rho", "=", "(", "sigmaxy", "/", "(", "sigmax", "*", "sigmay", ")", ")", "z", "=", "(", "(", "(", "(", "Xmu", "**", "2", ")", "/", "(", "sigmax", "**", "2", ")", ")", "+", "(", "(", "Ymu", "**", "2", ")", "/", "(", "sigmay", "**", "2", ")", ")", ")", "-", "(", "(", "(", "(", "2", "*", "rho", ")", "*", "Xmu", ")", "*", "Ymu", ")", "/", "(", "sigmax", "*", "sigmay", ")", ")", ")", "denom", "=", "(", "(", "(", "(", "2", "*", "np", ".", "pi", ")", "*", "sigmax", ")", "*", "sigmay", ")", "*", "np", ".", "sqrt", "(", "(", "1", "-", "(", "rho", "**", "2", ")", ")", ")", ")", "return", "(", "np", ".", "exp", "(", "(", "(", "-", "z", ")", "/", "(", "2", "*", "(", "1", "-", "(", "rho", "**", "2", ")", ")", ")", ")", ")", "/", "denom", ")" ]
bivariate gaussian distribution for equal shape *x* , *y* .
train
false
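As a hedged numeric check of the density above (assuming numpy is available and imported as np): with zero means, unit sigmas, and zero covariance, the peak at the origin equals 1/(2*pi).

import numpy as np
X, Y = np.meshgrid(np.linspace(-3, 3, 7), np.linspace(-3, 3, 7))
Z = bivariate_normal(X, Y)  # defaults: mux=muy=0, sigmax=sigmay=1, sigmaxy=0
assert np.isclose(Z[3, 3], 1.0 / (2 * np.pi))  # grid centre is (0, 0)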
54,378
def sample_ids_from_metadata_description(mapping_f, valid_states_str): (map_data, map_header, map_comments) = parse_mapping_file(mapping_f) valid_states = parse_metadata_state_descriptions(valid_states_str) sample_ids = get_sample_ids(map_data, map_header, valid_states) if (len(sample_ids) < 1): raise ValueError('All samples have been filtered out for the criteria described in the valid states') return sample_ids
[ "def", "sample_ids_from_metadata_description", "(", "mapping_f", ",", "valid_states_str", ")", ":", "(", "map_data", ",", "map_header", ",", "map_comments", ")", "=", "parse_mapping_file", "(", "mapping_f", ")", "valid_states", "=", "parse_metadata_state_descriptions", "(", "valid_states_str", ")", "sample_ids", "=", "get_sample_ids", "(", "map_data", ",", "map_header", ",", "valid_states", ")", "if", "(", "len", "(", "sample_ids", ")", "<", "1", ")", ":", "raise", "ValueError", "(", "'All samples have been filtered out for the criteria described in the valid states'", ")", "return", "sample_ids" ]
given a description of metadata states , return the sample ids that match .
train
false
54,380
def graph_process(mg, lenmavlist): mg.show(lenmavlist)
[ "def", "graph_process", "(", "mg", ",", "lenmavlist", ")", ":", "mg", ".", "show", "(", "lenmavlist", ")" ]
child process entry point that displays a graph .
train
false
54,381
def get_profile_image_names(username): name = _make_profile_image_name(username) return {size: _get_profile_image_filename(name, size) for size in _PROFILE_IMAGE_SIZES}
[ "def", "get_profile_image_names", "(", "username", ")", ":", "name", "=", "_make_profile_image_name", "(", "username", ")", "return", "{", "size", ":", "_get_profile_image_filename", "(", "name", ",", "size", ")", "for", "size", "in", "_PROFILE_IMAGE_SIZES", "}" ]
returns a dict containing the filenames for a complete set of profile images .
train
false
54,383
def randomRange(start=0, stop=1000, seed=None): if (seed is not None): _ = getCurrentThreadData().random _.seed(seed) randint = _.randint else: randint = random.randint return int(randint(start, stop))
[ "def", "randomRange", "(", "start", "=", "0", ",", "stop", "=", "1000", ",", "seed", "=", "None", ")", ":", "if", "(", "seed", "is", "not", "None", ")", ":", "_", "=", "getCurrentThreadData", "(", ")", ".", "random", "_", ".", "seed", "(", "seed", ")", "randint", "=", "_", ".", "randint", "else", ":", "randint", "=", "random", ".", "randint", "return", "int", "(", "randint", "(", "start", ",", "stop", ")", ")" ]
returns random integer value in given range .
train
false
54,384
def correlate_output_formatter(bt, test_stats, pvals, fdr_pvals, bon_pvals, md_key): header = ['Feature ID', 'Test stat.', 'pval', 'pval_fdr', 'pval_bon', md_key] num_lines = len(test_stats) lines = ['\t'.join(header)] for i in range(num_lines): tmp = [bt.ids(axis='observation')[i], test_stats[i], pvals[i], fdr_pvals[i], bon_pvals[i]] lines.append('\t'.join(map(str, tmp))) nls = _add_metadata(bt, md_key, lines) return nls
[ "def", "correlate_output_formatter", "(", "bt", ",", "test_stats", ",", "pvals", ",", "fdr_pvals", ",", "bon_pvals", ",", "md_key", ")", ":", "header", "=", "[", "'Feature ID'", ",", "'Test stat.'", ",", "'pval'", ",", "'pval_fdr'", ",", "'pval_bon'", ",", "md_key", "]", "num_lines", "=", "len", "(", "test_stats", ")", "lines", "=", "[", "' DCTB '", ".", "join", "(", "header", ")", "]", "for", "i", "in", "range", "(", "num_lines", ")", ":", "tmp", "=", "[", "bt", ".", "ids", "(", "axis", "=", "'observation'", ")", "[", "i", "]", ",", "test_stats", "[", "i", "]", ",", "pvals", "[", "i", "]", ",", "fdr_pvals", "[", "i", "]", ",", "bon_pvals", "[", "i", "]", "]", "lines", ".", "append", "(", "' DCTB '", ".", "join", "(", "map", "(", "str", ",", "tmp", ")", ")", ")", "nls", "=", "_add_metadata", "(", "bt", ",", "md_key", ",", "lines", ")", "return", "nls" ]
produce lines for a tab delimited text file for correlations .
train
false
54,385
def format_unixtime_ms(unixtime): if unixtime: return str(datetime.datetime.fromtimestamp((unixtime / 1000)).strftime('%x %X %Z')) else: return ''
[ "def", "format_unixtime_ms", "(", "unixtime", ")", ":", "if", "unixtime", ":", "return", "str", "(", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "(", "unixtime", "/", "1000", ")", ")", ".", "strftime", "(", "'%x %X %Z'", ")", ")", "else", ":", "return", "''" ]
format a unix timestamp in ms to a human readable string .
train
false
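Illustrative calls (output depends on the local timezone and locale, hence the hedged comments):

ts = 1609459200000                  # 2021-01-01 00:00:00 UTC, in milliseconds
print(format_unixtime_ms(ts))       # locale-formatted local time; %Z is blank for naive datetimes
print(repr(format_unixtime_ms(0)))  # falsy input yields ''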
54,387
def _AddSetListenerMethod(cls): def SetListener(self, listener): if (listener is None): self._listener = message_listener_mod.NullMessageListener() else: self._listener = listener cls._SetListener = SetListener
[ "def", "_AddSetListenerMethod", "(", "cls", ")", ":", "def", "SetListener", "(", "self", ",", "listener", ")", ":", "if", "(", "listener", "is", "None", ")", ":", "self", ".", "_listener", "=", "message_listener_mod", ".", "NullMessageListener", "(", ")", "else", ":", "self", ".", "_listener", "=", "listener", "cls", ".", "_SetListener", "=", "SetListener" ]
helper for _addmessagemethods() .
train
false
54,388
@register_opt() @local_optimizer([SparseBlockOuter, GpuFromHost]) def gpu_sparse_block_outer_opt(node): if (isinstance(node.op, SparseBlockOuter) and any((_owner_isinstance(inp, HostFromGpu) for inp in node.inputs))): inputs = _clear_host_from_gpu(node.inputs) return [host_from_gpu(GpuSparseBlockOuter()(*inputs))] elif (isinstance(node.op, GpuFromHost) and _owner_isinstance(node.inputs[0], SparseBlockOuter)): meta_node = node.inputs[0].owner inputs = _clear_host_from_gpu(meta_node.inputs) return [GpuSparseBlockOuter()(*inputs)]
[ "@", "register_opt", "(", ")", "@", "local_optimizer", "(", "[", "SparseBlockOuter", ",", "GpuFromHost", "]", ")", "def", "gpu_sparse_block_outer_opt", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "SparseBlockOuter", ")", "and", "any", "(", "(", "_owner_isinstance", "(", "inp", ",", "HostFromGpu", ")", "for", "inp", "in", "node", ".", "inputs", ")", ")", ")", ":", "inputs", "=", "_clear_host_from_gpu", "(", "node", ".", "inputs", ")", "return", "[", "host_from_gpu", "(", "GpuSparseBlockOuter", "(", ")", "(", "*", "inputs", ")", ")", "]", "elif", "(", "isinstance", "(", "node", ".", "op", ",", "GpuFromHost", ")", "and", "_owner_isinstance", "(", "node", ".", "inputs", "[", "0", "]", ",", "SparseBlockOuter", ")", ")", ":", "meta_node", "=", "node", ".", "inputs", "[", "0", "]", ".", "owner", "inputs", "=", "_clear_host_from_gpu", "(", "meta_node", ".", "inputs", ")", "return", "[", "GpuSparseBlockOuter", "(", ")", "(", "*", "inputs", ")", "]" ]
sparseblockouter(hostfromgpu) -> hostfromgpu(gpusparseblockouter) or gpufromhost -> gpusparseblockouter .
train
false
54,389
def lerp(a, b, t): if (t < 0.0): return a if (t > 1.0): return b return (a + ((b - a) * t))
[ "def", "lerp", "(", "a", ",", "b", ",", "t", ")", ":", "if", "(", "t", "<", "0.0", ")", ":", "return", "a", "if", "(", "t", ">", "1.0", ")", ":", "return", "b", "return", "(", "a", "+", "(", "(", "b", "-", "a", ")", "*", "t", ")", ")" ]
returns the linear interpolation between a and b at time t between 0 and 1 .
train
true
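The clamping behaviour is easiest to see with a few literal values (illustrative):

assert lerp(0.0, 10.0, 0.25) == 2.5   # interior t interpolates
assert lerp(0.0, 10.0, -1.0) == 0.0   # t < 0 clamps to a
assert lerp(0.0, 10.0, 2.0) == 10.0   # t > 1 clamps to b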
54,391
def create_relationship(model, instance, relation): result = {} pk_value = primary_key_value(instance) self_link = url_for(model, pk_value, relation, relationship=True) related_link = url_for(model, pk_value, relation) result['links'] = {'self': self_link} try: related_model = get_related_model(model, relation) url_for(related_model) except ValueError: pass else: result['links']['related'] = related_link related_value = getattr(instance, relation) if is_like_list(instance, relation): result['data'] = list(map(simple_relationship_dump, related_value)) elif (related_value is not None): result['data'] = simple_relationship_dump(related_value) else: result['data'] = None return result
[ "def", "create_relationship", "(", "model", ",", "instance", ",", "relation", ")", ":", "result", "=", "{", "}", "pk_value", "=", "primary_key_value", "(", "instance", ")", "self_link", "=", "url_for", "(", "model", ",", "pk_value", ",", "relation", ",", "relationship", "=", "True", ")", "related_link", "=", "url_for", "(", "model", ",", "pk_value", ",", "relation", ")", "result", "[", "'links'", "]", "=", "{", "'self'", ":", "self_link", "}", "try", ":", "related_model", "=", "get_related_model", "(", "model", ",", "relation", ")", "url_for", "(", "related_model", ")", "except", "ValueError", ":", "pass", "else", ":", "result", "[", "'links'", "]", "[", "'related'", "]", "=", "related_link", "related_value", "=", "getattr", "(", "instance", ",", "relation", ")", "if", "is_like_list", "(", "instance", ",", "relation", ")", ":", "result", "[", "'data'", "]", "=", "list", "(", "map", "(", "simple_relationship_dump", ",", "related_value", ")", ")", "elif", "(", "related_value", "is", "not", "None", ")", ":", "result", "[", "'data'", "]", "=", "simple_relationship_dump", "(", "related_value", ")", "else", ":", "result", "[", "'data'", "]", "=", "None", "return", "result" ]
creates a relationship from the given relation name .
train
false
54,392
def _validate_post_params(params): try: username = params.get('username') user = User.objects.get(username=username) except User.DoesNotExist: msg = _('User {username} does not exist').format(username=username) return (None, HttpResponseBadRequest(msg)) try: course_key = CourseKey.from_string(params.get('course_key')) except InvalidKeyError: msg = _('{course_key} is not a valid course key').format(course_key=params.get('course_key')) return (None, HttpResponseBadRequest(msg)) return ({'user': user, 'course_key': course_key}, None)
[ "def", "_validate_post_params", "(", "params", ")", ":", "try", ":", "username", "=", "params", ".", "get", "(", "'username'", ")", "user", "=", "User", ".", "objects", ".", "get", "(", "username", "=", "username", ")", "except", "User", ".", "DoesNotExist", ":", "msg", "=", "_", "(", "'User {username} does not exist'", ")", ".", "format", "(", "username", "=", "username", ")", "return", "(", "None", ",", "HttpResponseBadRequest", "(", "msg", ")", ")", "try", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "params", ".", "get", "(", "'course_key'", ")", ")", "except", "InvalidKeyError", ":", "msg", "=", "_", "(", "'{course_key} is not a valid course key'", ")", ".", "format", "(", "course_key", "=", "params", ".", "get", "(", "'course_key'", ")", ")", "return", "(", "None", ",", "HttpResponseBadRequest", "(", "msg", ")", ")", "return", "(", "{", "'user'", ":", "user", ",", "'course_key'", ":", "course_key", "}", ",", "None", ")" ]
validate request post parameters to the generate and regenerate certificates end-point .
train
false
54,396
def testIBP(): from pybrain.tools.plotting.colormaps import ColorMap import pylab n = 50 ps = [(10, 0.1), (10,), (50,), (50, 0.5)] ms = [] for p in ps: if (len(p) > 1): m = generateIBP(n, p[0], p[1]) else: m = generateIBP(n, p[0]) ms.append(leftordered(m)) for m in ms: ColorMap(m, pixelspervalue=3) pylab.show()
[ "def", "testIBP", "(", ")", ":", "from", "pybrain", ".", "tools", ".", "plotting", ".", "colormaps", "import", "ColorMap", "import", "pylab", "n", "=", "50", "ps", "=", "[", "(", "10", ",", "0.1", ")", ",", "(", "10", ",", ")", ",", "(", "50", ",", ")", ",", "(", "50", ",", "0.5", ")", "]", "ms", "=", "[", "]", "for", "p", "in", "ps", ":", "if", "(", "len", "(", "p", ")", ">", "1", ")", ":", "m", "=", "generateIBP", "(", "n", ",", "p", "[", "0", "]", ",", "p", "[", "1", "]", ")", "else", ":", "m", "=", "generateIBP", "(", "n", ",", "p", "[", "0", "]", ")", "ms", ".", "append", "(", "leftordered", "(", "m", ")", ")", "for", "m", "in", "ms", ":", "ColorMap", "(", "m", ",", "pixelspervalue", "=", "3", ")", "pylab", ".", "show", "(", ")" ]
plot matrices generated by an ibp .
train
false
54,397
def p_command_let(p): p[0] = ('LET', p[2], p[4])
[ "def", "p_command_let", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "'LET'", ",", "p", "[", "2", "]", ",", "p", "[", "4", "]", ")" ]
command : let variable equals expr .
train
false
54,399
def queryset_iterator(queryset, chunksize=1000): if queryset.exists(): primary_key = 0 last_pk = queryset.order_by('-pk')[0].pk queryset = queryset.order_by('pk') while (primary_key < last_pk): for row in queryset.filter(pk__gt=primary_key)[:chunksize]: primary_key = row.pk (yield row) gc.collect()
[ "def", "queryset_iterator", "(", "queryset", ",", "chunksize", "=", "1000", ")", ":", "if", "queryset", ".", "exists", "(", ")", ":", "primary_key", "=", "0", "last_pk", "=", "queryset", ".", "order_by", "(", "'-pk'", ")", "[", "0", "]", ".", "pk", "queryset", "=", "queryset", ".", "order_by", "(", "'pk'", ")", "while", "(", "primary_key", "<", "last_pk", ")", ":", "for", "row", "in", "queryset", ".", "filter", "(", "pk__gt", "=", "primary_key", ")", "[", ":", "chunksize", "]", ":", "primary_key", "=", "row", ".", "pk", "(", "yield", "row", ")", "gc", ".", "collect", "(", ")" ]
the queryset iterator helps to keep the memory consumption down .
train
true
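A hedged usage sketch (assumes a Django project; Article and handle_row are hypothetical names used only for illustration):

# walk a large table in primary-key order without loading it all into memory
for article in queryset_iterator(Article.objects.all(), chunksize=500):
    handle_row(article)  # hypothetical per-row handler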
54,400
def _remove_intercept_patsy(terms): from patsy.desc import INTERCEPT if (INTERCEPT in terms): terms.remove(INTERCEPT) return terms
[ "def", "_remove_intercept_patsy", "(", "terms", ")", ":", "from", "patsy", ".", "desc", "import", "INTERCEPT", "if", "(", "INTERCEPT", "in", "terms", ")", ":", "terms", ".", "remove", "(", "INTERCEPT", ")", "return", "terms" ]
remove intercept from patsy terms .
train
false
54,401
def compat_tee(iterable): def gen(next, data={}, cnt=[0]): dpop = data.pop for i in itertools.count(): if (i == cnt[0]): item = data[i] = next() cnt[0] += 1 else: item = dpop(i) (yield item) next = iter(iterable).next return (gen(next), gen(next))
[ "def", "compat_tee", "(", "iterable", ")", ":", "def", "gen", "(", "next", ",", "data", "=", "{", "}", ",", "cnt", "=", "[", "0", "]", ")", ":", "dpop", "=", "data", ".", "pop", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "if", "(", "i", "==", "cnt", "[", "0", "]", ")", ":", "item", "=", "data", "[", "i", "]", "=", "next", "(", ")", "cnt", "[", "0", "]", "+=", "1", "else", ":", "item", "=", "dpop", "(", "i", ")", "(", "yield", "item", ")", "next", "=", "iter", "(", "iterable", ")", ".", "next", "return", "(", "gen", "(", "next", ")", ",", "gen", "(", "next", ")", ")" ]
return two independent iterators from a single iterable .
train
false
54,402
def get_protocol_from_tool_shed_url(tool_shed_url): try: if (tool_shed_url.find('://') > 0): return tool_shed_url.split('://')[0].lower() except Exception as e: if (tool_shed_url is not None): log.exception('Handled exception getting the protocol from Tool Shed URL %s:\n%s', str(tool_shed_url), e) return 'http'
[ "def", "get_protocol_from_tool_shed_url", "(", "tool_shed_url", ")", ":", "try", ":", "if", "(", "tool_shed_url", ".", "find", "(", "'://'", ")", ">", "0", ")", ":", "return", "tool_shed_url", ".", "split", "(", "'://'", ")", "[", "0", "]", ".", "lower", "(", ")", "except", "Exception", "as", "e", ":", "if", "(", "tool_shed_url", "is", "not", "None", ")", ":", "log", ".", "exception", "(", "'Handled exception getting the protocol from Tool Shed URL %s:\\n%s'", ",", "str", "(", "tool_shed_url", ")", ",", "e", ")", "return", "'http'" ]
return the protocol from the received tool_shed_url if it exists .
train
false
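Illustrative behaviour (the Galaxy main Tool Shed URL is used purely as an example string):

assert get_protocol_from_tool_shed_url('https://toolshed.g2.bx.psu.edu') == 'https'
assert get_protocol_from_tool_shed_url('toolshed.g2.bx.psu.edu') == 'http'  # no scheme falls back to http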
54,404
def format_acl_v2(acl_dict): return json.dumps(acl_dict, ensure_ascii=True, separators=(',', ':'), sort_keys=True)
[ "def", "format_acl_v2", "(", "acl_dict", ")", ":", "return", "json", ".", "dumps", "(", "acl_dict", ",", "ensure_ascii", "=", "True", ",", "separators", "=", "(", "','", ",", "':'", ")", ",", "sort_keys", "=", "True", ")" ]
returns a version-2 swift acl json string .
train
false
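A minimal sketch of the resulting string (keys sorted, compact separators):

acl = format_acl_v2({'read-only': ['bob'], 'admin': ['alice']})
assert acl == '{"admin":["alice"],"read-only":["bob"]}'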
54,405
def test_version(): assert virtualenv.virtualenv_version, 'Should have version'
[ "def", "test_version", "(", ")", ":", "assert", "virtualenv", ".", "virtualenv_version", ",", "'Should have version'" ]
should have a version string .
train
false
54,406
def package_relationship_update(context, data_dict): model = context['model'] schema = (context.get('schema') or schema_.default_update_relationship_schema()) (id, id2, rel) = _get_or_bust(data_dict, ['subject', 'object', 'type']) pkg1 = model.Package.get(id) pkg2 = model.Package.get(id2) if (not pkg1): raise NotFound(('Subject package %r was not found.' % id)) if (not pkg2): return NotFound(('Object package %r was not found.' % id2)) (data, errors) = _validate(data_dict, schema, context) if errors: model.Session.rollback() raise ValidationError(errors) _check_access('package_relationship_update', context, data_dict) existing_rels = pkg1.get_relationships_with(pkg2, rel) if (not existing_rels): raise NotFound('This relationship between the packages was not found.') entity = existing_rels[0] comment = data_dict.get('comment', u'') context['relationship'] = entity return _update_package_relationship(entity, comment, context)
[ "def", "package_relationship_update", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "schema", "=", "(", "context", ".", "get", "(", "'schema'", ")", "or", "schema_", ".", "default_update_relationship_schema", "(", ")", ")", "(", "id", ",", "id2", ",", "rel", ")", "=", "_get_or_bust", "(", "data_dict", ",", "[", "'subject'", ",", "'object'", ",", "'type'", "]", ")", "pkg1", "=", "model", ".", "Package", ".", "get", "(", "id", ")", "pkg2", "=", "model", ".", "Package", ".", "get", "(", "id2", ")", "if", "(", "not", "pkg1", ")", ":", "raise", "NotFound", "(", "(", "'Subject package %r was not found.'", "%", "id", ")", ")", "if", "(", "not", "pkg2", ")", ":", "return", "NotFound", "(", "(", "'Object package %r was not found.'", "%", "id2", ")", ")", "(", "data", ",", "errors", ")", "=", "_validate", "(", "data_dict", ",", "schema", ",", "context", ")", "if", "errors", ":", "model", ".", "Session", ".", "rollback", "(", ")", "raise", "ValidationError", "(", "errors", ")", "_check_access", "(", "'package_relationship_update'", ",", "context", ",", "data_dict", ")", "existing_rels", "=", "pkg1", ".", "get_relationships_with", "(", "pkg2", ",", "rel", ")", "if", "(", "not", "existing_rels", ")", ":", "raise", "NotFound", "(", "'This relationship between the packages was not found.'", ")", "entity", "=", "existing_rels", "[", "0", "]", "comment", "=", "data_dict", ".", "get", "(", "'comment'", ",", "u''", ")", "context", "[", "'relationship'", "]", "=", "entity", "return", "_update_package_relationship", "(", "entity", ",", "comment", ",", "context", ")" ]
update a relationship between two datasets .
train
false
54,407
def add_required_label_tag(original_function): def required_label_tag(self, contents=None, attrs=None): 'Required label tag' contents = (contents or escape(self.label)) if self.field.required: if (not self.label.endswith(' *')): self.label += ' *' contents += ' *' attrs = {'class': 'required'} return original_function(self, contents, attrs) return required_label_tag
[ "def", "add_required_label_tag", "(", "original_function", ")", ":", "def", "required_label_tag", "(", "self", ",", "contents", "=", "None", ",", "attrs", "=", "None", ")", ":", "contents", "=", "(", "contents", "or", "escape", "(", "self", ".", "label", ")", ")", "if", "self", ".", "field", ".", "required", ":", "if", "(", "not", "self", ".", "label", ".", "endswith", "(", "' *'", ")", ")", ":", "self", ".", "label", "+=", "' *'", "contents", "+=", "' *'", "attrs", "=", "{", "'class'", ":", "'required'", "}", "return", "original_function", "(", "self", ",", "contents", ",", "attrs", ")", "return", "required_label_tag" ]
adds the required css class and an asterisk to required field labels .
train
false
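A typical monkey-patching use (a sketch, not the project's canonical wiring; the BoundField import path varies across Django versions):

from django.forms.boundfield import BoundField  # older Django: django.forms.forms
BoundField.label_tag = add_required_label_tag(BoundField.label_tag)  # decorate in place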
54,408
def make_script_tests(executable): class ScriptTests(TestCase, ): @skipUnless(which(executable), (executable + ' not installed')) def test_version(self): '\n The script is a command available on the system path.\n ' result = run_process(([executable] + ['--version'])) self.assertEqual(result.output, ('%s\n' % (__version__,))) @skipUnless(which(executable), (executable + ' not installed')) def test_identification(self): '\n The script identifies itself as what it is.\n ' result = run_process(([executable] + ['--help'])) self.assertIn(executable, result.output) return ScriptTests
[ "def", "make_script_tests", "(", "executable", ")", ":", "class", "ScriptTests", "(", "TestCase", ",", ")", ":", "@", "skipUnless", "(", "which", "(", "executable", ")", ",", "(", "executable", "+", "' not installed'", ")", ")", "def", "test_version", "(", "self", ")", ":", "result", "=", "run_process", "(", "(", "[", "executable", "]", "+", "[", "'--version'", "]", ")", ")", "self", ".", "assertEqual", "(", "result", ".", "output", ",", "(", "'%s\\n'", "%", "(", "__version__", ",", ")", ")", ")", "@", "skipUnless", "(", "which", "(", "executable", ")", ",", "(", "executable", "+", "' not installed'", ")", ")", "def", "test_identification", "(", "self", ")", ":", "result", "=", "run_process", "(", "(", "[", "executable", "]", "+", "[", "'--help'", "]", ")", ")", "self", ".", "assertIn", "(", "executable", ",", "result", ".", "output", ")", "return", "ScriptTests" ]
generate a test suite which applies to any flocker-installed node script .
train
false
54,409
def _find_clickable_elem_with_wait(context, by, wait_time=MAX_WAIT_TIME): return WebDriverWait(context.browser, wait_time).until(EC.element_to_be_clickable(by))
[ "def", "_find_clickable_elem_with_wait", "(", "context", ",", "by", ",", "wait_time", "=", "MAX_WAIT_TIME", ")", ":", "return", "WebDriverWait", "(", "context", ".", "browser", ",", "wait_time", ")", ".", "until", "(", "EC", ".", "element_to_be_clickable", "(", "by", ")", ")" ]
tries to find an enabled element with an explicit timeout .
train
false
54,410
def get_issues_from_bulk(bulk_data, **additional_fields): return [models.Issue(subject=line, **additional_fields) for line in text.split_in_lines(bulk_data)]
[ "def", "get_issues_from_bulk", "(", "bulk_data", ",", "**", "additional_fields", ")", ":", "return", "[", "models", ".", "Issue", "(", "subject", "=", "line", ",", "**", "additional_fields", ")", "for", "line", "in", "text", ".", "split_in_lines", "(", "bulk_data", ")", "]" ]
convert bulk_data into a list of issues .
train
false
54,411
def question_metadata(): output = s3_rest_controller() return output
[ "def", "question_metadata", "(", ")", ":", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful crud controller .
train
false
54,412
def _find_es_dict_by_key(lookup_dict, term): if (term in lookup_dict): return (lookup_dict, term) dict_cursor = lookup_dict subkeys = term.split('.') subkey = '' while (len(subkeys) > 0): subkey += subkeys.pop(0) if (subkey in dict_cursor): if (len(subkeys) == 0): break dict_cursor = dict_cursor[subkey] subkey = '' elif (len(subkeys) == 0): dict_cursor = None subkey = None else: subkey += '.' return (dict_cursor, subkey)
[ "def", "_find_es_dict_by_key", "(", "lookup_dict", ",", "term", ")", ":", "if", "(", "term", "in", "lookup_dict", ")", ":", "return", "(", "lookup_dict", ",", "term", ")", "dict_cursor", "=", "lookup_dict", "subkeys", "=", "term", ".", "split", "(", "'.'", ")", "subkey", "=", "''", "while", "(", "len", "(", "subkeys", ")", ">", "0", ")", ":", "subkey", "+=", "subkeys", ".", "pop", "(", "0", ")", "if", "(", "subkey", "in", "dict_cursor", ")", ":", "if", "(", "len", "(", "subkeys", ")", "==", "0", ")", ":", "break", "dict_cursor", "=", "dict_cursor", "[", "subkey", "]", "subkey", "=", "''", "elif", "(", "len", "(", "subkeys", ")", "==", "0", ")", ":", "dict_cursor", "=", "None", "subkey", "=", "None", "else", ":", "subkey", "+=", "'.'", "return", "(", "dict_cursor", ",", "subkey", ")" ]
performs an iterative dictionary search that resolves dotted keys segment by segment .
train
false
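The dotted-key resolution can be traced on a small nested dict (illustrative values):

doc = {'user': {'name.first': 'ada'}}
assert _find_es_dict_by_key(doc, 'user') == (doc, 'user')                            # direct hit
assert _find_es_dict_by_key(doc, 'user.name.first') == (doc['user'], 'name.first')  # dots inside a literal key
assert _find_es_dict_by_key(doc, 'missing.key') == (None, None)                      # not found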
54,413
def set_ip_nonlocal_bind_for_namespace(namespace): failed = set_ip_nonlocal_bind(value=0, namespace=namespace, log_fail_as_error=False) if failed: LOG.warning(_LW('%s will not be set to 0 in the root namespace in order to not break DVR, which requires this value be set to 1. This may introduce a race between moving a floating IP to a different network node, and the peer side getting a populated ARP cache for a given floating IP address.'), IP_NONLOCAL_BIND)
[ "def", "set_ip_nonlocal_bind_for_namespace", "(", "namespace", ")", ":", "failed", "=", "set_ip_nonlocal_bind", "(", "value", "=", "0", ",", "namespace", "=", "namespace", ",", "log_fail_as_error", "=", "False", ")", "if", "failed", ":", "LOG", ".", "warning", "(", "_LW", "(", "'%s will not be set to 0 in the root namespace in order to not break DVR, which requires this value be set to 1. This may introduce a race between moving a floating IP to a different network node, and the peer side getting a populated ARP cache for a given floating IP address.'", ")", ",", "IP_NONLOCAL_BIND", ")" ]
set ip_nonlocal_bind but don't raise an exception on failure .
train
false
54,414
def get_today_all(): ct._write_head() df = _parsing_dayprice_json(1) if (df is not None): for i in range(2, ct.PAGE_NUM[0]): newdf = _parsing_dayprice_json(i) df = df.append(newdf, ignore_index=True) return df
[ "def", "get_today_all", "(", ")", ":", "ct", ".", "_write_head", "(", ")", "df", "=", "_parsing_dayprice_json", "(", "1", ")", "if", "(", "df", "is", "not", "None", ")", ":", "for", "i", "in", "range", "(", "2", ",", "ct", ".", "PAGE_NUM", "[", "0", "]", ")", ":", "newdf", "=", "_parsing_dayprice_json", "(", "i", ")", "df", "=", "df", ".", "append", "(", "newdf", ",", "ignore_index", "=", "True", ")", "return", "df" ]
fetches the latest trading day's data for all stocks page by page and returns a single dataframe .
train
false
54,415
def sort_otu_table(otu_table, sorted_sample_ids): sorted_sample_ids_set = set(sorted_sample_ids) if (set(otu_table.ids()) - sorted_sample_ids_set): raise KeyError(('Sample IDs present in OTU table but not sorted sample id list: ' + ' '.join(list((set(otu_table.ids()) - set(sorted_sample_ids)))))) if (len(sorted_sample_ids_set) != len(sorted_sample_ids)): raise ValueError('Duplicate sample IDs are present in sorted sample id list.') safe_sorted_sample_ids = [] for k in sorted_sample_ids: if otu_table.exists(k): safe_sorted_sample_ids.append(k) sorted_table = otu_table.sort_order(safe_sorted_sample_ids) return sorted_table
[ "def", "sort_otu_table", "(", "otu_table", ",", "sorted_sample_ids", ")", ":", "sorted_sample_ids_set", "=", "set", "(", "sorted_sample_ids", ")", "if", "(", "set", "(", "otu_table", ".", "ids", "(", ")", ")", "-", "sorted_sample_ids_set", ")", ":", "raise", "KeyError", "(", "(", "'Sample IDs present in OTU table but not sorted sample id list: '", "+", "' '", ".", "join", "(", "list", "(", "(", "set", "(", "otu_table", ".", "ids", "(", ")", ")", "-", "set", "(", "sorted_sample_ids", ")", ")", ")", ")", ")", ")", "if", "(", "len", "(", "sorted_sample_ids_set", ")", "!=", "len", "(", "sorted_sample_ids", ")", ")", ":", "raise", "ValueError", "(", "'Duplicate sample IDs are present in sorted sample id list.'", ")", "safe_sorted_sample_ids", "=", "[", "]", "for", "k", "in", "sorted_sample_ids", ":", "if", "otu_table", ".", "exists", "(", "k", ")", ":", "safe_sorted_sample_ids", ".", "append", "(", "k", ")", "sorted_table", "=", "otu_table", ".", "sort_order", "(", "safe_sorted_sample_ids", ")", "return", "sorted_table" ]
sort an otu table by sorted sample ids .
train
false
54,418
def prepro(I): I = I[35:195] I = I[::2, ::2, 0] I[(I == 144)] = 0 I[(I == 109)] = 0 I[(I != 0)] = 1 return I.astype(np.float).ravel()
[ "def", "prepro", "(", "I", ")", ":", "I", "=", "I", "[", "35", ":", "195", "]", "I", "=", "I", "[", ":", ":", "2", ",", ":", ":", "2", ",", "0", "]", "I", "[", "(", "I", "==", "144", ")", "]", "=", "0", "I", "[", "(", "I", "==", "109", ")", "]", "=", "0", "I", "[", "(", "I", "!=", "0", ")", "]", "=", "1", "return", "I", ".", "astype", "(", "np", ".", "float", ")", ".", "ravel", "(", ")" ]
prepro 210x160x3 uint8 frame into 6400 1d float vector .
train
true
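Shape check for the preprocessing above (a sketch, assuming numpy imported as np and the function above in scope):

import numpy as np
frame = np.zeros((210, 160, 3), dtype=np.uint8)  # raw Atari frame
x = prepro(frame)
assert x.shape == (6400,)  # 80x80 after cropping rows 35:195 and 2x downsampling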