id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
26,900
def test_compute_nearest():
    """Test nearest-neighbor searches: brute force and ball tree must agree."""
    x = rng.randn(500, 3)
    # Project the points onto the unit sphere.
    x /= np.sqrt(np.sum(x ** 2, axis=1))[:, None]
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; plain int
    # is the documented replacement.
    nn_true = rng.permutation(np.arange(500, dtype=int))[:20]
    y = x[nn_true]
    nn1 = _compute_nearest(x, y, use_balltree=False)
    nn2 = _compute_nearest(x, y, use_balltree=True)
    assert_array_equal(nn_true, nn1)
    assert_array_equal(nn_true, nn2)
    # return_dists variant: queries are exact members of x, so every
    # distance must be zero.
    nnn1 = _compute_nearest(x, y, use_balltree=False, return_dists=True)
    nnn2 = _compute_nearest(x, y, use_balltree=True, return_dists=True)
    assert_array_equal(nnn1[0], nn_true)
    assert_array_equal(nnn1[1], np.zeros_like(nn1))
    assert_equal(len(nnn1), len(nnn2))
    for nn1, nn2 in zip(nnn1, nnn2):
        assert_array_equal(nn1, nn2)
[ "def", "test_compute_nearest", "(", ")", ":", "x", "=", "rng", ".", "randn", "(", "500", ",", "3", ")", "x", "/=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "x", "**", "2", ")", ",", "axis", "=", "1", ")", ")", "[", ":", ",", "N...
test nearest neighbor searches .
train
false
26,901
@set_database
def get_topic_nodes_with_children(parent=None, **kwargs):
    """Return topic nodes under *parent*, each with its children listed as ids.

    *parent* is either 'root' (top-level topics) or a topic id; returns
    None when no parent is given.
    """
    if not parent:
        return
    Parent = Item.alias()
    Child = Item.alias()
    if parent == 'root':
        selector = Parent.parent.is_null()
    else:
        selector = (Parent.id == parent)
    child_values = list(
        Item.select(Child)
        .join(Child, on=(Child.parent == Item.pk))
        .join(Parent, on=(Item.parent == Parent.pk))
        .where(selector)
        .dicts())
    parent_values = list(
        Item.select(Item)
        .join(Parent, on=(Item.parent == Parent.pk))
        .where(selector)
        .dicts())
    topics = []
    for topic in parent_values:
        entry = dict(topic)
        entry['children'] = [child['id'] for child in child_values
                             if child['parent'] == topic['pk']]
        topics.append(entry)
    return topics
[ "@", "set_database", "def", "get_topic_nodes_with_children", "(", "parent", "=", "None", ",", "**", "kwargs", ")", ":", "if", "parent", ":", "Parent", "=", "Item", ".", "alias", "(", ")", "Child", "=", "Item", ".", "alias", "(", ")", "if", "(", "parent...
convenience function for returning a set of topic nodes with children listed as ids .
train
false
26,902
def _get_b64_chunks_from_str(string): chunks = [] while True: pad_loc = string.find('=') if ((pad_loc < 0) or (pad_loc == (len(string) - 1)) or (pad_loc == (len(string) - 2))): chunks.append(string) return chunks if ((pad_loc != (len(string) - 1)) and (string[(pad_loc + 1)] == '=')): pad_loc += 1 chunks.append(string[:(pad_loc + 1)]) string = string[(pad_loc + 1):] return chunks
[ "def", "_get_b64_chunks_from_str", "(", "string", ")", ":", "chunks", "=", "[", "]", "while", "True", ":", "pad_loc", "=", "string", ".", "find", "(", "'='", ")", "if", "(", "(", "pad_loc", "<", "0", ")", "or", "(", "pad_loc", "==", "(", "len", "("...
given a string of concatenated base64 objects .
train
false
26,903
def tensorsolve(a, b, axes=None):
    """Theano analogue of numpy.linalg.tensorsolve, via the TensorSolve op."""
    op = TensorSolve(axes)
    return op(a, b)
[ "def", "tensorsolve", "(", "a", ",", "b", ",", "axes", "=", "None", ")", ":", "return", "TensorSolve", "(", "axes", ")", "(", "a", ",", "b", ")" ]
theano utilization of numpy .
train
false
26,905
def test_delayed_command_order():
    """Delayed commands should sort by their delay time."""
    noop = lambda: None
    delays = [random.randint(0, 99) for _ in range(5)]
    commands = sorted(schedule.DelayedCommand.after(delay, noop)
                      for delay in delays)
    assert [cmd.delay.seconds for cmd in commands] == sorted(delays)
[ "def", "test_delayed_command_order", "(", ")", ":", "null", "=", "(", "lambda", ":", "None", ")", "delays", "=", "[", "random", ".", "randint", "(", "0", ",", "99", ")", "for", "x", "in", "range", "(", "5", ")", "]", "cmds", "=", "sorted", "(", "...
delayed commands should be sorted by delay time .
train
false
26,906
def add_sr(path, sr_path=True, nocname=False, force_hostname=False,
           retain_extension=True, force_https=False, force_extension=None):
    """Rewrite *path* as a full reddit URL.

    Optionally prefixes the current subreddit path, fills in the hostname,
    upgrades to https, and applies a render-style extension.
    (nocname is accepted for interface compatibility but unused here.)
    """
    # Fragments and javascript pseudo-URLs pass through untouched.
    if path.startswith(('#', 'javascript:')):
        return path
    url = UrlParser(path)
    if sr_path:
        url.path_add_subreddit(c.site)
    if force_hostname or not url.hostname:
        url.hostname = get_domain(subreddit=False)
    if force_https or (c.secure and url.is_reddit_url()):
        url.scheme = 'https'
    if force_extension is not None:
        url.set_extension(force_extension)
    elif retain_extension:
        if c.render_style == 'mobile':
            url.set_extension('mobile')
        elif c.render_style == 'compact':
            url.set_extension('compact')
    return url.unparse()
[ "def", "add_sr", "(", "path", ",", "sr_path", "=", "True", ",", "nocname", "=", "False", ",", "force_hostname", "=", "False", ",", "retain_extension", "=", "True", ",", "force_https", "=", "False", ",", "force_extension", "=", "None", ")", ":", "if", "pa...
given a path .
train
false
26,907
def test_tag():
    """The tag attribute should auto-create nested namespaces correctly."""
    class DummyModel(Model, ):
        'The simplest instance of Model possible.'
    model = DummyModel()
    model.tag['foo']['bar'] = 5
    assert len(model.tag.keys()) == 1
    assert len(model.tag['foo'].keys()) == 1
    assert model.tag['foo']['bar'] == 5
    # Assigning under 'foo' must not create a top-level 'bar'.
    assert 'bar' not in model.tag
    model.tag['bar']['baz'] = 3
    assert 'bar' in model.tag
    assert 'baz' in model.tag['bar']
    assert len(model.tag.keys()) == 2
[ "def", "test_tag", "(", ")", ":", "class", "DummyModel", "(", "Model", ",", ")", ":", "x", "=", "DummyModel", "(", ")", "x", ".", "tag", "[", "'foo'", "]", "[", "'bar'", "]", "=", "5", "assert", "(", "len", "(", "x", ".", "tag", ".", "keys", ...
test that the tag attribute works correctly .
train
false
26,908
def convert_range_args(method):
    """Decorator converting A1-notation range arguments to row/col notation.

    If the first positional argument is a cell or range string such as
    'A1' or 'A1:B2', it is expanded to (row_1, col_1, row_2, col_2)
    before *method* is called; numeric first arguments pass through
    unchanged.
    """
    from functools import wraps

    @wraps(method)  # preserve the wrapped method's name and docstring
    def cell_wrapper(self, *args, **kwargs):
        try:
            if len(args):
                int(args[0])
        except ValueError:
            # First argument is A1 notation, e.g. 'A1' or 'A1:B2'.
            if ':' in args[0]:
                cell_1, cell_2 = args[0].split(':')
                row_1, col_1 = xl_cell_to_rowcol(cell_1)
                row_2, col_2 = xl_cell_to_rowcol(cell_2)
            else:
                row_1, col_1 = xl_cell_to_rowcol(args[0])
                row_2, col_2 = row_1, col_1
            new_args = [row_1, col_1, row_2, col_2]
            new_args.extend(args[1:])
            args = new_args
        return method(self, *args, **kwargs)

    return cell_wrapper
[ "def", "convert_range_args", "(", "method", ")", ":", "def", "cell_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "if", "len", "(", "args", ")", ":", "int", "(", "args", "[", "0", "]", ")", "except", "ValueError...
decorator function to convert a1 notation in range method calls to the default row/col notation .
train
false
26,909
def firewall(zones=None, interfaces=None, policy=None, rules=None,
             routestopped=None, masq=None):
    """Ensure a shorewall firewall is configured (Debian family only).

    Writes each configuration section, restarts shorewall when anything
    changed, and enables it at boot.
    """
    family = distrib_family()
    if family != 'debian':
        raise UnsupportedFamily(supported=['debian'])
    require_deb_package('shorewall')
    with watch(CONFIG_FILES) as config:
        _zone_config(zones)
        _interfaces_config(interfaces)
        _policy_config(policy)
        _rules_config(rules)
        _routestopped_config(routestopped)
        _masq_config(masq)
    if config.changed:
        puts('Shorewall configuration changed')
        if is_started():
            restart('shorewall')
    # Enable shorewall at boot.
    with settings(hide('running'), shell_env()):
        sed('/etc/default/shorewall', 'startup=0', 'startup=1', use_sudo=True)
[ "def", "firewall", "(", "zones", "=", "None", ",", "interfaces", "=", "None", ",", "policy", "=", "None", ",", "rules", "=", "None", ",", "routestopped", "=", "None", ",", "masq", "=", "None", ")", ":", "family", "=", "distrib_family", "(", ")", "if"...
ensure that a firewall is configured .
train
false
26,910
def stop_volume(name, force=False):
    """Stop a gluster volume.

    Returns False for a non-existent volume, True when already stopped,
    otherwise the result of the gluster 'volume stop' command.
    """
    volinfo = info()
    if name not in volinfo:
        log.error('Cannot stop non-existing volume {0}'.format(name))
        return False
    if int(volinfo[name]['status']) != 1:
        log.warning('Attempt to stop already stopped volume {0}'.format(name))
        return True
    command = 'volume stop {0}'.format(name)
    if force:
        command += ' force'
    return _gluster(command)
[ "def", "stop_volume", "(", "name", ",", "force", "=", "False", ")", ":", "volinfo", "=", "info", "(", ")", "if", "(", "name", "not", "in", "volinfo", ")", ":", "log", ".", "error", "(", "'Cannot stop non-existing volume {0}'", ".", "format", "(", "name",...
stop a gluster volume .
train
true
26,911
def parse_link_header(instr):
    """Parse an HTTP Link header value into {url: {param: value}}."""
    parsed = {}
    if not instr:
        return parsed
    for link in (h.strip() for h in link_splitter.findall(instr)):
        url, params = link.split('>', 1)
        url = url[1:]  # drop the leading '<'
        param_dict = {}
        for param in _splitstring(params, PARAMETER, '\\s*;\\s*'):
            try:
                name, value = param.split('=', 1)
                param_dict[name.lower()] = _unquotestring(value)
            except ValueError:
                # Parameter without a value.
                param_dict[param.lower()] = None
        parsed[url] = param_dict
    return parsed
[ "def", "parse_link_header", "(", "instr", ")", ":", "out", "=", "{", "}", "if", "(", "not", "instr", ")", ":", "return", "out", "for", "link", "in", "[", "h", ".", "strip", "(", ")", "for", "h", "in", "link_splitter", ".", "findall", "(", "instr", ...
given a link-value .
train
true
26,916
def splu(A, permc_spec=None, diag_pivot_thresh=None, drop_tol=None,
         relax=None, panel_size=None, options=dict()):
    """Compute the LU decomposition of a sparse square matrix via SuperLU.

    *A* is converted to CSC format when necessary (with an efficiency
    warning). (drop_tol is accepted for interface compatibility but is
    not forwarded here.) Raises ValueError for non-square matrices.
    """
    if not isspmatrix_csc(A):
        A = csc_matrix(A)
        warn('splu requires CSC matrix format', SparseEfficiencyWarning)
    A.sort_indices()
    A = A.asfptype()  # upcast to a supported floating-point format
    M, N = A.shape
    if M != N:
        raise ValueError('can only factor square matrices')
    _options = dict(DiagPivotThresh=diag_pivot_thresh, ColPerm=permc_spec,
                    PanelSize=panel_size, Relax=relax)
    if options is not None:
        _options.update(options)
    return _superlu.gstrf(N, A.nnz, A.data, A.indices, A.indptr,
                          ilu=False, options=_options)
[ "def", "splu", "(", "A", ",", "permc_spec", "=", "None", ",", "diag_pivot_thresh", "=", "None", ",", "drop_tol", "=", "None", ",", "relax", "=", "None", ",", "panel_size", "=", "None", ",", "options", "=", "dict", "(", ")", ")", ":", "if", "(", "no...
compute the lu decomposition of a sparse .
train
false
26,917
def dnn_version():
    """Return (and cache on the function object) the cuDNN version in use."""
    if not dnn_available():
        raise Exception("We can't determine the cudnn version as it is not available",
                        dnn_available.msg)
    if dnn_version.v is None:
        # Compile a tiny function once to query the version, then cache it.
        query = theano.function([], DnnVersion()(),
                                theano.Mode(optimizer=None), profile=False)
        dnn_version.v = query()
    return dnn_version.v
[ "def", "dnn_version", "(", ")", ":", "if", "(", "not", "dnn_available", "(", ")", ")", ":", "raise", "Exception", "(", "\"We can't determine the cudnn version as it is not available\"", ",", "dnn_available", ".", "msg", ")", "if", "(", "dnn_version", ".", "v", "...
return the current cudnn version we compile with .
train
false
26,919
def test_launch():
    """Simply opens and closes the jobbrowser."""
    client = logged_in_client()
    client.click(id='hue-jobbrowser-menu')
    close_button = '(".close")[0]'
    client.waits.forElement(jquery=close_button, timeout='300')
    client.click(jquery=close_button)
[ "def", "test_launch", "(", ")", ":", "client", "=", "logged_in_client", "(", ")", "client", ".", "click", "(", "id", "=", "'hue-jobbrowser-menu'", ")", "client", ".", "waits", ".", "forElement", "(", "jquery", "=", "'(\".close\")[0]'", ",", "timeout", "=", ...
simply opens and closes jobbrowser .
train
false
26,922
def get_commit():
    """Return the git commit id, preferring the bundled GIT-COMMIT file.

    Falls back to `git rev-parse HEAD` when the file is absent, and to
    'unknown' when git itself is unavailable or fails.
    """
    commit_file = os.path.join(directories.dataDir, 'GIT-COMMIT')
    if not os.path.exists(commit_file):
        try:
            return subprocess.check_output('git rev-parse HEAD'.split()).strip()
        # Catch the specific failures (git missing, non-zero exit) instead
        # of the original bare except, which also swallowed SystemExit and
        # KeyboardInterrupt.
        except (OSError, subprocess.CalledProcessError):
            return 'unknown'
    # Context manager guarantees the file handle is closed.
    with open(commit_file, 'rb') as fin:
        return fin.read().strip()
[ "def", "get_commit", "(", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "directories", ".", "dataDir", ",", "'GIT-COMMIT'", ")", ")", ")", ":", "try", ":", "return", "subprocess", ".", "chec...
loads the git commit id from the bundled version file .
train
false
26,924
def set_default_role(doc, method):
    """Assign Customer/Supplier/Student roles to *doc* from linked records.

    Guarded by frappe.flags.setting_role to avoid re-entrancy.
    """
    if frappe.flags.setting_role:
        return
    contact_name = frappe.get_value(u'Contact', dict(email_id=doc.email))
    if contact_name:
        contact = frappe.get_doc(u'Contact', contact_name)
        for link in contact.links:
            frappe.flags.setting_role = True
            if link.link_doctype == u'Customer':
                doc.add_roles(u'Customer')
            elif link.link_doctype == u'Supplier':
                doc.add_roles(u'Supplier')
    elif frappe.get_value(u'Student', dict(student_email_id=doc.email)):
        doc.add_roles(u'Student')
[ "def", "set_default_role", "(", "doc", ",", "method", ")", ":", "if", "frappe", ".", "flags", ".", "setting_role", ":", "return", "contact_name", "=", "frappe", ".", "get_value", "(", "u'Contact'", ",", "dict", "(", "email_id", "=", "doc", ".", "email", ...
set customer , supplier , or student role for the user based on linked contact or student records .
train
false
26,925
def test_base_modules_regex(pyi_builder):
    """The py3_base_modules exclusion regex must not exclude other modules."""
    source = "\n import resources_testmod\n print('OK')\n "
    pyi_builder.test_source(source)
[ "def", "test_base_modules_regex", "(", "pyi_builder", ")", ":", "pyi_builder", ".", "test_source", "(", "\"\\n import resources_testmod\\n print('OK')\\n \"", ")" ]
verify that the regex for excluding modules listed in py3_base_modules does not exclude other modules .
train
false
26,926
def truth(a):
    """Return True if *a* is truthy, else False.

    Equivalent to bool(a); the conditional-expression form was redundant.
    """
    return bool(a)
[ "def", "truth", "(", "a", ")", ":", "return", "(", "True", "if", "a", "else", "False", ")" ]
return true if a is true .
train
false
26,928
def mksls(fmt, src, dst=None):
    """Convert a kickstart, preseed, or autoyast file to an SLS file.

    Returns None for an unrecognized *fmt*.
    """
    if fmt == 'kickstart':
        return salt.utils.kickstart.mksls(src, dst)
    if fmt == 'preseed':
        return salt.utils.preseed.mksls(src, dst)
    if fmt == 'autoyast':
        return salt.utils.yast.mksls(src, dst)
[ "def", "mksls", "(", "fmt", ",", "src", ",", "dst", "=", "None", ")", ":", "if", "(", "fmt", "==", "'kickstart'", ")", ":", "return", "salt", ".", "utils", ".", "kickstart", ".", "mksls", "(", "src", ",", "dst", ")", "elif", "(", "fmt", "==", "...
convert a kickstart , preseed , or autoyast file to an sls file .
train
true
26,930
def _do_self_dots_subset(intrad, rmags, rlens, cosmags, ws, volume, lut,
                         n_fact, ch_type, idx):
    """Parallelization helper: fill a symmetric subset of the products matrix."""
    n = len(rmags)
    products = np.zeros((n, n))
    for ci1 in idx:
        ci2 = ci1 + 1
        res = _fast_sphere_dot_r0(
            intrad, rmags[ci1], rmags[:ci2], rlens[ci1], rlens[:ci2],
            cosmags[ci1], cosmags[:ci2], ws[ci1], ws[:ci2], volume, lut,
            n_fact, ch_type)
        # Write both the row and the column to keep the matrix symmetric.
        products[ci1, :ci2] = res
        products[:ci2, ci1] = res
    return products
[ "def", "_do_self_dots_subset", "(", "intrad", ",", "rmags", ",", "rlens", ",", "cosmags", ",", "ws", ",", "volume", ",", "lut", ",", "n_fact", ",", "ch_type", ",", "idx", ")", ":", "products", "=", "np", ".", "zeros", "(", "(", "len", "(", "rmags", ...
helper for parallelization .
train
false
26,931
def paired_distances(X, Y, metric='euclidean', **kwds):
    """Compute the paired distances between rows of X and rows of Y.

    *metric* is either a key of PAIRED_DISTANCES or a callable taking two
    samples; any other value raises ValueError.
    """
    if metric in PAIRED_DISTANCES:
        distance_func = PAIRED_DISTANCES[metric]
        return distance_func(X, Y)
    if callable(metric):
        X, Y = check_paired_arrays(X, Y)
        distances = np.zeros(len(X))
        for i in range(len(X)):
            distances[i] = metric(X[i], Y[i])
        return distances
    raise ValueError('Unknown distance %s' % metric)
[ "def", "paired_distances", "(", "X", ",", "Y", ",", "metric", "=", "'euclidean'", ",", "**", "kwds", ")", ":", "if", "(", "metric", "in", "PAIRED_DISTANCES", ")", ":", "func", "=", "PAIRED_DISTANCES", "[", "metric", "]", "return", "func", "(", "X", ","...
computes the paired distances between x and y .
train
false
26,932
def view_lookup(request, uri):
    """Look up *uri* and return (resource_name, matchdict) for its route.

    Raises ValueError when the URI does not match any route.
    """
    api_prefix = '/%s' % request.upath_info.split('/')[1]
    path = _encoded(api_prefix + uri)
    routes_mapper = request.registry.queryUtility(IRoutesMapper)
    fakerequest = Request.blank(path=path)
    info = routes_mapper(fakerequest)
    matchdict, route = info['match'], info['route']
    if route is None:
        raise ValueError('URI has no route')
    # Derive the resource name from the route name.
    resource_name = route.name.replace('-record', '').replace('-collection', '')
    return (resource_name, matchdict)
[ "def", "view_lookup", "(", "request", ",", "uri", ")", ":", "api_prefix", "=", "(", "'/%s'", "%", "request", ".", "upath_info", ".", "split", "(", "'/'", ")", "[", "1", "]", ")", "path", "=", "_encoded", "(", "(", "api_prefix", "+", "uri", ")", ")"...
look-up the specified uri and return the associated resource name along the match dict .
train
false
26,933
def mute(fn):
    """Decorator: silence stdout produced by *fn* for the current thread.

    Pushes a dummy stream onto the per-thread output stack for the
    duration of the call and pops it afterwards.
    """
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        streams = thread_output_stream.setdefault(threading.current_thread(), [])
        streams.append(DummyFile())
        try:
            return fn(self, *args, **kwargs)
        finally:
            thread_output_stream[threading.current_thread()].pop()
    return wrapper
[ "def", "mute", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "thread_output_stream", ".", "setdefault", "(", "threading", ".", "current_thread", "(", ")", ",", "[", ...
decorate a function that prints to stdout .
train
false
26,934
def _formatXml(root): for elem in root.getiterator(): if ((len(elem) > 0) and ((not elem.text) or (not elem.text.strip()))): elem.text = '\n' if ((not elem.tail) or (not elem.tail.strip())): elem.tail = '\n'
[ "def", "_formatXml", "(", "root", ")", ":", "for", "elem", "in", "root", ".", "getiterator", "(", ")", ":", "if", "(", "(", "len", "(", "elem", ")", ">", "0", ")", "and", "(", "(", "not", "elem", ".", "text", ")", "or", "(", "not", "elem", "....
a helper to make the lrs output look nicer .
train
false
26,935
def oracle_passwd(password, salt, uppercase=True):
    """Oracle 11g-style password hash: 'S:' + SHA1(password||salt) + salt."""
    binsalt = hexdecode(salt)
    digest = sha1(utf8encode(password) + binsalt).hexdigest()
    result = 's:%s%s' % (digest, salt)
    return result.upper() if uppercase else result.lower()
[ "def", "oracle_passwd", "(", "password", ",", "salt", ",", "uppercase", "=", "True", ")", ":", "binsalt", "=", "hexdecode", "(", "salt", ")", "retVal", "=", "(", "'s:%s%s'", "%", "(", "sha1", "(", "(", "utf8encode", "(", "password", ")", "+", "binsalt"...
reference(s): URL URL URL .
train
false
26,937
def validate_course_id(course_id):
    """Check that *course_id* is a valid key and exists in the modulestore.

    Raises serializers.ValidationError otherwise.
    """
    try:
        course_key = CourseKey.from_string(unicode(course_id))
    except InvalidKeyError:
        raise serializers.ValidationError(
            _('{course_id} is not a valid course key.').format(
                course_id=course_id))
    if not modulestore().has_course(course_key):
        raise serializers.ValidationError(
            _('Course {course_id} does not exist.').format(
                course_id=course_id))
[ "def", "validate_course_id", "(", "course_id", ")", ":", "try", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "unicode", "(", "course_id", ")", ")", "except", "InvalidKeyError", ":", "raise", "serializers", ".", "ValidationError", "(", "_", "("...
check that course id is valid and exists in modulestore .
train
false
26,938
def makeImageAuto(inarray):
    """Combine float_uint8 and image2array: scale floats to uint8, then convert."""
    scaled = float_uint8(inarray)
    return image2array(scaled)
[ "def", "makeImageAuto", "(", "inarray", ")", ":", "return", "image2array", "(", "float_uint8", "(", "inarray", ")", ")" ]
combines float_uint8 and image2array operations ie .
train
false
26,940
def is_flat_space(space):
    """Return True for elementary spaces and non-nested CompositeSpaces.

    Raises TypeError when *space* is not a Space at all.
    """
    if isinstance(space, CompositeSpace):
        # Flat iff no component is itself composite.
        return all(not isinstance(sub, CompositeSpace)
                   for sub in space.components)
    if not isinstance(space, Space):
        raise TypeError('space is not a Space: %s (%s)' % (space, type(space)))
    return True
[ "def", "is_flat_space", "(", "space", ")", ":", "if", "isinstance", "(", "space", ",", "CompositeSpace", ")", ":", "for", "sub_space", "in", "space", ".", "components", ":", "if", "isinstance", "(", "sub_space", ",", "CompositeSpace", ")", ":", "return", "...
returns true for elementary spaces and non-nested compositespaces parameters space : writeme returns writeme .
train
false
26,941
def blob_from_path_and_stat(fs_path, st):
    """Create a Blob from a filesystem path and its stat result.

    Symlinks store their link target as data; regular files store their
    contents.
    """
    assert isinstance(fs_path, bytes)
    blob = Blob()
    if stat.S_ISLNK(st.st_mode):
        blob.data = os.readlink(fs_path)
    else:
        with open(fs_path, 'rb') as f:
            blob.data = f.read()
    return blob
[ "def", "blob_from_path_and_stat", "(", "fs_path", ",", "st", ")", ":", "assert", "isinstance", "(", "fs_path", ",", "bytes", ")", "blob", "=", "Blob", "(", ")", "if", "(", "not", "stat", ".", "S_ISLNK", "(", "st", ".", "st_mode", ")", ")", ":", "with...
create a blob from a path and a stat object .
train
false
26,942
def get_interesting_mapping_fields(mapping_data, mapping_headers):
    """Return headers for fields that are useful to color by in plots.

    A field is "interesting" when it has more than one distinct value but
    fewer distinct values than there are samples (i.e. neither constant
    nor unique per sample). The unused `num_cols` local was removed.
    """
    num_samples = len(mapping_data)
    result = []
    # Transpose so each row of the transposed array is one field's column.
    for header, column in zip(mapping_headers, array(mapping_data).T):
        num_distinct = len(set(column))
        if 1 < num_distinct < num_samples:
            result.append(header)
    return result
[ "def", "get_interesting_mapping_fields", "(", "mapping_data", ",", "mapping_headers", ")", ":", "result", "=", "[", "]", "num_samples", "=", "len", "(", "mapping_data", ")", "num_cols", "=", "len", "(", "mapping_headers", ")", "transposed_data", "=", "array", "(...
returns headers for fields that are useful to color by in plots these fields are the ones that contain greater than one value and less values than the number of entries .
train
false
26,943
@plugins.notify_info_yielded(u'trackinfo_received')
def tracks_for_id(track_id):
    """Yield candidate tracks for an ID: MusicBrainz first, then plugins."""
    candidate = track_for_mbid(track_id)
    if candidate:
        yield candidate
    for candidate in plugins.track_for_id(track_id):
        if candidate:
            yield candidate
[ "@", "plugins", ".", "notify_info_yielded", "(", "u'trackinfo_received'", ")", "def", "tracks_for_id", "(", "track_id", ")", ":", "t", "=", "track_for_mbid", "(", "track_id", ")", "if", "t", ":", "(", "yield", "t", ")", "for", "t", "in", "plugins", ".", ...
get a list of tracks for an id .
train
false
26,944
def new_subreddit(sr):
    """Queue the new subreddit on amqp (no precomputed queries here yet)."""
    amqp.add_item('new_subreddit', sr._fullname)
[ "def", "new_subreddit", "(", "sr", ")", ":", "amqp", ".", "add_item", "(", "'new_subreddit'", ",", "sr", ".", "_fullname", ")" ]
no precomputed queries here yet .
train
false
26,945
def stop_cover_tilt(hass, entity_id=None):
    """Stop all or a specified cover tilt."""
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    else:
        data = None
    hass.services.call(DOMAIN, SERVICE_STOP_COVER_TILT, data)
[ "def", "stop_cover_tilt", "(", "hass", ",", "entity_id", "=", "None", ")", ":", "data", "=", "(", "{", "ATTR_ENTITY_ID", ":", "entity_id", "}", "if", "entity_id", "else", "None", ")", "hass", ".", "services", ".", "call", "(", "DOMAIN", ",", "SERVICE_STO...
stop all or specified cover tilt .
train
false
26,946
def compare_branches():
    """Launch and return a dialog for comparing a pair of branches."""
    dialog = CompareBranchesDialog(qtutils.active_window())
    dialog.show()
    return dialog
[ "def", "compare_branches", "(", ")", ":", "view", "=", "CompareBranchesDialog", "(", "qtutils", ".", "active_window", "(", ")", ")", "view", ".", "show", "(", ")", "return", "view" ]
launches a dialog for comparing a pair of branches .
train
false
26,947
def p_command_next(p):
    # Grammar rule: command : NEXT ID
    # NOTE(review): yacc normally reads the rule from the docstring; the
    # original's docstring is not visible in this source, so the rule is
    # recorded as a comment only — confirm against the original file.
    p[0] = ('NEXT', p[2])
[ "def", "p_command_next", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "'NEXT'", ",", "p", "[", "2", "]", ")" ]
command : next id .
train
false
26,948
def CDL3BLACKCROWS(barDs, count):
    """Three Black Crows candlestick pattern (TA-Lib wrapper)."""
    indicator = talib.CDL3BLACKCROWS
    return call_talib_with_ohlc(barDs, count, indicator)
[ "def", "CDL3BLACKCROWS", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDL3BLACKCROWS", ")" ]
three black crows .
train
false
26,949
def patch_lib_paths(fname, library_dirs):
    """Rewrite weakly-defined dylib references in *fname* to real locations.

    macOS only: libraries come from _get_libs and are rewritten via
    _install_name_change when found under *library_dirs*.
    """
    if sys.platform != 'darwin':
        return
    for lib in _get_libs(fname):
        # References that are already absolute or @-relative are left alone.
        if lib.startswith(('@', '/')):
            continue
        real_lib = _find_library(lib, library_dirs)
        if real_lib:
            _install_name_change(fname, lib, real_lib)
[ "def", "patch_lib_paths", "(", "fname", ",", "library_dirs", ")", ":", "if", "(", "sys", ".", "platform", "!=", "'darwin'", ")", ":", "return", "libs", "=", "_get_libs", "(", "fname", ")", "for", "lib", "in", "libs", ":", "if", "(", "not", "lib", "."...
load any weakly-defined libraries from their real location - find libraries with otool -l - update with install_name_tool -change .
train
true
26,950
def make_query_context(type, info):
    """Build a "type:info" context string; type is "table" or "design".

    Design ids are coerced to int; unknown types log an error and return ''.
    """
    if type == 'table':
        return '%s:%s' % (type, info)
    if type == 'design':
        return '%s:%s' % (type, int(info))
    LOG.error('Invalid query context type: %s' % (type,))
    return ''
[ "def", "make_query_context", "(", "type", ",", "info", ")", ":", "if", "(", "type", "==", "'table'", ")", ":", "return", "(", "'%s:%s'", "%", "(", "type", ",", "info", ")", ")", "elif", "(", "type", "==", "'design'", ")", ":", "return", "(", "'%s:%...
type is one of "table" and "design" .
train
false
26,951
def shutdown_server_kill_pending_requests(sock, worker_pool, wait_time=2):
    """Shut down a WSGI server, giving in-flight requests a grace period.

    Coroutines still running after *wait_time* seconds are killed, then
    SystemExit is raised to stop the process.
    """
    worker_pool.resize(0)  # stop accepting new work
    sock.close()
    active_requests = worker_pool.running()
    LOG.info('Shutting down. Requests left: %s', active_requests)
    if active_requests > 0:
        eventlet.sleep(wait_time)
    # Copy before iterating: killing mutates the running set.
    for coroutine in worker_pool.coroutines_running.copy():
        eventlet.greenthread.kill(coroutine)
    LOG.info('Exiting...')
    raise SystemExit()
[ "def", "shutdown_server_kill_pending_requests", "(", "sock", ",", "worker_pool", ",", "wait_time", "=", "2", ")", ":", "worker_pool", ".", "resize", "(", "0", ")", "sock", ".", "close", "(", ")", "active_requests", "=", "worker_pool", ".", "running", "(", ")...
custom wsgi server shutdown function which gives outgoing requests some time to finish before killing them .
train
false
26,953
def _get_recently_enrolled_courses(course_enrollments):
    """Filter *course_enrollments* down to active, recently-created ones.

    "Recent" is defined by the DashboardConfiguration time delta.
    """
    seconds = DashboardConfiguration.current().recent_enrollment_time_delta
    cutoff = datetime.datetime.now(UTC) - datetime.timedelta(seconds=seconds)
    return [enrollment for enrollment in course_enrollments
            if enrollment.is_active and enrollment.created > cutoff]
[ "def", "_get_recently_enrolled_courses", "(", "course_enrollments", ")", ":", "seconds", "=", "DashboardConfiguration", ".", "current", "(", ")", ".", "recent_enrollment_time_delta", "time_delta", "=", "(", "datetime", ".", "datetime", ".", "now", "(", "UTC", ")", ...
given a list of enrollments .
train
false
26,954
def pyeapi_result(output):
    """Return the 'result' value from the first entry of pyeapi output."""
    first_entry = output[0]
    return first_entry['result']
[ "def", "pyeapi_result", "(", "output", ")", ":", "return", "output", "[", "0", "]", "[", "'result'", "]" ]
return the result value from the pyeapi output .
train
false
26,955
def is_redirection(status):
    """Return True if *status* is an HTTP redirection code (3xx)."""
    lower, upper = 300, 399
    return lower <= status <= upper
[ "def", "is_redirection", "(", "status", ")", ":", "return", "(", "300", "<=", "status", "<=", "399", ")" ]
check if http status code is redirection .
train
false
26,956
def _check_hash(target_hash_file, **options):
    """Compare the target file's SHA-1 digest with the stored hash.

    Side effect: always (re)writes the companion hash file with the
    current digest. Returns True when the previously stored hash matched.
    """
    source_hash_file = _get_hash_file_path(target_hash_file)
    with open(target_hash_file) as f:
        target_hash_content = hashlib.sha1(f.read().encode('utf8')).hexdigest()
    if os.path.exists(source_hash_file):
        with open(source_hash_file) as f:
            source_hash_content = f.read().strip()
    else:
        source_hash_content = None
    with open(source_hash_file, 'w') as f:
        f.write(target_hash_content)
    return source_hash_content == target_hash_content
[ "def", "_check_hash", "(", "target_hash_file", ",", "**", "options", ")", ":", "source_hash_file", "=", "_get_hash_file_path", "(", "target_hash_file", ")", "with", "open", "(", "target_hash_file", ")", "as", "f", ":", "target_hash_content", "=", "hashlib", ".", ...
this function has a side effect of creating a new hash file or updating the old hash file .
train
false
26,957
def _approximate_mode(class_counts, n_draws, rng):
    """Compute approximate mode of the multivariate hypergeometric.

    Distributes *n_draws* over classes proportionally to *class_counts*,
    flooring each share and then handing out the remaining draws to the
    classes with the largest fractional parts (ties broken at random via
    *rng*). Returns an integer array summing to *n_draws*.
    """
    continuous = (n_draws * class_counts) / class_counts.sum()
    floored = np.floor(continuous)
    need_to_add = int(n_draws - floored.sum())
    if need_to_add > 0:
        remainder = continuous - floored
        # Largest fractional parts first.
        values = np.sort(np.unique(remainder))[::-1]
        for value in values:
            (inds,) = np.where(remainder == value)
            # Add at most as many as remain to be distributed.
            add_now = min(len(inds), need_to_add)
            inds = choice(inds, size=add_now, replace=False, random_state=rng)
            floored[inds] += 1
            need_to_add -= add_now
            if need_to_add == 0:
                break
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; plain int
    # is the documented replacement.
    return floored.astype(int)
[ "def", "_approximate_mode", "(", "class_counts", ",", "n_draws", ",", "rng", ")", ":", "continuous", "=", "(", "(", "n_draws", "*", "class_counts", ")", "/", "class_counts", ".", "sum", "(", ")", ")", "floored", "=", "np", ".", "floor", "(", "continuous"...
computes approximate mode of multivariate hypergeometric .
train
false
26,958
def get_cipher():
    """Return (key_length, decrypt_function) for NaCl secret-box decryption."""
    from libnacl import crypto_secretbox_KEYBYTES as KEYLEN
    from libnacl.secret import SecretBox

    def decrypt(ciphertext, key):
        """Decrypt ciphertext using key."""
        box = SecretBox(key)
        return box.decrypt(ciphertext)

    return (KEYLEN, decrypt)
[ "def", "get_cipher", "(", ")", ":", "from", "libnacl", "import", "crypto_secretbox_KEYBYTES", "as", "KEYLEN", "from", "libnacl", ".", "secret", "import", "SecretBox", "def", "decrypt", "(", "ciphertext", ",", "key", ")", ":", "return", "SecretBox", "(", "key",...
return decryption function and length of key .
train
false
26,959
def _LookupTargets(names, mapping): return [mapping[name] for name in names if (name in mapping)]
[ "def", "_LookupTargets", "(", "names", ",", "mapping", ")", ":", "return", "[", "mapping", "[", "name", "]", "for", "name", "in", "names", "if", "(", "name", "in", "mapping", ")", "]" ]
returns a list of the mapping[name] for each value in |names| that is in |mapping| .
train
false
26,960
@handle_response_format
@treeio_login_required
@_process_mass_lead_form
def lead_index_assigned(request, response_format='html'):
    """Render leads owned by the current user, with filters and mass-action form."""
    query = Q(status__hidden=False, assigned=request.user.profile)
    if request.GET:
        # An explicit status filter replaces the default query entirely.
        if ('status' in request.GET) and request.GET['status']:
            query = _get_filter_query(request.GET)
        else:
            query = query & _get_filter_query(request.GET)
    statuses = Object.filter_by_request(request, SaleStatus.objects, mode='r')
    leads = Object.filter_by_request(request, Lead.objects.filter(query), mode='r')
    filters = LeadFilterForm(request.user.profile, '', request.GET)
    massform = LeadMassActionForm(request.user.profile)
    context = {'leads': leads, 'filters': filters,
               'massform': massform, 'statuses': statuses}
    return render_to_response('sales/lead_index_assigned', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "_process_mass_lead_form", "def", "lead_index_assigned", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "query", "=", "Q", "(", "status__hidden", "=", "False", ",", "assigned", "=", ...
leads owned by current user .
train
false
26,961
def decode_request(orig_offset, data):
    """Try to decode a SOCKS5 request starting at *orig_offset* in *data*.

    Returns (new_offset, Request) on success, or (orig_offset, None) when
    more bytes are required to complete the request.
    """
    offset = orig_offset
    # Fixed header: version, command, reserved, address type (4 bytes).
    if len(data) - offset < 4:
        return (orig_offset, None)
    version, cmd, rsv, address_type = struct.unpack_from('!BBBB', data, offset)
    offset += 4
    assert version == SOCKS_VERSION, (version, SOCKS_VERSION)
    assert rsv == 0
    offset, destination_address = __decode_address(address_type, offset, data)
    if not destination_address:
        return (orig_offset, None)
    # Destination port: 2 bytes, network byte order.
    if len(data) - offset < 2:
        return (orig_offset, None)
    (destination_port,) = struct.unpack_from('!H', data, offset)
    offset += 2
    return (offset, Request(version, cmd, rsv, address_type,
                            destination_address, destination_port))
[ "def", "decode_request", "(", "orig_offset", ",", "data", ")", ":", "offset", "=", "orig_offset", "if", "(", "(", "len", "(", "data", ")", "-", "offset", ")", "<", "4", ")", ":", "return", "(", "orig_offset", ",", "None", ")", "(", "version", ",", ...
try to decode a socks5 request .
train
false
26,963
def is_object_transient_sysmeta(key):
    """True if *key* starts with, and is longer than, the transient-sysmeta prefix."""
    prefix = OBJECT_TRANSIENT_SYSMETA_PREFIX
    if len(key) <= len(prefix):
        return False
    return key.lower().startswith(prefix)
[ "def", "is_object_transient_sysmeta", "(", "key", ")", ":", "if", "(", "len", "(", "key", ")", "<=", "len", "(", "OBJECT_TRANSIENT_SYSMETA_PREFIX", ")", ")", ":", "return", "False", "return", "key", ".", "lower", "(", ")", ".", "startswith", "(", "OBJECT_T...
tests if a header key starts with and is longer than the prefix for object transient system metadata .
train
false
26,964
def show_employee(emp_id, fields=None):
    """Fetch the requested fields for a single employee.

    When *fields* is None a default field list is requested. Returns a
    dict keyed by field id, always including 'id'.
    """
    if fields is None:
        fields = ','.join((
            'canUploadPhoto', 'department', 'displayName', 'firstName',
            'id', 'jobTitle', 'lastName', 'location', 'mobilePhone',
            'nickname', 'photoUploaded', 'photoUrl', 'workEmail',
            'workPhone', 'workPhoneExtension'))
    (status, result) = _query(action='employees', command=emp_id,
                              args={'fields': fields})
    root = ET.fromstring(result)
    # The redundant initial `ret = {}` assignment was removed.
    ret = {'id': emp_id}
    # Element.getchildren() was removed in Python 3.9; iterating the
    # element directly yields the same children.
    for item in root:
        # Each child's first attribute value names the field.
        ret[item.items()[0][1]] = item.text
    return ret
[ "def", "show_employee", "(", "emp_id", ",", "fields", "=", "None", ")", ":", "ret", "=", "{", "}", "if", "(", "fields", "is", "None", ")", ":", "fields", "=", "','", ".", "join", "(", "(", "'canUploadPhoto'", ",", "'department'", ",", "'displayName'", ...
show the requested fields for a single employee of this company .
train
true
26,965
def obtain_os_version(show_ver):
    """Extract the IOS version from 'show version' output, or None if absent."""
    pattern = 'Cisco IOS Software.*Version (.+?),'
    match = re.search(pattern, show_ver)
    return match.group(1) if match else None
[ "def", "obtain_os_version", "(", "show_ver", ")", ":", "match", "=", "re", ".", "search", "(", "'Cisco IOS Software.*Version (.+?),'", ",", "show_ver", ")", "if", "match", ":", "return", "match", ".", "group", "(", "1", ")", "else", ":", "return", "None" ]
obtain the os version from the show version output returns os version string or none .
train
false
26,966
def localize(value): return force_unicode(formats.localize(value, use_l10n=True))
[ "def", "localize", "(", "value", ")", ":", "return", "force_unicode", "(", "formats", ".", "localize", "(", "value", ",", "use_l10n", "=", "True", ")", ")" ]
checks if value is a localizable type and returns it formatted as a string using current locale format .
train
false
26,967
def term(name): cmd = 's6-svc -t {0}'.format(_service_path(name)) return (not __salt__['cmd.retcode'](cmd))
[ "def", "term", "(", "name", ")", ":", "cmd", "=", "'s6-svc -t {0}'", ".", "format", "(", "_service_path", "(", "name", ")", ")", "return", "(", "not", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ")", ")" ]
send a term to service via daemontools cli example: .
train
false
26,969
def draw_dendrogram(node, imlist, filename='clusters.jpg'): rows = (node.get_height() * 20) cols = 1200 s = (float((cols - 150)) / node.get_depth()) im = Image.new('RGB', (cols, rows), (255, 255, 255)) draw = ImageDraw.Draw(im) draw.line((0, (rows / 2), 20, (rows / 2)), fill=(0, 0, 0)) node.draw(draw, 20, (rows / 2), s, imlist, im) im.save(filename) im.show()
[ "def", "draw_dendrogram", "(", "node", ",", "imlist", ",", "filename", "=", "'clusters.jpg'", ")", ":", "rows", "=", "(", "node", ".", "get_height", "(", ")", "*", "20", ")", "cols", "=", "1200", "s", "=", "(", "float", "(", "(", "cols", "-", "150"...
draw a cluster dendrogram and save to a file .
train
false
26,970
def log_request(handler): status = handler.get_status() request = handler.request if ((status == 304) or ((status < 300) and isinstance(handler, StaticFileHandler))): log_method = access_log.debug elif (status < 400): log_method = access_log.info elif (status < 500): log_method = access_log.warning else: log_method = access_log.error uri = _scrub_uri(request.uri) headers = _scrub_headers(request.headers) request_time = (1000.0 * handler.request.request_time()) user = handler.get_current_user() ns = dict(status=status, method=request.method, ip=request.remote_ip, uri=uri, request_time=request_time, user=(user.name if user else '')) msg = '{status} {method} {uri} ({user}@{ip}) {request_time:.2f}ms' if ((status >= 500) and (status != 502)): log_method(json.dumps(headers, indent=2)) log_method(msg.format(**ns))
[ "def", "log_request", "(", "handler", ")", ":", "status", "=", "handler", ".", "get_status", "(", ")", "request", "=", "handler", ".", "request", "if", "(", "(", "status", "==", "304", ")", "or", "(", "(", "status", "<", "300", ")", "and", "isinstanc...
log a bit more information about each request than tornados default - move static file get success to debug-level - get proxied ip instead of proxy ip - log referer for redirect and failed requests - log user-agent for failed requests .
train
false
26,971
def user_id_exists(user_id, context): model = context['model'] session = context['session'] result = session.query(model.User).get(user_id) if (not result): raise Invalid(('%s: %s' % (_('Not found'), _('User')))) return user_id
[ "def", "user_id_exists", "(", "user_id", ",", "context", ")", ":", "model", "=", "context", "[", "'model'", "]", "session", "=", "context", "[", "'session'", "]", "result", "=", "session", ".", "query", "(", "model", ".", "User", ")", ".", "get", "(", ...
raises invalid if the given user_id does not exist in the model given in the context .
train
false
26,972
def mini(description, applicationName='PythonMini', noteType='Message', title='Mini Message', applicationIcon=None, hostname='localhost', password=None, port=23053, sticky=False, priority=None, callback=None, notificationIcon=None, identifier=None, notifierFactory=GrowlNotifier): try: growl = notifierFactory(applicationName=applicationName, notifications=[noteType], defaultNotifications=[noteType], applicationIcon=applicationIcon, hostname=hostname, password=password, port=port) result = growl.register() if (result is not True): return result return growl.notify(noteType=noteType, title=title, description=description, icon=notificationIcon, sticky=sticky, priority=priority, callback=callback, identifier=identifier) except Exception: logger.exception('Growl error')
[ "def", "mini", "(", "description", ",", "applicationName", "=", "'PythonMini'", ",", "noteType", "=", "'Message'", ",", "title", "=", "'Mini Message'", ",", "applicationIcon", "=", "None", ",", "hostname", "=", "'localhost'", ",", "password", "=", "None", ",",...
single notification function simple notification function in one line .
train
false
26,973
@apply_to_text_file def minify_lines(data): return data
[ "@", "apply_to_text_file", "def", "minify_lines", "(", "data", ")", ":", "return", "data" ]
do nothing -- deprecated filter .
train
false
26,974
@task def copy_release_files(): with cd('/home/vagrant/repos/sympy'): run('mkdir -p /vagrant/release') run('cp dist/* /vagrant/release/')
[ "@", "task", "def", "copy_release_files", "(", ")", ":", "with", "cd", "(", "'/home/vagrant/repos/sympy'", ")", ":", "run", "(", "'mkdir -p /vagrant/release'", ")", "run", "(", "'cp dist/* /vagrant/release/'", ")" ]
move the release files from the vm to release/ locally .
train
false
26,976
def categorical_accuracy(predictions, targets, top_k=1): if (targets.ndim == predictions.ndim): targets = theano.tensor.argmax(targets, axis=(-1)) elif (targets.ndim != (predictions.ndim - 1)): raise TypeError('rank mismatch between targets and predictions') if (top_k == 1): top = theano.tensor.argmax(predictions, axis=(-1)) return theano.tensor.eq(top, targets) else: top = theano.tensor.argsort(predictions, axis=(-1)) top = top[([slice(None) for _ in range((top.ndim - 1))] + [slice((- top_k), None)])] targets = theano.tensor.shape_padaxis(targets, axis=(-1)) return theano.tensor.any(theano.tensor.eq(top, targets), axis=(-1))
[ "def", "categorical_accuracy", "(", "predictions", ",", "targets", ",", "top_k", "=", "1", ")", ":", "if", "(", "targets", ".", "ndim", "==", "predictions", ".", "ndim", ")", ":", "targets", "=", "theano", ".", "tensor", ".", "argmax", "(", "targets", ...
computes the categorical accuracy between predictions and targets .
train
false
26,977
def _slugify(value): import unicodedata value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode() value = _slugify_strip_re.sub('', value).strip().lower() return _slugify_hyphenate_re.sub('-', value)
[ "def", "_slugify", "(", "value", ")", ":", "import", "unicodedata", "value", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "value", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ".", "decode", "(", ")", "value", "=", "_slugify_str...
normalizes string .
train
false
26,978
@require_GET def pdt(request, item_check_callable=None, template='pdt/pdt.html', context=None): context = (context or {}) pdt_obj = None txn_id = request.GET.get('tx') failed = False if (txn_id is not None): try: pdt_obj = PayPalPDT.objects.get(txn_id=txn_id) except PayPalPDT.DoesNotExist: pass if (pdt_obj is None): form = PayPalPDTForm(request.GET) if form.is_valid(): try: pdt_obj = form.save(commit=False) except Exception as e: error = repr(e) failed = True else: error = form.errors failed = True if failed: pdt_obj = PayPalPDT() pdt_obj.set_flag(('Invalid form. %s' % error)) pdt_obj.initialize(request) if (not failed): pdt_obj.verify(item_check_callable) else: pass context.update({'failed': failed, 'pdt_obj': pdt_obj}) return render_to_response(template, context, RequestContext(request))
[ "@", "require_GET", "def", "pdt", "(", "request", ",", "item_check_callable", "=", "None", ",", "template", "=", "'pdt/pdt.html'", ",", "context", "=", "None", ")", ":", "context", "=", "(", "context", "or", "{", "}", ")", "pdt_obj", "=", "None", "txn_id...
payment data transfer implementation: URL .
train
false
26,980
def _get_fname(fname): if ('-#-' in fname): fname = fname.split('-#-')[0] else: fname = op.basename(fname) fname = (' ... %s' % fname) return fname
[ "def", "_get_fname", "(", "fname", ")", ":", "if", "(", "'-#-'", "in", "fname", ")", ":", "fname", "=", "fname", ".", "split", "(", "'-#-'", ")", "[", "0", "]", "else", ":", "fname", "=", "op", ".", "basename", "(", "fname", ")", "fname", "=", ...
get fname without -#- .
train
false
26,981
def strip_object_transient_sysmeta_prefix(key): return key[len(OBJECT_TRANSIENT_SYSMETA_PREFIX):]
[ "def", "strip_object_transient_sysmeta_prefix", "(", "key", ")", ":", "return", "key", "[", "len", "(", "OBJECT_TRANSIENT_SYSMETA_PREFIX", ")", ":", "]" ]
removes the object transient system metadata prefix from the start of a header key .
train
false
26,982
def _lstrip_word(word, prefix): if str(word).startswith(prefix): return str(word)[len(prefix):] return word
[ "def", "_lstrip_word", "(", "word", ",", "prefix", ")", ":", "if", "str", "(", "word", ")", ".", "startswith", "(", "prefix", ")", ":", "return", "str", "(", "word", ")", "[", "len", "(", "prefix", ")", ":", "]", "return", "word" ]
return a copy of the string after the specified prefix was removed from the beginning of the string .
train
false
26,984
def rosen_hess(x): x = atleast_1d(x) H = (numpy.diag(((-400) * x[:(-1)]), 1) - numpy.diag((400 * x[:(-1)]), (-1))) diagonal = numpy.zeros(len(x), dtype=x.dtype) diagonal[0] = (((1200 * (x[0] ** 2)) - (400 * x[1])) + 2) diagonal[(-1)] = 200 diagonal[1:(-1)] = ((202 + (1200 * (x[1:(-1)] ** 2))) - (400 * x[2:])) H = (H + numpy.diag(diagonal)) return H
[ "def", "rosen_hess", "(", "x", ")", ":", "x", "=", "atleast_1d", "(", "x", ")", "H", "=", "(", "numpy", ".", "diag", "(", "(", "(", "-", "400", ")", "*", "x", "[", ":", "(", "-", "1", ")", "]", ")", ",", "1", ")", "-", "numpy", ".", "di...
the hessian matrix of the rosenbrock function .
train
false
26,985
def test_cle_gdb(): mappath = os.path.join(test_location, '../test_data/test_gdb_plugin/procmap') p = angr.Project(binpath, load_options={'gdb_map': mappath}) check_addrs(p)
[ "def", "test_cle_gdb", "(", ")", ":", "mappath", "=", "os", ".", "path", ".", "join", "(", "test_location", ",", "'../test_data/test_gdb_plugin/procmap'", ")", "p", "=", "angr", ".", "Project", "(", "binpath", ",", "load_options", "=", "{", "'gdb_map'", ":",...
test for info proc mappings .
train
false
26,987
def raise_invalid_tag(vlan_str, vlan_range): raise n_exc.NetworkVlanRangeError(vlan_range=vlan_range, error=(_('%s is not a valid VLAN tag') % vlan_str))
[ "def", "raise_invalid_tag", "(", "vlan_str", ",", "vlan_range", ")", ":", "raise", "n_exc", ".", "NetworkVlanRangeError", "(", "vlan_range", "=", "vlan_range", ",", "error", "=", "(", "_", "(", "'%s is not a valid VLAN tag'", ")", "%", "vlan_str", ")", ")" ]
raise an exception for invalid tag .
train
false
26,988
def randint(low, high=None, size=None): if (high is None): lo = 0 hi = low else: lo = low hi = high if (lo >= hi): raise ValueError('low >= high') diff = ((hi - lo) - 1) rs = generator.get_random_state() return (lo + rs.interval(diff, size))
[ "def", "randint", "(", "low", ",", "high", "=", "None", ",", "size", "=", "None", ")", ":", "if", "(", "high", "is", "None", ")", ":", "lo", "=", "0", "hi", "=", "low", "else", ":", "lo", "=", "low", "hi", "=", "high", "if", "(", "lo", ">="...
returns a random integer x with minvalue <= x <= maxvalue .
train
false
26,989
def _parse_cell_type_mapping(mapping): if (mapping is None): return None results = {} for entry in mapping.split(','): if (':' not in entry): raise stem.ProtocolError(("Mappings are expected to be of the form 'key:value', got '%s': %s" % (entry, mapping))) (key, value) = entry.split(':', 1) if (not CELL_TYPE.match(key)): raise stem.ProtocolError(("Key had invalid characters, got '%s': %s" % (key, mapping))) elif (not value.isdigit()): raise stem.ProtocolError(("Values should just be integers, got '%s': %s" % (value, mapping))) results[key] = int(value) return results
[ "def", "_parse_cell_type_mapping", "(", "mapping", ")", ":", "if", "(", "mapping", "is", "None", ")", ":", "return", "None", "results", "=", "{", "}", "for", "entry", "in", "mapping", ".", "split", "(", "','", ")", ":", "if", "(", "':'", "not", "in",...
parses a mapping of the form .
train
false
26,990
def parse_isoduration(duration): result = iso_duration_re.match(duration) if (not result): raise ValueError(_('Only ISO 8601 duration format of the form PT#H#M#S is supported.')) t = 0 t += ((3600 * int(result.group(1))) if result.group(1) else 0) t += ((60 * int(result.group(2))) if result.group(2) else 0) t += (int(result.group(3)) if result.group(3) else 0) return t
[ "def", "parse_isoduration", "(", "duration", ")", ":", "result", "=", "iso_duration_re", ".", "match", "(", "duration", ")", "if", "(", "not", "result", ")", ":", "raise", "ValueError", "(", "_", "(", "'Only ISO 8601 duration format of the form PT#H#M#S is supported...
convert duration in iso 8601 format to second(s) .
train
false
26,991
def _angle_between_quats(x, y): x0 = np.sqrt(np.maximum((((1.0 - (x[..., 0] ** 2)) - (x[..., 1] ** 2)) - (x[..., 2] ** 2)), 0.0)) y0 = np.sqrt(np.maximum((((1.0 - (y[..., 0] ** 2)) - (y[..., 1] ** 2)) - (y[..., 2] ** 2)), 0.0)) z0 = np.maximum(np.minimum(((y0 * x0) + (x * y).sum(axis=(-1))), 1.0), (-1)) return (2 * np.arccos(z0))
[ "def", "_angle_between_quats", "(", "x", ",", "y", ")", ":", "x0", "=", "np", ".", "sqrt", "(", "np", ".", "maximum", "(", "(", "(", "(", "1.0", "-", "(", "x", "[", "...", ",", "0", "]", "**", "2", ")", ")", "-", "(", "x", "[", "...", ","...
compute the ang between two quaternions w/3-element representations .
train
false
26,993
def create_detailed_pickling_error(exception, instance): attribute = None def can_pickle(data): try: cPickle.dumps(v) except: return False else: return True if hasattr(instance, '__dict__'): for (k, v) in instance.__dict__.iteritems(): if (not can_pickle(v)): attribute = k break elif isinstance(instance, dict): for (k, v) in instance.iteritems(): if (not can_pickle(v)): attribute = k break elif isinstance(instance, (tuple, list)): for (i, v) in enumerate(instance): if (not can_pickle(v)): attribute = ('index-%s' % i) break wrapped = DetailedMaybeEncodingError(exception, instance, attribute) debug(('Possible encoding error while sending result: %s' % wrapped)) return wrapped
[ "def", "create_detailed_pickling_error", "(", "exception", ",", "instance", ")", ":", "attribute", "=", "None", "def", "can_pickle", "(", "data", ")", ":", "try", ":", "cPickle", ".", "dumps", "(", "v", ")", "except", ":", "return", "False", "else", ":", ...
maybeencodingerror - picklingerror: cant pickle dictproxy #8748 .
train
false
26,997
def _compress_group_index(group_index, sort=True): size_hint = min(len(group_index), _hash._SIZE_HINT_LIMIT) table = _hash.Int64HashTable(size_hint) group_index = _ensure_int64(group_index) (comp_ids, obs_group_ids) = table.get_labels_groupby(group_index) if (sort and (len(obs_group_ids) > 0)): (obs_group_ids, comp_ids) = _reorder_by_uniques(obs_group_ids, comp_ids) return (comp_ids, obs_group_ids)
[ "def", "_compress_group_index", "(", "group_index", ",", "sort", "=", "True", ")", ":", "size_hint", "=", "min", "(", "len", "(", "group_index", ")", ",", "_hash", ".", "_SIZE_HINT_LIMIT", ")", "table", "=", "_hash", ".", "Int64HashTable", "(", "size_hint", ...
group_index is offsets into cartesian product of all possible labels .
train
false
26,998
def GetAgeTupleFromRequest(request, default_days=90): now = int((time.time() * 1000000.0)) default_start = (now - ((((60 * 60) * 24) * 1000000.0) * default_days)) start_time = int(request.REQ.get('start_time', default_start)) end_time = int(request.REQ.get('end_time', now)) return (start_time, end_time)
[ "def", "GetAgeTupleFromRequest", "(", "request", ",", "default_days", "=", "90", ")", ":", "now", "=", "int", "(", "(", "time", ".", "time", "(", ")", "*", "1000000.0", ")", ")", "default_start", "=", "(", "now", "-", "(", "(", "(", "(", "60", "*",...
check the request for start/end times and return aff4 age tuple .
train
false
26,999
def get_test_provider_dir(provider): provider_dir = os.path.join(TEST_PROVIDERS_DOWNLOAD_DIR, provider) if (not provider_dir): os.makedirs(provider_dir) return provider_dir
[ "def", "get_test_provider_dir", "(", "provider", ")", ":", "provider_dir", "=", "os", ".", "path", ".", "join", "(", "TEST_PROVIDERS_DOWNLOAD_DIR", ",", "provider", ")", "if", "(", "not", "provider_dir", ")", ":", "os", ".", "makedirs", "(", "provider_dir", ...
return a specific test providers dir .
train
false
27,000
@receiver(m2m_changed, sender=Sale.products.through) def sale_update_products(sender, instance, action, *args, **kwargs): if (action == u'post_add'): instance.update_products()
[ "@", "receiver", "(", "m2m_changed", ",", "sender", "=", "Sale", ".", "products", ".", "through", ")", "def", "sale_update_products", "(", "sender", ",", "instance", ",", "action", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "action", "...
signal for updating products for the sale - needed since the products wont be assigned to the sale when it is first saved .
train
false
27,001
def _escape_node(node): result = [] for (i, char) in enumerate(node): if (char == u'\\'): if (u''.join(node[i:(i + 3)]) in JID_ESCAPE_SEQUENCES): result.append(u'\\5c') continue result.append(char) for (i, char) in enumerate(result): if (char != u'\\'): result[i] = JID_ESCAPE_TRANSFORMATIONS.get(char, char) escaped = u''.join(result) if (escaped.startswith(u'\\20') or escaped.endswith(u'\\20')): raise InvalidJID(u'Escaped local part starts or ends with "\\20"') _validate_node(escaped) return escaped
[ "def", "_escape_node", "(", "node", ")", ":", "result", "=", "[", "]", "for", "(", "i", ",", "char", ")", "in", "enumerate", "(", "node", ")", ":", "if", "(", "char", "==", "u'\\\\'", ")", ":", "if", "(", "u''", ".", "join", "(", "node", "[", ...
escape the local portion of a jid .
train
false
27,002
def get_file(fname, origin, untar=False, md5_hash=None, cache_subdir='datasets'): datadir_base = os.path.expanduser(os.path.join('~', '.keras')) if (not os.access(datadir_base, os.W_OK)): datadir_base = os.path.join('/tmp', '.keras') datadir = os.path.join(datadir_base, cache_subdir) if (not os.path.exists(datadir)): os.makedirs(datadir) if untar: untar_fpath = os.path.join(datadir, fname) fpath = (untar_fpath + '.tar.gz') else: fpath = os.path.join(datadir, fname) download = False if os.path.exists(fpath): if (md5_hash is not None): if (not validate_file(fpath, md5_hash)): print('A local file was found, but it seems to be incomplete or outdated.') download = True else: download = True if download: print('Downloading data from', origin) progbar = None def dl_progress(count, block_size, total_size, progbar=None): if (progbar is None): progbar = Progbar(total_size) else: progbar.update((count * block_size)) error_msg = 'URL fetch failure on {}: {} -- {}' try: try: urlretrieve(origin, fpath, functools.partial(dl_progress, progbar=progbar)) except URLError as e: raise Exception(error_msg.format(origin, e.errno, e.reason)) except HTTPError as e: raise Exception(error_msg.format(origin, e.code, e.msg)) except (Exception, KeyboardInterrupt) as e: if os.path.exists(fpath): os.remove(fpath) raise progbar = None if untar: if (not os.path.exists(untar_fpath)): print('Untaring file...') tfile = tarfile.open(fpath, 'r:gz') try: tfile.extractall(path=datadir) except (Exception, KeyboardInterrupt) as e: if os.path.exists(untar_fpath): if os.path.isfile(untar_fpath): os.remove(untar_fpath) else: shutil.rmtree(untar_fpath) raise tfile.close() return untar_fpath return fpath
[ "def", "get_file", "(", "fname", ",", "origin", ",", "untar", "=", "False", ",", "md5_hash", "=", "None", ",", "cache_subdir", "=", "'datasets'", ")", ":", "datadir_base", "=", "os", ".", "path", ".", "expanduser", "(", "os", ".", "path", ".", "join", ...
get filename for static file .
train
true
27,003
def dotmany(A, B, leftfunc=None, rightfunc=None, **kwargs): if leftfunc: A = map(leftfunc, A) if rightfunc: B = map(rightfunc, B) return sum(map(partial(np.dot, **kwargs), A, B))
[ "def", "dotmany", "(", "A", ",", "B", ",", "leftfunc", "=", "None", ",", "rightfunc", "=", "None", ",", "**", "kwargs", ")", ":", "if", "leftfunc", ":", "A", "=", "map", "(", "leftfunc", ",", "A", ")", "if", "rightfunc", ":", "B", "=", "map", "...
dot product of many aligned chunks .
train
false
27,005
def reservation_commit(context, reservations, project_id=None, user_id=None): return IMPL.reservation_commit(context, reservations, project_id=project_id, user_id=user_id)
[ "def", "reservation_commit", "(", "context", ",", "reservations", ",", "project_id", "=", "None", ",", "user_id", "=", "None", ")", ":", "return", "IMPL", ".", "reservation_commit", "(", "context", ",", "reservations", ",", "project_id", "=", "project_id", ","...
commit quota reservations .
train
false
27,006
def argmax_random_tie(seq, func): return random.choice(argmax_list(seq, func))
[ "def", "argmax_random_tie", "(", "seq", ",", "func", ")", ":", "return", "random", ".", "choice", "(", "argmax_list", "(", "seq", ",", "func", ")", ")" ]
return an element with highest func score; break ties at random .
train
false
27,007
def _validate_timedelta_unit(arg): try: return _unit_map[arg] except: if (arg is None): return 'ns' raise ValueError('invalid timedelta unit {0} provided'.format(arg))
[ "def", "_validate_timedelta_unit", "(", "arg", ")", ":", "try", ":", "return", "_unit_map", "[", "arg", "]", "except", ":", "if", "(", "arg", "is", "None", ")", ":", "return", "'ns'", "raise", "ValueError", "(", "'invalid timedelta unit {0} provided'", ".", ...
provide validation / translation for timedelta short units .
train
false
27,008
def http_date(timestamp=None): return _dump_date(timestamp, ' ')
[ "def", "http_date", "(", "timestamp", "=", "None", ")", ":", "return", "_dump_date", "(", "timestamp", ",", "' '", ")" ]
formats the time to match the rfc1123 date format as specified by http rfc2616 section 3 .
train
false
27,009
def build_audit_info(parent_audit_id=None): audit_id = random_urlsafe_str() if (parent_audit_id is not None): return [audit_id, parent_audit_id] return [audit_id]
[ "def", "build_audit_info", "(", "parent_audit_id", "=", "None", ")", ":", "audit_id", "=", "random_urlsafe_str", "(", ")", "if", "(", "parent_audit_id", "is", "not", "None", ")", ":", "return", "[", "audit_id", ",", "parent_audit_id", "]", "return", "[", "au...
build the audit data for a token .
train
false
27,010
def cftype_to_value(cftype): if (not cftype): return None typeID = cf.CFGetTypeID(cftype) if (typeID in known_cftypes): convert_function = known_cftypes[typeID] return convert_function(cftype) else: return cftype
[ "def", "cftype_to_value", "(", "cftype", ")", ":", "if", "(", "not", "cftype", ")", ":", "return", "None", "typeID", "=", "cf", ".", "CFGetTypeID", "(", "cftype", ")", "if", "(", "typeID", "in", "known_cftypes", ")", ":", "convert_function", "=", "known_...
convert a cftype into an equivalent python type .
train
true
27,011
def get_system_total_memory_gb(): import os import sys if (u'linux' in sys.platform): with open(u'/proc/meminfo', u'r') as f_in: meminfo_lines = f_in.readlines() mem_total_line = [line for line in meminfo_lines if (u'MemTotal' in line)][0] mem_total = float(mem_total_line.split()[1]) memory_gb = (mem_total / (1024.0 ** 2)) elif (u'darwin' in sys.platform): mem_str = os.popen(u'sysctl hw.memsize').read().strip().split(u' ')[(-1)] memory_gb = (float(mem_str) / (1024.0 ** 3)) else: err_msg = u'System platform: %s is not supported' raise Exception(err_msg) return memory_gb
[ "def", "get_system_total_memory_gb", "(", ")", ":", "import", "os", "import", "sys", "if", "(", "u'linux'", "in", "sys", ".", "platform", ")", ":", "with", "open", "(", "u'/proc/meminfo'", ",", "u'r'", ")", "as", "f_in", ":", "meminfo_lines", "=", "f_in", ...
function to get the total ram of the running system in gb .
train
false
27,012
def encode_bin(v): return v
[ "def", "encode_bin", "(", "v", ")", ":", "return", "v" ]
encodes a bytearray into serialization .
train
false
27,013
def get_embeddings(options, word_idict, f_emb, use_norm=False): d = OrderedDict() for i in range(options['n_words']): caption = [i] ff = f_emb(numpy.array(caption).reshape(1, 1)).flatten() if use_norm: ff /= norm(ff) d[word_idict[i]] = ff return d
[ "def", "get_embeddings", "(", "options", ",", "word_idict", ",", "f_emb", ",", "use_norm", "=", "False", ")", ":", "d", "=", "OrderedDict", "(", ")", "for", "i", "in", "range", "(", "options", "[", "'n_words'", "]", ")", ":", "caption", "=", "[", "i"...
extract rnn embeddings from the lookup layer of the model function modified from: URL .
train
false
27,014
def _munge_to_length(string, min_length, max_length): if (len(string) < min_length): string += ('_' * (min_length - len(string))) if (len(string) > max_length): string = string[:max_length] return string
[ "def", "_munge_to_length", "(", "string", ",", "min_length", ",", "max_length", ")", ":", "if", "(", "len", "(", "string", ")", "<", "min_length", ")", ":", "string", "+=", "(", "'_'", "*", "(", "min_length", "-", "len", "(", "string", ")", ")", ")",...
pad/truncates a string .
train
false
27,015
def _options_dir(name): _check_portname(name) _root = '/var/db/ports' new_dir = os.path.join(_root, name.replace('/', '_')) old_dir = os.path.join(_root, name.split('/')[(-1)]) if os.path.isdir(old_dir): return old_dir return new_dir
[ "def", "_options_dir", "(", "name", ")", ":", "_check_portname", "(", "name", ")", "_root", "=", "'/var/db/ports'", "new_dir", "=", "os", ".", "path", ".", "join", "(", "_root", ",", "name", ".", "replace", "(", "'/'", ",", "'_'", ")", ")", "old_dir", ...
retrieve the path to the dir containing options file for a given port .
train
true
27,016
def randomDeterministic(Ts): numA = len(Ts) dim = len(Ts[0]) choices = (rand(dim) * numA).astype(int) policy = zeros((dim, numA)) for (si, a) in choices: policy[(si, a)] = 1 return (policy, collapsedTransitions(Ts, policy))
[ "def", "randomDeterministic", "(", "Ts", ")", ":", "numA", "=", "len", "(", "Ts", ")", "dim", "=", "len", "(", "Ts", "[", "0", "]", ")", "choices", "=", "(", "rand", "(", "dim", ")", "*", "numA", ")", ".", "astype", "(", "int", ")", "policy", ...
pick a random deterministic action for each state .
train
false
27,017
def rollback(): connection._rollback() set_clean()
[ "def", "rollback", "(", ")", ":", "connection", ".", "_rollback", "(", ")", "set_clean", "(", ")" ]
rolls back a transaction .
train
false
27,018
def dejsonize(data): return json.loads(data)
[ "def", "dejsonize", "(", "data", ")", ":", "return", "json", ".", "loads", "(", "data", ")" ]
returns json deserialized data .
train
false
27,020
def prettify_name(name): return name.replace('_', ' ').title()
[ "def", "prettify_name", "(", "name", ")", ":", "return", "name", ".", "replace", "(", "'_'", ",", "' '", ")", ".", "title", "(", ")" ]
prettify pythonic variable name .
train
false
27,021
def FancyAnalyzer(expression='\\s+', stoplist=STOP_WORDS, minsize=2, maxsize=None, gaps=True, splitwords=True, splitnums=True, mergewords=False, mergenums=False): ret = RegexTokenizer(expression=expression, gaps=gaps) iwf = IntraWordFilter(splitwords=splitwords, splitnums=splitnums, mergewords=mergewords, mergenums=mergenums) lcf = LowercaseFilter() swf = StopFilter(stoplist=stoplist, minsize=minsize) return (((ret | iwf) | lcf) | swf)
[ "def", "FancyAnalyzer", "(", "expression", "=", "'\\\\s+'", ",", "stoplist", "=", "STOP_WORDS", ",", "minsize", "=", "2", ",", "maxsize", "=", "None", ",", "gaps", "=", "True", ",", "splitwords", "=", "True", ",", "splitnums", "=", "True", ",", "mergewor...
composes a regextokenizer with an intrawordfilter .
train
false
27,022
def test_ros_sample_wt_fit(): ros = RandomOverSampler(random_state=RND_SEED) assert_raises(RuntimeError, ros.sample, X, Y)
[ "def", "test_ros_sample_wt_fit", "(", ")", ":", "ros", "=", "RandomOverSampler", "(", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "ros", ".", "sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised when sample is called before fitting .
train
false
27,023
def _error_code_to_str(mod, type_, code): (_, c_name) = _get_error_names(mod, type_, code) return ('%s(%d)' % (c_name, code))
[ "def", "_error_code_to_str", "(", "mod", ",", "type_", ",", "code", ")", ":", "(", "_", ",", "c_name", ")", "=", "_get_error_names", "(", "mod", ",", "type_", ",", "code", ")", "return", "(", "'%s(%d)'", "%", "(", "c_name", ",", "code", ")", ")" ]
this method is registered as ofp_error_code_to_str method into ryu .
train
true
27,024
def atfork(): _UserFriendlyRNG.reinit()
[ "def", "atfork", "(", ")", ":", "_UserFriendlyRNG", ".", "reinit", "(", ")" ]
call this whenever you call os .
train
false
27,027
def hb_read(file): def _get_matrix(fid): hb = HBFile(fid) return hb.read_matrix() if isinstance(file, string_types): fid = open(file) try: return _get_matrix(fid) finally: fid.close() else: return _get_matrix(file)
[ "def", "hb_read", "(", "file", ")", ":", "def", "_get_matrix", "(", "fid", ")", ":", "hb", "=", "HBFile", "(", "fid", ")", "return", "hb", ".", "read_matrix", "(", ")", "if", "isinstance", "(", "file", ",", "string_types", ")", ":", "fid", "=", "op...
read hb-format file .
train
false