id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
33,227
def lstrip(s): return s.lstrip()
[ "def", "lstrip", "(", "s", ")", ":", "return", "s", ".", "lstrip", "(", ")" ]
lstrip -> string return a copy of the string s with leading whitespace removed .
train
false
33,229
def network_info(): cmd = __salt__['cmd.run_all']('racadm getniccfg') if (cmd['retcode'] != 0): log.warning("racadm return an exit code '{0}'.".format(cmd['retcode'])) return __parse_drac(cmd['stdout'])
[ "def", "network_info", "(", ")", ":", "cmd", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'racadm getniccfg'", ")", "if", "(", "cmd", "[", "'retcode'", "]", "!=", "0", ")", ":", "log", ".", "warning", "(", "\"racadm return an exit code '{0}'.\"", ".", ...
return network configuration cli example: .
train
false
33,230
def node_completed_status(checknode): u' TODO: place this in the base.py file and refactor ' node_state_does_not_require_overwrite = ((checknode.overwrite is False) or ((checknode.overwrite is None) and (not checknode._interface.always_run))) hash_exists = False try: (hash_exists, _, _, _) = checknode.hash_exists() except Exception: hash_exists = False return (hash_exists and node_state_does_not_require_overwrite)
[ "def", "node_completed_status", "(", "checknode", ")", ":", "node_state_does_not_require_overwrite", "=", "(", "(", "checknode", ".", "overwrite", "is", "False", ")", "or", "(", "(", "checknode", ".", "overwrite", "is", "None", ")", "and", "(", "not", "checkno...
a function to determine if a node has previously completed its work .
train
false
33,232
def return_values(obj): if isinstance(obj, (text_type, binary_type)): if obj: (yield to_native(obj, errors='surrogate_or_strict')) return elif isinstance(obj, SEQUENCETYPE): for element in obj: for subelement in return_values(element): (yield subelement) elif isinstance(obj, Mapping): for element in obj.items(): for subelement in return_values(element[1]): (yield subelement) elif isinstance(obj, (bool, NoneType)): return elif isinstance(obj, NUMBERTYPES): (yield to_native(obj, nonstring='simplerepr')) else: raise TypeError(('Unknown parameter type: %s, %s' % (type(obj), obj)))
[ "def", "return_values", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "text_type", ",", "binary_type", ")", ")", ":", "if", "obj", ":", "(", "yield", "to_native", "(", "obj", ",", "errors", "=", "'surrogate_or_strict'", ")", ")", "ret...
return native stringified values from datastructures .
train
false
33,233
def _fwd_bem_multi_solution(solids, gamma, nps): pi2 = (1.0 / (2 * np.pi)) n_tot = np.sum(nps) assert (solids.shape == (n_tot, n_tot)) nsurf = len(nps) defl = (1.0 / n_tot) offsets = np.cumsum(np.concatenate(([0], nps))) for si_1 in range(nsurf): for si_2 in range(nsurf): mult = (pi2 if (gamma is None) else (pi2 * gamma[(si_1, si_2)])) slice_j = slice(offsets[si_1], offsets[(si_1 + 1)]) slice_k = slice(offsets[si_2], offsets[(si_2 + 1)]) solids[(slice_j, slice_k)] = (defl - (solids[(slice_j, slice_k)] * mult)) solids += np.eye(n_tot) return linalg.inv(solids, overwrite_a=True)
[ "def", "_fwd_bem_multi_solution", "(", "solids", ",", "gamma", ",", "nps", ")", ":", "pi2", "=", "(", "1.0", "/", "(", "2", "*", "np", ".", "pi", ")", ")", "n_tot", "=", "np", ".", "sum", "(", "nps", ")", "assert", "(", "solids", ".", "shape", ...
do multi surface solution .
train
false
33,234
def test_disallow_inequality_comparisons(): t = table.Table() with pytest.raises(TypeError): (t > 2) with pytest.raises(TypeError): (t < 1.1) with pytest.raises(TypeError): (t >= 5.5) with pytest.raises(TypeError): (t <= (-1.1))
[ "def", "test_disallow_inequality_comparisons", "(", ")", ":", "t", "=", "table", ".", "Table", "(", ")", "with", "pytest", ".", "raises", "(", "TypeError", ")", ":", "(", "t", ">", "2", ")", "with", "pytest", ".", "raises", "(", "TypeError", ")", ":", ...
regression test for #828 - disallow comparison operators on whole table .
train
false
33,235
def _IntersectTwoHandlers(first_handler, second_handler): shared_prefix = _SharedPrefix(first_handler.pattern, second_handler.pattern) if shared_prefix: return _HandleCommonPrefix(first_handler, second_handler, shared_prefix) shared_suffix = _SharedSuffix(first_handler.pattern, second_handler.pattern) if shared_suffix: return _HandleCommonSuffix(first_handler, second_handler, shared_suffix) handler_set = set() handler_set |= _HandleWildcardCases(first_handler, second_handler) handler_set |= _HandleWildcardCases(second_handler, first_handler) handler_set |= set([first_handler, second_handler]) return handler_set
[ "def", "_IntersectTwoHandlers", "(", "first_handler", ",", "second_handler", ")", ":", "shared_prefix", "=", "_SharedPrefix", "(", "first_handler", ".", "pattern", ",", "second_handler", ".", "pattern", ")", "if", "shared_prefix", ":", "return", "_HandleCommonPrefix",...
returns intersections of first_handler and second_handler patterns .
train
false
33,236
def isLoopIntersectingInsideXSegment(loop, segmentFirstX, segmentSecondX, segmentYMirror, y): rotatedLoop = getPointsRoundZAxis(segmentYMirror, loop) for pointIndex in xrange(len(rotatedLoop)): pointFirst = rotatedLoop[pointIndex] pointSecond = rotatedLoop[((pointIndex + 1) % len(rotatedLoop))] if isLineIntersectingInsideXSegment(pointFirst, pointSecond, segmentFirstX, segmentSecondX, y): return True return False
[ "def", "isLoopIntersectingInsideXSegment", "(", "loop", ",", "segmentFirstX", ",", "segmentSecondX", ",", "segmentYMirror", ",", "y", ")", ":", "rotatedLoop", "=", "getPointsRoundZAxis", "(", "segmentYMirror", ",", "loop", ")", "for", "pointIndex", "in", "xrange", ...
determine if the loop is intersecting inside the x segment .
train
false
33,237
@register_uncanonicalize @gof.local_optimizer([DimShuffle]) def local_dimshuffle_alloc(node): if (isinstance(node.op, DimShuffle) and node.inputs[0].owner): input_ = node.inputs[0] if isinstance(input_.owner.op, T.Alloc): new_order = node.op.new_order expected_new_order = ((('x',) * (len(new_order) - input_.ndim)) + tuple(range(input_.ndim))) if (new_order != expected_new_order): return False nb_new_dims = (len(new_order) - input_.ndim) new_shape_input = (((1,) * nb_new_dims) + tuple(input_.owner.inputs[1:])) return [T.alloc(input_.owner.inputs[0], *new_shape_input)] return False
[ "@", "register_uncanonicalize", "@", "gof", ".", "local_optimizer", "(", "[", "DimShuffle", "]", ")", "def", "local_dimshuffle_alloc", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "DimShuffle", ")", "and", "node", ".", "inpu...
if an alloc is inside a dimshuffle which only adds dimension to the left .
train
false
33,238
def decryptData(key, data, mode=AESModeOfOperation.ModeOfOperation[u'CBC']): key = bytearray(key) keysize = len(key) assert (keysize in AES.KeySize.values()), (u'invalid key size: %s' % keysize) iv = bytearray(data[:16]) data = bytearray(data[16:]) moo = AESModeOfOperation() decr = moo.decrypt(data, None, mode, key, keysize, iv) if (mode == AESModeOfOperation.ModeOfOperation[u'CBC']): decr = strip_PKCS7_padding(decr) return bytes(decr)
[ "def", "decryptData", "(", "key", ",", "data", ",", "mode", "=", "AESModeOfOperation", ".", "ModeOfOperation", "[", "u'CBC'", "]", ")", ":", "key", "=", "bytearray", "(", "key", ")", "keysize", "=", "len", "(", "key", ")", "assert", "(", "keysize", "in...
module function to decrypt the given data with the given key .
train
false
33,239
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
create a new tdes cipher .
train
false
33,240
def add_handler(app, handler_name, func, key=None): handler_adder = getattr(app, handler_name) handler_funcs_name = '{0}_funcs'.format(handler_name) handler_funcs = getattr(app, handler_funcs_name) if (func not in handler_funcs.get(key, [])): handler_adder(func)
[ "def", "add_handler", "(", "app", ",", "handler_name", ",", "func", ",", "key", "=", "None", ")", ":", "handler_adder", "=", "getattr", "(", "app", ",", "handler_name", ")", "handler_funcs_name", "=", "'{0}_funcs'", ".", "format", "(", "handler_name", ")", ...
add handler to flask application if handler has not already been added .
train
false
33,241
def draw(): get_current_fig_manager().canvas.draw()
[ "def", "draw", "(", ")", ":", "get_current_fig_manager", "(", ")", ".", "canvas", ".", "draw", "(", ")" ]
redraw the current figure .
train
false
33,242
@pytest.fixture def xonsh_execer(monkeypatch): monkeypatch.setattr(xonsh.built_ins, 'load_builtins', (lambda *args, **kwargs: None)) execer = Execer(login=False, unload=False) builtins.__xonsh_execer__ = execer return execer
[ "@", "pytest", ".", "fixture", "def", "xonsh_execer", "(", "monkeypatch", ")", ":", "monkeypatch", ".", "setattr", "(", "xonsh", ".", "built_ins", ",", "'load_builtins'", ",", "(", "lambda", "*", "args", ",", "**", "kwargs", ":", "None", ")", ")", "exece...
initiate the execer with a mocked nop load_builtins .
train
false
33,243
@slow_test @testing.requires_testing_data def test_write_source_space(): tempdir = _TempDir() src0 = read_source_spaces(fname, patch_stats=False) write_source_spaces(op.join(tempdir, 'tmp-src.fif'), src0) src1 = read_source_spaces(op.join(tempdir, 'tmp-src.fif'), patch_stats=False) _compare_source_spaces(src0, src1) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') src_badname = op.join(tempdir, 'test-bad-name.fif.gz') write_source_spaces(src_badname, src0) read_source_spaces(src_badname) assert_naming(w, 'test_source_space.py', 2)
[ "@", "slow_test", "@", "testing", ".", "requires_testing_data", "def", "test_write_source_space", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "src0", "=", "read_source_spaces", "(", "fname", ",", "patch_stats", "=", "False", ")", "write_source_spaces", ...
test reading and writing of source spaces .
train
false
33,244
@utils.expects_func_args('image_id', 'instance') def delete_image_on_error(function): @functools.wraps(function) def decorated_function(self, context, image_id, instance, *args, **kwargs): try: return function(self, context, image_id, instance, *args, **kwargs) except Exception: with excutils.save_and_reraise_exception(): LOG.debug('Cleaning up image %s', image_id, exc_info=True, instance=instance) try: self.image_api.delete(context, image_id) except exception.ImageNotFound: pass except Exception: LOG.exception(_LE('Error while trying to clean up image %s'), image_id, instance=instance) return decorated_function
[ "@", "utils", ".", "expects_func_args", "(", "'image_id'", ",", "'instance'", ")", "def", "delete_image_on_error", "(", "function", ")", ":", "@", "functools", ".", "wraps", "(", "function", ")", "def", "decorated_function", "(", "self", ",", "context", ",", ...
used for snapshot related method to ensure the image created in compute .
train
false
33,245
def _check_response_error(e, message_type): if (e.error_code != BOTO_INSTANCE_NOT_FOUND): raise e Message.new(message_type=message_type, reason=e.error_code).write()
[ "def", "_check_response_error", "(", "e", ",", "message_type", ")", ":", "if", "(", "e", ".", "error_code", "!=", "BOTO_INSTANCE_NOT_FOUND", ")", ":", "raise", "e", "Message", ".", "new", "(", "message_type", "=", "message_type", ",", "reason", "=", "e", "...
check if an exception is a transient one .
train
false
33,246
def getMatrixKeys(prefix=''): matrixKeys = [] for row in xrange(4): for column in xrange(4): key = getMatrixKey(row, column, prefix) matrixKeys.append(key) return matrixKeys
[ "def", "getMatrixKeys", "(", "prefix", "=", "''", ")", ":", "matrixKeys", "=", "[", "]", "for", "row", "in", "xrange", "(", "4", ")", ":", "for", "column", "in", "xrange", "(", "4", ")", ":", "key", "=", "getMatrixKey", "(", "row", ",", "column", ...
get the matrix keys .
train
false
33,247
def course_context_from_course_id(course_id): if (course_id is None): return {'course_id': '', 'org_id': ''} assert isinstance(course_id, CourseKey) return {'course_id': course_id.to_deprecated_string(), 'org_id': course_id.org}
[ "def", "course_context_from_course_id", "(", "course_id", ")", ":", "if", "(", "course_id", "is", "None", ")", ":", "return", "{", "'course_id'", ":", "''", ",", "'org_id'", ":", "''", "}", "assert", "isinstance", "(", "course_id", ",", "CourseKey", ")", "...
creates a course context from a course_id .
train
false
33,248
def get_cpu_stat(key): stats = [] stat_file = open('/proc/stat', 'r') line = stat_file.readline() while line: if line.startswith(key): stats = line.split()[1:] break line = stat_file.readline() return stats
[ "def", "get_cpu_stat", "(", "key", ")", ":", "stats", "=", "[", "]", "stat_file", "=", "open", "(", "'/proc/stat'", ",", "'r'", ")", "line", "=", "stat_file", ".", "readline", "(", ")", "while", "line", ":", "if", "line", ".", "startswith", "(", "key...
get load per cpu from /proc/stat :return: list of values of cpu times .
train
false
33,249
def SetConfigOptions(): config_opts = {} flag_defaults = {} if os.environ.get('GRR_CONFIG_FILE'): flag_defaults['config'] = os.environ.get('GRR_CONFIG_FILE') elif defaults.CONFIG_FILE: flag_defaults['config'] = defaults.CONFIG_FILE else: flag_defaults['config'] = config_lib.Resource().Filter('install_data/etc/grr-server.yaml') for (option, value) in config_opts.items(): config_lib.CONFIG.Set(option, value) flags.PARSER.set_defaults(**flag_defaults)
[ "def", "SetConfigOptions", "(", ")", ":", "config_opts", "=", "{", "}", "flag_defaults", "=", "{", "}", "if", "os", ".", "environ", ".", "get", "(", "'GRR_CONFIG_FILE'", ")", ":", "flag_defaults", "[", "'config'", "]", "=", "os", ".", "environ", ".", "...
set location of configuration flags .
train
false
33,251
def interp(net, layers): for l in layers: (m, k, h, w) = net.params[l][0].data.shape if ((m != k) and (k != 1)): print 'input + output channels need to be the same or |output| == 1' raise if (h != w): print 'filters need to be square' raise filt = upsample_filt(h) net.params[l][0].data[range(m), range(k), :, :] = filt
[ "def", "interp", "(", "net", ",", "layers", ")", ":", "for", "l", "in", "layers", ":", "(", "m", ",", "k", ",", "h", ",", "w", ")", "=", "net", ".", "params", "[", "l", "]", "[", "0", "]", ".", "data", ".", "shape", "if", "(", "(", "m", ...
set weights of each layer in layers to bilinear kernels for interpolation .
train
true
33,252
def find_intermediate_color(lowcolor, highcolor, intermed): diff_0 = float((highcolor[0] - lowcolor[0])) diff_1 = float((highcolor[1] - lowcolor[1])) diff_2 = float((highcolor[2] - lowcolor[2])) return ((lowcolor[0] + (intermed * diff_0)), (lowcolor[1] + (intermed * diff_1)), (lowcolor[2] + (intermed * diff_2)))
[ "def", "find_intermediate_color", "(", "lowcolor", ",", "highcolor", ",", "intermed", ")", ":", "diff_0", "=", "float", "(", "(", "highcolor", "[", "0", "]", "-", "lowcolor", "[", "0", "]", ")", ")", "diff_1", "=", "float", "(", "(", "highcolor", "[", ...
returns the color at a given distance between two colors this function takes two color tuples .
train
false
33,253
def iter_copy(structure): l = [] for i in structure: if hasattr(i, '__iter__'): l.append(iter_copy(i)) else: l.append(i) return l
[ "def", "iter_copy", "(", "structure", ")", ":", "l", "=", "[", "]", "for", "i", "in", "structure", ":", "if", "hasattr", "(", "i", ",", "'__iter__'", ")", ":", "l", ".", "append", "(", "iter_copy", "(", "i", ")", ")", "else", ":", "l", ".", "ap...
returns a copy of an iterable object .
train
false
33,254
@pytest.mark.django_db def test_get_path_obj(rf, po_directory, default, tp0): language_code = tp0.language.code project_code = tp0.project.code language_code_fake = 'faf' project_code_fake = 'fake-tutorial' request = rf.get('/') request.user = default func = get_path_obj((lambda x, y: (x, y))) func(request, project_code=project_code) assert isinstance(request.ctx_obj, Project) with pytest.raises(Http404): func(request, project_code=project_code_fake) func(request, language_code=language_code) assert isinstance(request.ctx_obj, Language) with pytest.raises(Http404): func(request, language_code=language_code_fake) func(request, language_code=language_code, project_code=project_code) assert isinstance(request.ctx_obj, TranslationProject) with pytest.raises(Http404): func(request, language_code=language_code_fake, project_code=project_code)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_get_path_obj", "(", "rf", ",", "po_directory", ",", "default", ",", "tp0", ")", ":", "language_code", "=", "tp0", ".", "language", ".", "code", "project_code", "=", "tp0", ".", "project", ".", "c...
ensure the correct path object is retrieved .
train
false
33,255
def log_sum_exp(input_tensor, reduction_indices=None, keep_dims=False): input_tensor = tf.convert_to_tensor(input_tensor) dependencies = [tf.verify_tensor_all_finite(input_tensor, msg='')] input_tensor = control_flow_ops.with_dependencies(dependencies, input_tensor) x_max = tf.reduce_max(input_tensor, reduction_indices, keep_dims=True) return (tf.squeeze(x_max) + tf.log(tf.reduce_sum(tf.exp((input_tensor - x_max)), reduction_indices, keep_dims)))
[ "def", "log_sum_exp", "(", "input_tensor", ",", "reduction_indices", "=", "None", ",", "keep_dims", "=", "False", ")", ":", "input_tensor", "=", "tf", ".", "convert_to_tensor", "(", "input_tensor", ")", "dependencies", "=", "[", "tf", ".", "verify_tensor_all_fin...
compute the log_sum_exp of elements in a tensor .
train
false
33,256
def _type_pprint(obj, p, cycle): if ([m for m in _get_mro(type(obj)) if ('__repr__' in vars(m))][:1] != [type]): _repr_pprint(obj, p, cycle) return mod = _safe_getattr(obj, '__module__', None) try: name = obj.__qualname__ if (not isinstance(name, str)): raise Exception('Try __name__') except Exception: name = obj.__name__ if (not isinstance(name, str)): name = '<unknown type>' if (mod in (None, '__builtin__', 'builtins', 'exceptions')): p.text(name) else: p.text(((mod + '.') + name))
[ "def", "_type_pprint", "(", "obj", ",", "p", ",", "cycle", ")", ":", "if", "(", "[", "m", "for", "m", "in", "_get_mro", "(", "type", "(", "obj", ")", ")", "if", "(", "'__repr__'", "in", "vars", "(", "m", ")", ")", "]", "[", ":", "1", "]", "...
the pprint for classes and types .
train
false
33,257
def formset(*args, **kw): prefix = kw.pop('prefix', 'form') total_count = kw.pop('total_count', len(args)) initial_count = kw.pop('initial_count', len(args)) data = {(prefix + '-TOTAL_FORMS'): total_count, (prefix + '-INITIAL_FORMS'): initial_count} for (idx, d) in enumerate(args): data.update(((('%s-%s-%s' % (prefix, idx, k)), v) for (k, v) in d.items())) data.update(kw) return data
[ "def", "formset", "(", "*", "args", ",", "**", "kw", ")", ":", "prefix", "=", "kw", ".", "pop", "(", "'prefix'", ",", "'form'", ")", "total_count", "=", "kw", ".", "pop", "(", "'total_count'", ",", "len", "(", "args", ")", ")", "initial_count", "="...
build up a formset-happy post .
train
false
33,258
def CDLHIKKAKE(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLHIKKAKE)
[ "def", "CDLHIKKAKE", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLHIKKAKE", ")" ]
hikkake pattern .
train
false
33,259
def _flat_palette(color, n_colors=6, reverse=False, as_cmap=False, input='rgb'): color = _color_to_rgb(color, input) flat = desaturate(color, 0) colors = ([color, flat] if reverse else [flat, color]) return blend_palette(colors, n_colors, as_cmap)
[ "def", "_flat_palette", "(", "color", ",", "n_colors", "=", "6", ",", "reverse", "=", "False", ",", "as_cmap", "=", "False", ",", "input", "=", "'rgb'", ")", ":", "color", "=", "_color_to_rgb", "(", "color", ",", "input", ")", "flat", "=", "desaturate"...
make a sequential palette that blends from gray to color .
train
false
33,260
def _package_dict(package): result = {'name': package.name, 'arch': package.arch, 'epoch': str(package.epoch), 'release': package.release, 'version': package.version, 'repo': package.repoid} result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(**result) return result
[ "def", "_package_dict", "(", "package", ")", ":", "result", "=", "{", "'name'", ":", "package", ".", "name", ",", "'arch'", ":", "package", ".", "arch", ",", "'epoch'", ":", "str", "(", "package", ".", "epoch", ")", ",", "'release'", ":", "package", ...
return a dictionary of information for the package .
train
false
33,264
@not_implemented_for('directed') def articulation_points(G): seen = set() for articulation in _biconnected_dfs(G, components=False): if (articulation not in seen): seen.add(articulation) (yield articulation)
[ "@", "not_implemented_for", "(", "'directed'", ")", "def", "articulation_points", "(", "G", ")", ":", "seen", "=", "set", "(", ")", "for", "articulation", "in", "_biconnected_dfs", "(", "G", ",", "components", "=", "False", ")", ":", "if", "(", "articulati...
yield the articulation points .
train
false
33,265
def multiple_file_nmds(input_dir, output_dir, dimensions=2): if (not os.path.exists(output_dir)): os.makedirs(output_dir) file_names = os.listdir(input_dir) file_names = [fname for fname in file_names if (not fname.startswith('.'))] for fname in file_names: (base_fname, ext) = os.path.splitext(fname) infile = os.path.join(input_dir, fname) lines = open(infile, 'U') nmds_res_string = nmds(lines, dimensions) outfile = os.path.join(output_dir, (('nmds_' + base_fname) + '.txt')) outfile = open(outfile, 'w') outfile.write(nmds_res_string) outfile.close()
[ "def", "multiple_file_nmds", "(", "input_dir", ",", "output_dir", ",", "dimensions", "=", "2", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "output_dir", ")", ")", ":", "os", ".", "makedirs", "(", "output_dir", ")", "file_names", ...
perform pcoas on all distance matrices in the input_dir .
train
false
33,267
@register.function @jinja2.contextfunction def remora_url(context, url, lang=None, app=None, prefix=''): if (lang is None): _lang = context['LANG'] if _lang: lang = to_locale(_lang).replace('_', '-') if (app is None): try: app = context['APP'].short except (AttributeError, KeyError): pass return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix)
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "remora_url", "(", "context", ",", "url", ",", "lang", "=", "None", ",", "app", "=", "None", ",", "prefix", "=", "''", ")", ":", "if", "(", "lang", "is", "None", ")", "...
wrapper for urlresolvers .
train
false
33,268
@pytest.mark.cmd @pytest.mark.django_db def test_find_duplicate_emails_noemails(capfd, member, member2): call_command('find_duplicate_emails') (out, err) = capfd.readouterr() assert ('The following users have no email set' in out) assert ('member ' in out) assert ('member2' in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_find_duplicate_emails_noemails", "(", "capfd", ",", "member", ",", "member2", ")", ":", "call_command", "(", "'find_duplicate_emails'", ")", "(", "out", ",", "...
user have no email set .
train
false
33,269
def read_signal_probs(file): f = open(file) lines = f.readlines() f.close() flow_probs = defaultdict(list) flow_logs = defaultdict(list) for line in lines: if line.startswith('#'): continue for (i, num) in enumerate(line.strip().split()[2::2]): flow_probs[i].append(float(num)) for (i, num) in enumerate(line.strip().split()[1::2]): flow_logs[i].append(float(num)) for p in flow_probs: s = sum(flow_probs[p]) flow_probs[p] = [(i / s) for i in flow_probs[p]] return (flow_probs, flow_logs)
[ "def", "read_signal_probs", "(", "file", ")", ":", "f", "=", "open", "(", "file", ")", "lines", "=", "f", ".", "readlines", "(", ")", "f", ".", "close", "(", ")", "flow_probs", "=", "defaultdict", "(", "list", ")", "flow_logs", "=", "defaultdict", "(...
read and check the signal probabilty file .
train
false
33,270
def has_metaclass(parent): for node in parent.children: if (node.type == syms.suite): return has_metaclass(node) elif ((node.type == syms.simple_stmt) and node.children): expr_node = node.children[0] if ((expr_node.type == syms.expr_stmt) and expr_node.children): left_side = expr_node.children[0] if (isinstance(left_side, Leaf) and (left_side.value == '__metaclass__')): return True return False
[ "def", "has_metaclass", "(", "parent", ")", ":", "for", "node", "in", "parent", ".", "children", ":", "if", "(", "node", ".", "type", "==", "syms", ".", "suite", ")", ":", "return", "has_metaclass", "(", "node", ")", "elif", "(", "(", "node", ".", ...
we have to check the cls_node without changing it .
train
true
33,271
def add_headers_to_response(response): if ('X-Frame-Options' not in response.headers): response.headers['X-Frame-Options'] = 'SAMEORIGIN' elif (response.headers['X-Frame-Options'] == 'ALLOWALL'): del response.headers['X-Frame-Options'] if ('X-Content-Type-Options' not in response.headers): response.headers['X-Content-Type-Options'] = 'nosniff' if ('X-XSS-Protection' not in response.headers): response.headers['X-XSS-Protection'] = '1; mode=block' if ('content-security-policy-report-only' not in response.headers): response.headers['content-security-policy-report-only'] = "default-src 'self';script-src 'self' assets.gratipay.com 'unsafe-inline';style-src 'self' assets.gratipay.com downloads.gratipay.com cloud.typography.com;img-src *;font-src 'self' assets.gratipay.com cloud.typography.com data:;upgrade-insecure-requests;block-all-mixed-content;reflected-xss block;report-uri https://gratipay.report-uri.io/r/default/csp/reportOnly;"
[ "def", "add_headers_to_response", "(", "response", ")", ":", "if", "(", "'X-Frame-Options'", "not", "in", "response", ".", "headers", ")", ":", "response", ".", "headers", "[", "'X-Frame-Options'", "]", "=", "'SAMEORIGIN'", "elif", "(", "response", ".", "heade...
add security headers .
train
false
33,272
def test_gcrs_altaz(): from .. import EarthLocation (ra, dec, _) = randomly_sample_sphere(1) gcrs = GCRS(ra=ra[0], dec=dec[0], obstime=u'J2000') times = Time((np.linspace(2456293.25, 2456657.25, 51) * u.day), format=u'jd', scale=u'utc') loc = EarthLocation(lon=(10 * u.deg), lat=(80.0 * u.deg)) aaframe = AltAz(obstime=times, location=loc) aa1 = gcrs.transform_to(aaframe) aa2 = gcrs.transform_to(ICRS).transform_to(CIRS).transform_to(aaframe) aa3 = gcrs.transform_to(ITRS).transform_to(CIRS).transform_to(aaframe) assert_allclose(aa1.alt, aa2.alt) assert_allclose(aa1.az, aa2.az) assert_allclose(aa1.alt, aa3.alt) assert_allclose(aa1.az, aa3.az)
[ "def", "test_gcrs_altaz", "(", ")", ":", "from", ".", ".", "import", "EarthLocation", "(", "ra", ",", "dec", ",", "_", ")", "=", "randomly_sample_sphere", "(", "1", ")", "gcrs", "=", "GCRS", "(", "ra", "=", "ra", "[", "0", "]", ",", "dec", "=", "...
check gcrs<->altaz transforms for round-tripping .
train
false
33,273
def truncate_flowgrams_in_SFF(flowgrams, header, outhandle=None, outdir='/tmp/', barcode_mapping=None, primer=None, allow_num_ambigous=4): out_filename = '' if (not outhandle): (fd, out_filename) = mkstemp(dir=outdir, prefix='trunc_sff', suffix='.sff.txt') close(fd) outhandle = open(out_filename, 'w') write_sff_header(header, outhandle) l = 0 for f in flowgrams: qual_trimmed_flowgram = f.getQualityTrimmedFlowgram() if barcode_mapping: if (f.Name in barcode_mapping): trunc_flowgram = qual_trimmed_flowgram.getPrimerTrimmedFlowgram(primerseq=((DEFAULT_KEYSEQ + barcode_mapping[f.Name]) + primer)) else: continue else: prim = DEFAULT_KEYSEQ if primer: prim += primer trunc_flowgram = qual_trimmed_flowgram.getPrimerTrimmedFlowgram(primerseq=prim) if (trunc_flowgram is not None): outhandle.write((trunc_flowgram.createFlowHeader() + '\n')) l += 1 return (out_filename, l)
[ "def", "truncate_flowgrams_in_SFF", "(", "flowgrams", ",", "header", ",", "outhandle", "=", "None", ",", "outdir", "=", "'/tmp/'", ",", "barcode_mapping", "=", "None", ",", "primer", "=", "None", ",", "allow_num_ambigous", "=", "4", ")", ":", "out_filename", ...
truncate flowgrams at low quality 3 end and strip key+primers .
train
false
33,275
@cli.command('sharpen') @click.option('-f', '--factor', default=2.0, help='Sharpens the image.', show_default=True) @processor def sharpen_cmd(images, factor): for image in images: click.echo(('Sharpen "%s" by %f' % (image.filename, factor))) enhancer = ImageEnhance.Sharpness(image) (yield copy_filename(enhancer.enhance(max(1.0, factor)), image))
[ "@", "cli", ".", "command", "(", "'sharpen'", ")", "@", "click", ".", "option", "(", "'-f'", ",", "'--factor'", ",", "default", "=", "2.0", ",", "help", "=", "'Sharpens the image.'", ",", "show_default", "=", "True", ")", "@", "processor", "def", "sharpe...
sharpens an image .
train
false
33,276
def add_threading_args(parser): parser.add_argument(u'--threads', u'-T', default=DEFAULT_N_THREADS, type=int, help=u'Number of simultaneous threads to execute')
[ "def", "add_threading_args", "(", "parser", ")", ":", "parser", ".", "add_argument", "(", "u'--threads'", ",", "u'-T'", ",", "default", "=", "DEFAULT_N_THREADS", ",", "type", "=", "int", ",", "help", "=", "u'Number of simultaneous threads to execute'", ")" ]
add option for threading to options parser .
train
false
33,277
def findElementsWithId(node, elems=None): if (elems is None): elems = {} id = node.getAttribute('id') if (id != ''): elems[id] = node if node.hasChildNodes(): for child in node.childNodes: if (child.nodeType == 1): findElementsWithId(child, elems) return elems
[ "def", "findElementsWithId", "(", "node", ",", "elems", "=", "None", ")", ":", "if", "(", "elems", "is", "None", ")", ":", "elems", "=", "{", "}", "id", "=", "node", ".", "getAttribute", "(", "'id'", ")", "if", "(", "id", "!=", "''", ")", ":", ...
returns all elements with id attributes .
train
true
33,278
def false(*args, **kwargs): return False
[ "def", "false", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "False" ]
always returns false .
train
false
33,279
def _popen(cmd): p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=(os.name != 'nt'), universal_newlines=True) return p.communicate()
[ "def", "_popen", "(", "cmd", ")", ":", "p", "=", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "close_fds", "=", "(", "os", ".", "name", "!=", "'nt'", ")", ",", "universal_newlines", ...
friendly wrapper around popen for windows .
train
false
33,280
def sobel_h(image, mask=None): assert_nD(image, 2) image = img_as_float(image) result = convolve(image, HSOBEL_WEIGHTS) return _mask_filter_result(result, mask)
[ "def", "sobel_h", "(", "image", ",", "mask", "=", "None", ")", ":", "assert_nD", "(", "image", ",", "2", ")", "image", "=", "img_as_float", "(", "image", ")", "result", "=", "convolve", "(", "image", ",", "HSOBEL_WEIGHTS", ")", "return", "_mask_filter_re...
find the horizontal edges of an image using the sobel transform .
train
false
33,284
def _GenerateMSBuildFiltersFile(filters_path, source_files, rule_dependencies, extension_to_rule_name): filter_group = [] source_group = [] _AppendFiltersForMSBuild('', source_files, rule_dependencies, extension_to_rule_name, filter_group, source_group) if filter_group: content = ['Project', {'ToolsVersion': '4.0', 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}, (['ItemGroup'] + filter_group), (['ItemGroup'] + source_group)] easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) elif os.path.exists(filters_path): os.unlink(filters_path)
[ "def", "_GenerateMSBuildFiltersFile", "(", "filters_path", ",", "source_files", ",", "rule_dependencies", ",", "extension_to_rule_name", ")", ":", "filter_group", "=", "[", "]", "source_group", "=", "[", "]", "_AppendFiltersForMSBuild", "(", "''", ",", "source_files",...
generate the filters file .
train
false
33,285
def segment_range_to_fragment_range(segment_start, segment_end, segment_size, fragment_size): fragment_start = (((segment_start / segment_size) * fragment_size) if (segment_start is not None) else None) fragment_end = (None if (segment_end is None) else ((((segment_end + 1) / segment_size) * fragment_size) if (segment_start is None) else ((((segment_end + 1) / segment_size) * fragment_size) - 1))) return (fragment_start, fragment_end)
[ "def", "segment_range_to_fragment_range", "(", "segment_start", ",", "segment_end", ",", "segment_size", ",", "fragment_size", ")", ":", "fragment_start", "=", "(", "(", "(", "segment_start", "/", "segment_size", ")", "*", "fragment_size", ")", "if", "(", "segment...
takes a byterange spanning some segments and converts that into a byterange spanning the corresponding fragments within their fragment archives .
train
false
33,286
def backend_and_api_args_from_configuration(configuration): configuration = freeze(configuration) backend_name = configuration['backend'].decode('ascii') backend = backend_loader.get(backend_name) api_args = configuration.remove('backend') return (backend, api_args)
[ "def", "backend_and_api_args_from_configuration", "(", "configuration", ")", ":", "configuration", "=", "freeze", "(", "configuration", ")", "backend_name", "=", "configuration", "[", "'backend'", "]", ".", "decode", "(", "'ascii'", ")", "backend", "=", "backend_loa...
parse the dataset section of agent .
train
false
33,287
@conf.commands.register def arpcachepoison(target, victim, interval=60): tmac = getmacbyip(target) p = (Ether(dst=tmac) / ARP(op='who-has', psrc=victim, pdst=target)) try: while 1: sendp(p, iface_hint=target) if (conf.verb > 1): os.write(1, '.') time.sleep(interval) except KeyboardInterrupt: pass
[ "@", "conf", ".", "commands", ".", "register", "def", "arpcachepoison", "(", "target", ",", "victim", ",", "interval", "=", "60", ")", ":", "tmac", "=", "getmacbyip", "(", "target", ")", "p", "=", "(", "Ether", "(", "dst", "=", "tmac", ")", "/", "A...
poison targets cache with couple arpcachepoison -> none .
train
true
33,288
def decimal_relative_time(d, other=None, ndigits=0, cardinalize=True): if (other is None): other = datetime.utcnow() diff = (other - d) diff_seconds = total_seconds(diff) abs_diff = abs(diff) b_idx = (bisect.bisect(_BOUND_DELTAS, abs_diff) - 1) (bbound, bunit, bname) = _BOUNDS[b_idx] f_diff = (diff_seconds / total_seconds(bunit)) rounded_diff = round(f_diff, ndigits) if cardinalize: return (rounded_diff, _cardinalize_time_unit(bname, abs(rounded_diff))) return (rounded_diff, bname)
[ "def", "decimal_relative_time", "(", "d", ",", "other", "=", "None", ",", "ndigits", "=", "0", ",", "cardinalize", "=", "True", ")", ":", "if", "(", "other", "is", "None", ")", ":", "other", "=", "datetime", ".", "utcnow", "(", ")", "diff", "=", "(...
get a tuple representing the relative time difference between two :class:~datetime .
train
true
33,289
def is_legal_base_name(name): if (name is None): return False m = BASE_NAME_LEGAL_CHARS_P.match(name) return ((m is not None) and (m.group(0) == name))
[ "def", "is_legal_base_name", "(", "name", ")", ":", "if", "(", "name", "is", "None", ")", ":", "return", "False", "m", "=", "BASE_NAME_LEGAL_CHARS_P", ".", "match", "(", "name", ")", "return", "(", "(", "m", "is", "not", "None", ")", "and", "(", "m",...
validates that name is a legal base name for a graph resource .
train
false
33,290
@pytest.fixture def project_bar(english): return _require_project('bar', 'Bar Project', english)
[ "@", "pytest", ".", "fixture", "def", "project_bar", "(", "english", ")", ":", "return", "_require_project", "(", "'bar'", ",", "'Bar Project'", ",", "english", ")" ]
require bar test project .
train
false
33,291
def request_json(url, **kwargs): validator = kwargs.pop('validator', None) response = request_response(url, **kwargs) if (response is not None): try: result = response.json() if (validator and (not validator(result))): logger.error('JSON validation result failed') else: return result except ValueError: logger.error('Response returned invalid JSON data') if headphones.VERBOSE: server_message(response)
[ "def", "request_json", "(", "url", ",", "**", "kwargs", ")", ":", "validator", "=", "kwargs", ".", "pop", "(", "'validator'", ",", "None", ")", "response", "=", "request_response", "(", "url", ",", "**", "kwargs", ")", "if", "(", "response", "is", "not...
wrapper for request_response .
train
false
33,292
def domains_configured(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): if ((not self.domain_configs.configured) and CONF.identity.domain_specific_drivers_enabled): with self.domain_configs.lock: if (not self.domain_configs.configured): self.domain_configs.setup_domain_drivers(self.driver, self.resource_api) return f(self, *args, **kwargs) return wrapper
[ "def", "domains_configured", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "(", "not", "self", ".", "domain_configs", ".", "configured", ...
wrap api calls to lazy load domain configs after init .
train
false
33,294
def system_groovy(registry, xml_parent, data): root_tag = 'hudson.plugins.groovy.SystemGroovy' sysgroovy = XML.SubElement(xml_parent, root_tag) sysgroovy.append(_groovy_common_scriptSource(data)) XML.SubElement(sysgroovy, 'bindings').text = str(data.get('bindings', '')) XML.SubElement(sysgroovy, 'classpath').text = str(data.get('class-path', ''))
[ "def", "system_groovy", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "root_tag", "=", "'hudson.plugins.groovy.SystemGroovy'", "sysgroovy", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "root_tag", ")", "sysgroovy", ".", "append", "(", "_gr...
yaml: system-groovy execute a system groovy script or command .
train
false
33,295
@contextlib.contextmanager def use_master(): old = getattr(multidb.pinning._locals, 'pinned', False) multidb.pinning.pin_this_thread() try: (yield) finally: multidb.pinning._locals.pinned = old
[ "@", "contextlib", ".", "contextmanager", "def", "use_master", "(", ")", ":", "old", "=", "getattr", "(", "multidb", ".", "pinning", ".", "_locals", ",", "'pinned'", ",", "False", ")", "multidb", ".", "pinning", ".", "pin_this_thread", "(", ")", "try", "...
within this context .
train
false
33,296
def image_tag_get_all(context, image_id, session=None): session = (session or get_session()) tags = session.query(models.ImageTag).filter_by(image_id=image_id).filter_by(deleted=False).order_by(sqlalchemy.asc(models.ImageTag.created_at)).all() return [tag['value'] for tag in tags]
[ "def", "image_tag_get_all", "(", "context", ",", "image_id", ",", "session", "=", "None", ")", ":", "session", "=", "(", "session", "or", "get_session", "(", ")", ")", "tags", "=", "session", ".", "query", "(", "models", ".", "ImageTag", ")", ".", "fil...
get a list of tags for a specific image .
train
false
33,297
def _linear_func(t, y, a): return a.dot(y)
[ "def", "_linear_func", "(", "t", ",", "y", ",", "a", ")", ":", "return", "a", ".", "dot", "(", "y", ")" ]
linear system dy/dt = a * y .
train
false
33,300
def copyfileobj(fsrc, fdst, length=(64 * 1024)): while True: buf = fsrc.read(length) if (not buf): break fdst.write(buf)
[ "def", "copyfileobj", "(", "fsrc", ",", "fdst", ",", "length", "=", "(", "64", "*", "1024", ")", ")", ":", "while", "True", ":", "buf", "=", "fsrc", ".", "read", "(", "length", ")", "if", "(", "not", "buf", ")", ":", "break", "fdst", ".", "writ...
copy length bytes from fileobj src to fileobj dst .
train
true
33,301
def doc_image_represent(filename): if (not filename): return current.messages['NONE'] return DIV(A(IMG(_src=URL(c='default', f='download', args=filename), _height=40), _class='zoom', _href=URL(c='default', f='download', args=filename)))
[ "def", "doc_image_represent", "(", "filename", ")", ":", "if", "(", "not", "filename", ")", ":", "return", "current", ".", "messages", "[", "'NONE'", "]", "return", "DIV", "(", "A", "(", "IMG", "(", "_src", "=", "URL", "(", "c", "=", "'default'", ","...
represent an image as a clickable thumbnail .
train
false
33,304
@pytest.mark.cmd def test_test_checks_srctgt_missing_args(): with pytest.raises(CommandError) as e: call_command('test_checks', '--source="files"') assert ('Use a pair of --source and --target' in str(e)) with pytest.raises(CommandError) as e: call_command('test_checks', '--target="leers"') assert ('Use a pair of --source and --target' in str(e))
[ "@", "pytest", ".", "mark", ".", "cmd", "def", "test_test_checks_srctgt_missing_args", "(", ")", ":", "with", "pytest", ".", "raises", "(", "CommandError", ")", "as", "e", ":", "call_command", "(", "'test_checks'", ",", "'--source=\"files\"'", ")", "assert", "...
check a --source --target with incomplete options .
train
false
33,306
def constant_or_value(x, rtype, name=None, ndim=None, dtype=None): x_ = scal.convert(x, dtype=dtype) bcastable = [(d == 1) for d in x_.shape] if (ndim is not None): if (len(bcastable) < ndim): bcastable = (([True] * (ndim - len(bcastable))) + bcastable) elif (len(bcastable) > ndim): raise ValueError(('ndarray could not be cast to constant with %i dimensions' % ndim)) assert (len(bcastable) == ndim) try: if (rtype is TensorConstant): rval = rtype(TensorType(dtype=x_.dtype, broadcastable=bcastable), x_.copy(), name=name) return rval else: return rtype(TensorType(dtype=x_.dtype, broadcastable=bcastable), x_, name=name) except Exception: raise TypeError(('Could not convert %s to TensorType' % x), type(x))
[ "def", "constant_or_value", "(", "x", ",", "rtype", ",", "name", "=", "None", ",", "ndim", "=", "None", ",", "dtype", "=", "None", ")", ":", "x_", "=", "scal", ".", "convert", "(", "x", ",", "dtype", "=", "dtype", ")", "bcastable", "=", "[", "(",...
return a symbolic constant with value x .
train
false
33,307
def _import_string(names): _names = [] for (name, asname) in names: if (asname is not None): _names.append(('%s as %s' % (name, asname))) else: _names.append(name) return ', '.join(_names)
[ "def", "_import_string", "(", "names", ")", ":", "_names", "=", "[", "]", "for", "(", "name", ",", "asname", ")", "in", "names", ":", "if", "(", "asname", "is", "not", "None", ")", ":", "_names", ".", "append", "(", "(", "'%s as %s'", "%", "(", "...
return a list of formatted as a string .
train
true
33,308
def isgenerator(object): return isinstance(object, types.GeneratorType)
[ "def", "isgenerator", "(", "object", ")", ":", "return", "isinstance", "(", "object", ",", "types", ".", "GeneratorType", ")" ]
return true if the object is a generator .
train
false
33,309
def terminal_size(file=None): if (file is None): file = _get_stdout() try: s = struct.pack(str(u'HHHH'), 0, 0, 0, 0) x = fcntl.ioctl(file, termios.TIOCGWINSZ, s) (lines, width, xpixels, ypixels) = struct.unpack(str(u'HHHH'), x) if (lines > 12): lines -= 6 if (width > 10): width -= 1 if ((lines <= 0) or (width <= 0)): raise Exception(u'unable to get terminal size') return (lines, width) except Exception: try: return (int(os.environ.get(u'LINES')), int(os.environ.get(u'COLUMNS'))) except TypeError: lines = conf.max_lines width = conf.max_width if (lines is None): lines = 25 if (width is None): width = 80 return (lines, width)
[ "def", "terminal_size", "(", "file", "=", "None", ")", ":", "if", "(", "file", "is", "None", ")", ":", "file", "=", "_get_stdout", "(", ")", "try", ":", "s", "=", "struct", ".", "pack", "(", "str", "(", "u'HHHH'", ")", ",", "0", ",", "0", ",", ...
returns a tuple containing the height and width of the terminal .
train
false
33,311
def get_distribution_names(namespace_pairs, rv_base_class): distn_names = [] distn_gen_names = [] for (name, value) in namespace_pairs: if name.startswith('_'): continue if (name.endswith('_gen') and issubclass(value, rv_base_class)): distn_gen_names.append(name) if isinstance(value, rv_base_class): distn_names.append(name) return (distn_names, distn_gen_names)
[ "def", "get_distribution_names", "(", "namespace_pairs", ",", "rv_base_class", ")", ":", "distn_names", "=", "[", "]", "distn_gen_names", "=", "[", "]", "for", "(", "name", ",", "value", ")", "in", "namespace_pairs", ":", "if", "name", ".", "startswith", "("...
collect names of statistical distributions and their generators .
train
false
33,312
def ConvertAtomTimestampToEpoch(timestamp): return time.mktime(time.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.000Z'))
[ "def", "ConvertAtomTimestampToEpoch", "(", "timestamp", ")", ":", "return", "time", ".", "mktime", "(", "time", ".", "strptime", "(", "timestamp", ",", "'%Y-%m-%dT%H:%M:%S.000Z'", ")", ")" ]
helper function to convert a timestamp string .
train
false
33,314
def test_repo_fs(client, repository=None): try: nodes = client.snapshot.verify_repository(repository=repository)['nodes'] logger.debug('All nodes can write to the repository') logger.debug('Nodes with verified repository access: {0}'.format(nodes)) except Exception as e: try: if (e.status_code == 404): msg = '--- Repository "{0}" not found. Error: {1}, {2}'.format(repository, e.status_code, e.error) else: msg = '--- Got a {0} response from Elasticsearch. Error message: {1}'.format(e.status_code, e.error) except AttributeError: msg = '--- Error message: {0}'.format(e) raise ActionError('Failed to verify all nodes have repository access: {0}'.format(msg))
[ "def", "test_repo_fs", "(", "client", ",", "repository", "=", "None", ")", ":", "try", ":", "nodes", "=", "client", ".", "snapshot", ".", "verify_repository", "(", "repository", "=", "repository", ")", "[", "'nodes'", "]", "logger", ".", "debug", "(", "'...
test whether all nodes have write access to the repository :arg client: an :class:elasticsearch .
train
false
33,315
def delete_remote_branch(): branch = choose_remote_branch(N_(u'Delete Remote Branch'), N_(u'Delete'), icon=icons.discard()) if (not branch): return rgx = re.compile(u'^(?P<remote>[^/]+)/(?P<branch>.+)$') match = rgx.match(branch) if match: remote = match.group(u'remote') branch = match.group(u'branch') cmds.do(cmds.DeleteRemoteBranch, remote, branch)
[ "def", "delete_remote_branch", "(", ")", ":", "branch", "=", "choose_remote_branch", "(", "N_", "(", "u'Delete Remote Branch'", ")", ",", "N_", "(", "u'Delete'", ")", ",", "icon", "=", "icons", ".", "discard", "(", ")", ")", "if", "(", "not", "branch", "...
launch the delete remote branch dialog .
train
false
33,317
def copy_files(file_paths, destination, ignore_errors=True): for file_path in file_paths: try: shutil.copy(src=file_path, dst=destination) except IOError as e: if (not ignore_errors): raise e return True
[ "def", "copy_files", "(", "file_paths", ",", "destination", ",", "ignore_errors", "=", "True", ")", ":", "for", "file_path", "in", "file_paths", ":", "try", ":", "shutil", ".", "copy", "(", "src", "=", "file_path", ",", "dst", "=", "destination", ")", "e...
copy files to the provided destination .
train
false
33,318
def scanvars(reader, frame, locals): (vars, lasttoken, parent, prefix, value) = ([], None, None, '', __UNDEF__) for (ttype, token, start, end, line) in tokenize.generate_tokens(reader): if (ttype == tokenize.NEWLINE): break if ((ttype == tokenize.NAME) and (token not in keyword.kwlist)): if (lasttoken == '.'): if (parent is not __UNDEF__): value = getattr(parent, token, __UNDEF__) vars.append(((prefix + token), prefix, value)) else: (where, value) = lookup(token, frame, locals) vars.append((token, where, value)) elif (token == '.'): prefix += (lasttoken + '.') parent = value else: (parent, prefix) = (None, '') lasttoken = token return vars
[ "def", "scanvars", "(", "reader", ",", "frame", ",", "locals", ")", ":", "(", "vars", ",", "lasttoken", ",", "parent", ",", "prefix", ",", "value", ")", "=", "(", "[", "]", ",", "None", ",", "None", ",", "''", ",", "__UNDEF__", ")", "for", "(", ...
scan one logical line of python and look up values of variables used .
train
false
33,319
def validate_table(table_text, font_colors): font_colors_len_options = [1, 3, len(table_text)] if (len(font_colors) not in font_colors_len_options): raise exceptions.PlotlyError('Oops, font_colors should be a list of length 1, 3 or len(text)')
[ "def", "validate_table", "(", "table_text", ",", "font_colors", ")", ":", "font_colors_len_options", "=", "[", "1", ",", "3", ",", "len", "(", "table_text", ")", "]", "if", "(", "len", "(", "font_colors", ")", "not", "in", "font_colors_len_options", ")", "...
table-specific validations check that font_colors is supplied correctly (1 .
train
false
33,321
def clean_hostlist(args): hosts = [] networks = [] for i in args: if any((c.isalpha() for c in i)): i = i.split('/')[0] hosts.append(i) elif ('/' in i): networks.append(netaddr.IPNetwork(i)) else: hosts.append(i) result = [] for network in networks: if (network.size >= opts.threads): result.append(network) else: for i in network: hosts.append(str(i)) if hosts: result.append(hosts) return result
[ "def", "clean_hostlist", "(", "args", ")", ":", "hosts", "=", "[", "]", "networks", "=", "[", "]", "for", "i", "in", "args", ":", "if", "any", "(", "(", "c", ".", "isalpha", "(", ")", "for", "c", "in", "i", ")", ")", ":", "i", "=", "i", "."...
returns list of iterables examples: .
train
false
33,322
def get_resource_ref_from_model(model): try: name = model.name pack = model.pack except AttributeError: raise Exception('Cannot build ResourceReference for model: %s. Name or pack missing.', model) return ResourceReference(name=name, pack=pack)
[ "def", "get_resource_ref_from_model", "(", "model", ")", ":", "try", ":", "name", "=", "model", ".", "name", "pack", "=", "model", ".", "pack", "except", "AttributeError", ":", "raise", "Exception", "(", "'Cannot build ResourceReference for model: %s. Name or pack mis...
return a resourcereference given db_model .
train
false
33,323
def init_layman(config=None): if (config is None): config = BareConfig(read_configfile=True, quietness=1) return LaymanAPI(config)
[ "def", "init_layman", "(", "config", "=", "None", ")", ":", "if", "(", "config", "is", "None", ")", ":", "config", "=", "BareConfig", "(", "read_configfile", "=", "True", ",", "quietness", "=", "1", ")", "return", "LaymanAPI", "(", "config", ")" ]
returns the initialized laymanapi .
train
false
33,324
def superuser_required(view): @wraps(view) def decorated(request, *args, **kwargs): if (not request.user.is_authenticated()): return login(request, template_name='admin/login.html', authentication_form=AdminAuthenticationForm, extra_context={'title': _('Log in'), 'app_path': request.get_full_path(), REDIRECT_FIELD_NAME: request.get_full_path()}) if (not (request.user.is_active and request.user.is_superuser)): return render_to_response('admin/permission_denied.html', {'request': request, 'user': request.user}) return view(request, *args, **kwargs) return decorated
[ "def", "superuser_required", "(", "view", ")", ":", "@", "wraps", "(", "view", ")", "def", "decorated", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "request", ".", "user", ".", "is_authenticated", "(", ")", ")",...
wrap a view so that is only accessible to superusers .
train
false
33,326
def generate_binary_structure(rank, connectivity): if (connectivity < 1): connectivity = 1 if (rank < 1): if (connectivity < 1): return numpy.array(0, dtype=bool) else: return numpy.array(1, dtype=bool) output = numpy.fabs((numpy.indices(([3] * rank)) - 1)) output = numpy.add.reduce(output, 0) return numpy.asarray((output <= connectivity), dtype=bool)
[ "def", "generate_binary_structure", "(", "rank", ",", "connectivity", ")", ":", "if", "(", "connectivity", "<", "1", ")", ":", "connectivity", "=", "1", "if", "(", "rank", "<", "1", ")", ":", "if", "(", "connectivity", "<", "1", ")", ":", "return", "...
generate a binary structure for binary morphological operations .
train
false
33,327
def CTRL(c): assert ('@' <= c <= '_') return chr((ord(c) - ord('@')))
[ "def", "CTRL", "(", "c", ")", ":", "assert", "(", "'@'", "<=", "c", "<=", "'_'", ")", "return", "chr", "(", "(", "ord", "(", "c", ")", "-", "ord", "(", "'@'", ")", ")", ")" ]
make a control character .
train
false
33,328
def load_schema(name): data = pkgutil.get_data('jsonschema', 'schemas/{0}.json'.format(name)) return json.loads(data.decode('utf-8'))
[ "def", "load_schema", "(", "name", ")", ":", "data", "=", "pkgutil", ".", "get_data", "(", "'jsonschema'", ",", "'schemas/{0}.json'", ".", "format", "(", "name", ")", ")", "return", "json", ".", "loads", "(", "data", ".", "decode", "(", "'utf-8'", ")", ...
load a schema from .
train
true
33,329
@cython.ccall @cython.returns(cython.double) def c_call(x): return x
[ "@", "cython", ".", "ccall", "@", "cython", ".", "returns", "(", "cython", ".", "double", ")", "def", "c_call", "(", "x", ")", ":", "return", "x" ]
test that a declared return type is honoured when compiled .
train
false
33,330
def complete_skipper(cmd, line, start, end, ctx): parts = line.split(' ') skip_part_num = 0 for (i, s) in enumerate(parts): if (s in END_PROC_TOKENS): skip_part_num = (i + 1) while (len(parts) > skip_part_num): if (parts[skip_part_num] not in SKIP_TOKENS): break skip_part_num += 1 if (skip_part_num == 0): return set() if (len(parts) == (skip_part_num + 1)): comp_func = complete_command else: comp = builtins.__xonsh_shell__.shell.completer comp_func = comp.complete skip_len = (len(' '.join(line[:skip_part_num])) + 1) return comp_func(cmd, ' '.join(parts[skip_part_num:]), (start - skip_len), (end - skip_len), ctx)
[ "def", "complete_skipper", "(", "cmd", ",", "line", ",", "start", ",", "end", ",", "ctx", ")", ":", "parts", "=", "line", ".", "split", "(", "' '", ")", "skip_part_num", "=", "0", "for", "(", "i", ",", "s", ")", "in", "enumerate", "(", "parts", "...
skip over several tokens and complete based on the rest of the line .
train
false
33,332
def DuplicateFlags(flagnames=None): flag_values = gflags.FlagValues() for name in flagnames: gflags.DEFINE_boolean(name, False, ('Flag named %s' % (name,)), flag_values=flag_values) return flag_values
[ "def", "DuplicateFlags", "(", "flagnames", "=", "None", ")", ":", "flag_values", "=", "gflags", ".", "FlagValues", "(", ")", "for", "name", "in", "flagnames", ":", "gflags", ".", "DEFINE_boolean", "(", "name", ",", "False", ",", "(", "'Flag named %s'", "%"...
returns a new flagvalues object with the requested flagnames .
train
false
33,333
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
33,335
def addFaces(geometryOutput, faces): if (geometryOutput.__class__ == list): for element in geometryOutput: addFaces(element, faces) return if (geometryOutput.__class__ != dict): return for geometryOutputKey in geometryOutput.keys(): geometryOutputValue = geometryOutput[geometryOutputKey] if (geometryOutputKey == 'face'): for face in geometryOutputValue: faces.append(face) else: addFaces(geometryOutputValue, faces)
[ "def", "addFaces", "(", "geometryOutput", ",", "faces", ")", ":", "if", "(", "geometryOutput", ".", "__class__", "==", "list", ")", ":", "for", "element", "in", "geometryOutput", ":", "addFaces", "(", "element", ",", "faces", ")", "return", "if", "(", "g...
add the faces .
train
false
33,337
def tmsiReallocationCommand(): a = TpPd(pd=5) b = MessageType(mesType=26) c = LocalAreaId() d = MobileId() packet = (((a / b) / c) / d) return packet
[ "def", "tmsiReallocationCommand", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "5", ")", "b", "=", "MessageType", "(", "mesType", "=", "26", ")", "c", "=", "LocalAreaId", "(", ")", "d", "=", "MobileId", "(", ")", "packet", "=", "(", "(", "(",...
tmsi reallocation command section 9 .
train
true
33,338
def _loadExamples(path): examplesRaw = safe_load(path.getContent()) examplesMap = dict(((example['id'], example) for example in examplesRaw)) if (len(examplesRaw) != len(examplesMap)): identifiers = [example['id'] for example in examplesRaw] duplicates = list((identifier for (index, identifier) in enumerate(identifiers) if (identifiers.index(identifier) != index))) raise Exception(('Duplicate identifiers in example file: %r' % (duplicates,))) return examplesMap
[ "def", "_loadExamples", "(", "path", ")", ":", "examplesRaw", "=", "safe_load", "(", "path", ".", "getContent", "(", ")", ")", "examplesMap", "=", "dict", "(", "(", "(", "example", "[", "'id'", "]", ",", "example", ")", "for", "example", "in", "example...
read the yaml-format http session examples from the file at the given path .
train
false
33,339
def _reverse_url_pattern(url_pattern, *args): group_index = [0] def expand_group(match): group = match.group(1) try: value = str(args[group_index[0]]) group_index[0] += 1 except IndexError: raise CannotReversePattern('Not enough arguments in url tag') if (not re.match((group + '$'), value)): raise CannotReversePattern(("Value %r doesn't match (%r)" % (value, group))) return value result = re.sub('\\(([^)]+)\\)', expand_group, url_pattern.pattern) result = result.replace('^', '') result = result.replace('$', '') return result
[ "def", "_reverse_url_pattern", "(", "url_pattern", ",", "*", "args", ")", ":", "group_index", "=", "[", "0", "]", "def", "expand_group", "(", "match", ")", ":", "group", "=", "match", ".", "group", "(", "1", ")", "try", ":", "value", "=", "str", "(",...
turns a regex that matches a url back into a url by replacing the url patterns groups with the given args .
train
false
33,340
def filter_on_demands(ava, required=None, optional=None): if (required is None): required = {} lava = dict([(k.lower(), k) for k in ava.keys()]) for (attr, vals) in required.items(): attr = attr.lower() if (attr in lava): if vals: for val in vals: if (val not in ava[lava[attr]]): raise MissingValue(('Required attribute value missing: %s,%s' % (attr, val))) else: raise MissingValue(('Required attribute missing: %s' % (attr,))) if (optional is None): optional = {} oka = [k.lower() for k in required.keys()] oka.extend([k.lower() for k in optional.keys()]) for attr in lava.keys(): if (attr not in oka): del ava[lava[attr]] return ava
[ "def", "filter_on_demands", "(", "ava", ",", "required", "=", "None", ",", "optional", "=", "None", ")", ":", "if", "(", "required", "is", "None", ")", ":", "required", "=", "{", "}", "lava", "=", "dict", "(", "[", "(", "k", ".", "lower", "(", ")...
never return more than is needed .
train
true
33,341
def _stylesheet_param_dict(paramsDict, kwargsDict): paramsDict = dict(paramsDict) for (k, v) in kwargsDict.items(): if (v is not None): paramsDict[k] = v paramsDict = stylesheet_params(**paramsDict) return paramsDict
[ "def", "_stylesheet_param_dict", "(", "paramsDict", ",", "kwargsDict", ")", ":", "paramsDict", "=", "dict", "(", "paramsDict", ")", "for", "(", "k", ",", "v", ")", "in", "kwargsDict", ".", "items", "(", ")", ":", "if", "(", "v", "is", "not", "None", ...
return a copy of paramsdict .
train
true
33,342
def handleAttributes(text, parent): def attributeCallback(match): parent.set(match.group(1), match.group(2).replace(u'\n', u' ')) return ATTR_RE.sub(attributeCallback, text)
[ "def", "handleAttributes", "(", "text", ",", "parent", ")", ":", "def", "attributeCallback", "(", "match", ")", ":", "parent", ".", "set", "(", "match", ".", "group", "(", "1", ")", ",", "match", ".", "group", "(", "2", ")", ".", "replace", "(", "u...
set values of an element based on attribute definitions .
train
false
33,345
def set_makeopts(value): return set_var('MAKEOPTS', value)
[ "def", "set_makeopts", "(", "value", ")", ":", "return", "set_var", "(", "'MAKEOPTS'", ",", "value", ")" ]
set the makeopts variable return a dict containing the new value for variable:: {<variable>: {old: <old-value> .
train
false
33,346
def scan_postgrey_line(date, log, collector): m = re.match('action=(greylist|pass), reason=(.*?), (?:delay=\\d+, )?client_name=(.*), client_address=(.*), sender=(.*), recipient=(.*)', log) if m: (action, reason, client_name, client_address, sender, recipient) = m.groups() key = (client_address, sender) if ((action == 'greylist') and (reason == 'new')): collector['postgrey'].setdefault(recipient, {})[key] = (date, None) elif ((action == 'pass') and (reason == 'triplet found') and (key in collector['postgrey'].get(recipient, {}))): collector['postgrey'][recipient][key] = (collector['postgrey'][recipient][key][0], date)
[ "def", "scan_postgrey_line", "(", "date", ",", "log", ",", "collector", ")", ":", "m", "=", "re", ".", "match", "(", "'action=(greylist|pass), reason=(.*?), (?:delay=\\\\d+, )?client_name=(.*), client_address=(.*), sender=(.*), recipient=(.*)'", ",", "log", ")", "if", "m", ...
scan a postgrey log line and extract interesting data .
train
false
33,347
def get_words_by_filename(filename): filepath = join(TEST_DATA_PATH, filename) return get_words(filepath)
[ "def", "get_words_by_filename", "(", "filename", ")", ":", "filepath", "=", "join", "(", "TEST_DATA_PATH", ",", "filename", ")", "return", "get_words", "(", "filepath", ")" ]
test get_words from filepath .
train
false
33,348
def run_solvers(model, true_params, alpha, get_l1_slsqp_results, get_l1_cvxopt_results, print_summaries): results = {} results['results_ML'] = model.fit(method='newton') start_params = results['results_ML'].params.ravel(order='F') if get_l1_slsqp_results: results['results_l1_slsqp'] = model.fit_regularized(method='l1', alpha=alpha, maxiter=1000, start_params=start_params, retall=True) if get_l1_cvxopt_results: results['results_l1_cvxopt_cp'] = model.fit_regularized(method='l1_cvxopt_cp', alpha=alpha, maxiter=50, start_params=start_params, retall=True, feastol=1e-05) return results
[ "def", "run_solvers", "(", "model", ",", "true_params", ",", "alpha", ",", "get_l1_slsqp_results", ",", "get_l1_cvxopt_results", ",", "print_summaries", ")", ":", "results", "=", "{", "}", "results", "[", "'results_ML'", "]", "=", "model", ".", "fit", "(", "...
runs the solvers using the specified settings and returns a result string .
train
false
33,349
def BoolEncoder(field_number, is_repeated, is_packed): false_byte = chr(0) true_byte = chr(1) if is_packed: tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) local_EncodeVarint = _EncodeVarint def EncodePackedField(write, value): write(tag_bytes) local_EncodeVarint(write, len(value)) for element in value: if element: write(true_byte) else: write(false_byte) return EncodePackedField elif is_repeated: tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) def EncodeRepeatedField(write, value): for element in value: write(tag_bytes) if element: write(true_byte) else: write(false_byte) return EncodeRepeatedField else: tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) def EncodeField(write, value): write(tag_bytes) if value: return write(true_byte) return write(false_byte) return EncodeField
[ "def", "BoolEncoder", "(", "field_number", ",", "is_repeated", ",", "is_packed", ")", ":", "false_byte", "=", "chr", "(", "0", ")", "true_byte", "=", "chr", "(", "1", ")", "if", "is_packed", ":", "tag_bytes", "=", "TagBytes", "(", "field_number", ",", "w...
returns an encoder for a boolean field .
train
true
33,350
def package_version(package, local=False, npm='npm'): options = ['--json true', '--silent'] if local: options.append('-l') else: options.append('-g') options = ' '.join(options) with hide('running', 'stdout'): res = run(('%(npm)s list %(options)s' % locals()), pty=False) dependencies = json.loads(res).get('dependencies', {}) pkg_data = dependencies.get(package) if pkg_data: return pkg_data['version'] else: return None
[ "def", "package_version", "(", "package", ",", "local", "=", "False", ",", "npm", "=", "'npm'", ")", ":", "options", "=", "[", "'--json true'", ",", "'--silent'", "]", "if", "local", ":", "options", ".", "append", "(", "'-l'", ")", "else", ":", "option...
get the installed version of a node .
train
false
33,352
def get_filepath_or_buffer(filepath_or_buffer, encoding=None, compression=None): if _is_url(filepath_or_buffer): url = str(filepath_or_buffer) req = _urlopen(url) content_encoding = req.headers.get('Content-Encoding', None) if (content_encoding == 'gzip'): compression = 'gzip' reader = BytesIO(req.read()) return (reader, encoding, compression) if _is_s3_url(filepath_or_buffer): from pandas.io import s3 return s3.get_filepath_or_buffer(filepath_or_buffer, encoding=encoding, compression=compression) filepath_or_buffer = _stringify_path(filepath_or_buffer) return (_expand_user(filepath_or_buffer), None, compression)
[ "def", "get_filepath_or_buffer", "(", "filepath_or_buffer", ",", "encoding", "=", "None", ",", "compression", "=", "None", ")", ":", "if", "_is_url", "(", "filepath_or_buffer", ")", ":", "url", "=", "str", "(", "filepath_or_buffer", ")", "req", "=", "_urlopen"...
if the filepath_or_buffer is a url .
train
false
33,353
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
33,355
def with_app(*args, **kwargs): def generator(func): @wraps(func) def deco(*args2, **kwargs2): app = TestApp(*args, **kwargs) func(app, *args2, **kwargs2) app.cleanup() return deco return generator
[ "def", "with_app", "(", "*", "args", ",", "**", "kwargs", ")", ":", "def", "generator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "deco", "(", "*", "args2", ",", "**", "kwargs2", ")", ":", "app", "=", "TestApp", "(", "*", "a...
make a testapp with args and kwargs .
train
false