id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
45,976
def get_readme_path(file_path):
    """Return the path of the README.md next to *file_path* under REPO_ROOT.

    Parameters:
        file_path: repository-relative path of a sample file.

    Returns:
        The joined path to the sibling README.md (existence not checked).
    """
    # Renamed the local from `dir` to `parent_dir`: `dir` shadowed the builtin.
    parent_dir = os.path.dirname(file_path)
    return os.path.join(REPO_ROOT, parent_dir, 'README.md')
[ "def", "get_readme_path", "(", "file_path", ")", ":", "dir", "=", "os", ".", "path", ".", "dirname", "(", "file_path", ")", "readme", "=", "os", ".", "path", ".", "join", "(", "REPO_ROOT", ",", "dir", ",", "'README.md'", ")", "return", "readme" ]
gets the readme for an associated sample file .
train
false
45,977
def group_by_field(table, name):
    """Return a dict of field_state -> [row_headers] from *table*.

    Parameters:
        table: sequence of rows; row 0 is the header row, and column 0 of
            every following row is that row's header/label.
        name: entry in the header row naming the column to group by.

    Returns:
        dict mapping each distinct value of the *name* column to the list of
        row headers (column-0 values) whose row holds that value.

    Raises:
        ValueError: if *name* does not appear in the header row.
    """
    try:
        col_index = table[0].index(name)
    except ValueError as e:
        # Chain the lookup failure so the original traceback stays visible
        # (the caught `e` was previously unused).
        raise ValueError("Couldn't find name %s in headers: %s"
                         % (name, table[0])) from e
    result = defaultdict(list)
    for row in table[1:]:
        (header, state) = (row[0], row[col_index])
        result[state].append(header)
    return result
[ "def", "group_by_field", "(", "table", ",", "name", ")", ":", "try", ":", "col_index", "=", "table", "[", "0", "]", ".", "index", "(", "name", ")", "except", "ValueError", "as", "e", ":", "raise", "ValueError", "(", "(", "\"Couldn't find name %s in headers: %s\"", "%", "(", "name", ",", "table", "[", "0", "]", ")", ")", ")", "result", "=", "defaultdict", "(", "list", ")", "for", "row", "in", "table", "[", "1", ":", "]", ":", "(", "header", ",", "state", ")", "=", "(", "row", "[", "0", "]", ",", "row", "[", "col_index", "]", ")", "result", "[", "state", "]", ".", "append", "(", "header", ")", "return", "result" ]
returns dict of field_state:[row_headers] from table .
train
false
45,978
@event.listens_for(PolymorphicVerticalProperty, 'mapper_configured', propagate=True)
def on_new_class(mapper, cls_):
    """Build ``cls_.type_map`` from mapped columns carrying 'type' info.

    Each qualifying column contributes two entries — one keyed by its
    Python type and one by its discriminator string — both mapping to the
    (column_key, discriminator) pair.  None maps to (None, 'none').
    """
    type_map = {type(None): (None, 'none'), 'none': (None, 'none')}
    for key in mapper.c.keys():
        info = mapper.c[key].info
        if 'type' not in info:
            continue
        python_type, discriminator = info['type']
        entry = (key, discriminator)
        type_map[python_type] = entry
        type_map[discriminator] = entry
    cls_.type_map = type_map
[ "@", "event", ".", "listens_for", "(", "PolymorphicVerticalProperty", ",", "'mapper_configured'", ",", "propagate", "=", "True", ")", "def", "on_new_class", "(", "mapper", ",", "cls_", ")", ":", "info_dict", "=", "{", "}", "info_dict", "[", "type", "(", "None", ")", "]", "=", "(", "None", ",", "'none'", ")", "info_dict", "[", "'none'", "]", "=", "(", "None", ",", "'none'", ")", "for", "k", "in", "mapper", ".", "c", ".", "keys", "(", ")", ":", "col", "=", "mapper", ".", "c", "[", "k", "]", "if", "(", "'type'", "in", "col", ".", "info", ")", ":", "(", "python_type", ",", "discriminator", ")", "=", "col", ".", "info", "[", "'type'", "]", "info_dict", "[", "python_type", "]", "=", "(", "k", ",", "discriminator", ")", "info_dict", "[", "discriminator", "]", "=", "(", "k", ",", "discriminator", ")", "cls_", ".", "type_map", "=", "info_dict" ]
look for column objects with type info in them .
train
false
45,980
def dmp_gf_factor(f, u, K):
    """Factor multivariate polynomials over finite fields.

    Not implemented; always raises NotImplementedError.
    """
    message = 'multivariate polynomials over finite fields'
    raise NotImplementedError(message)
[ "def", "dmp_gf_factor", "(", "f", ",", "u", ",", "K", ")", ":", "raise", "NotImplementedError", "(", "'multivariate polynomials over finite fields'", ")" ]
factor multivariate polynomials over finite fields .
train
false
45,981
def discretize_center_2D(model, x_range, y_range):
    """Discretize *model* by evaluating it at the center of each pixel.

    The ranges are forwarded to np.arange, so they follow its
    (start, stop[, step]) semantics.
    """
    xs = np.arange(*x_range)
    ys = np.arange(*y_range)
    grid_x, grid_y = np.meshgrid(xs, ys)
    return model(grid_x, grid_y)
[ "def", "discretize_center_2D", "(", "model", ",", "x_range", ",", "y_range", ")", ":", "x", "=", "np", ".", "arange", "(", "*", "x_range", ")", "y", "=", "np", ".", "arange", "(", "*", "y_range", ")", "(", "x", ",", "y", ")", "=", "np", ".", "meshgrid", "(", "x", ",", "y", ")", "return", "model", "(", "x", ",", "y", ")" ]
discretize model by taking the value at the center of the pixel .
train
false
45,982
def _fs_home_problem(fs_home): if (fs_home is None): return 'FREESURFER_HOME is not set.' elif (not os.path.exists(fs_home)): return ('FREESURFER_HOME (%s) does not exist.' % fs_home) else: test_dir = os.path.join(fs_home, 'subjects', 'fsaverage') if (not os.path.exists(test_dir)): return ('FREESURFER_HOME (%s) does not contain the fsaverage subject.' % fs_home)
[ "def", "_fs_home_problem", "(", "fs_home", ")", ":", "if", "(", "fs_home", "is", "None", ")", ":", "return", "'FREESURFER_HOME is not set.'", "elif", "(", "not", "os", ".", "path", ".", "exists", "(", "fs_home", ")", ")", ":", "return", "(", "'FREESURFER_HOME (%s) does not exist.'", "%", "fs_home", ")", "else", ":", "test_dir", "=", "os", ".", "path", ".", "join", "(", "fs_home", ",", "'subjects'", ",", "'fsaverage'", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "test_dir", ")", ")", ":", "return", "(", "'FREESURFER_HOME (%s) does not contain the fsaverage subject.'", "%", "fs_home", ")" ]
check freesurfer_home path .
train
false
45,983
def parse_xpi(xpi, addon=None, check=True):
    """Extract and parse an XPI in a temporary directory.

    Parameters:
        xpi: object accepted by get_file() (presumably a file/upload — TODO
            confirm against callers).
        addon: optional addon passed to check_xpi_info() for validation.
        check: when True, validate the parsed info before returning it.

    Raises:
        forms.ValidationError: on I/O or parse failure (extractor-raised
            ValidationErrors are re-raised untouched).
    """
    path = tempfile.mkdtemp()
    try:
        xpi = get_file(xpi)
        extract_xpi(xpi, path)
        xpi_info = Extractor.parse(path)
    except forms.ValidationError:
        raise
    except IOError as e:
        # NOTE(review): `e[0]` indexes the exception directly — Python 2
        # behavior; on Python 3 this line itself raises. Confirm interpreter.
        if (len(e.args) < 2):
            (errno, strerror) = (None, e[0])
        else:
            (errno, strerror) = e
        log.error('I/O error({0}): {1}'.format(errno, strerror))
        raise forms.ValidationError(_('Could not parse the manifest file.'))
    except Exception:
        log.error('XPI parse error', exc_info=True)
        raise forms.ValidationError(_('Could not parse the manifest file.'))
    finally:
        # The scratch dir is always removed, even when parsing fails.
        rm_local_tmp_dir(path)
    if check:
        return check_xpi_info(xpi_info, addon)
    else:
        return xpi_info
[ "def", "parse_xpi", "(", "xpi", ",", "addon", "=", "None", ",", "check", "=", "True", ")", ":", "path", "=", "tempfile", ".", "mkdtemp", "(", ")", "try", ":", "xpi", "=", "get_file", "(", "xpi", ")", "extract_xpi", "(", "xpi", ",", "path", ")", "xpi_info", "=", "Extractor", ".", "parse", "(", "path", ")", "except", "forms", ".", "ValidationError", ":", "raise", "except", "IOError", "as", "e", ":", "if", "(", "len", "(", "e", ".", "args", ")", "<", "2", ")", ":", "(", "errno", ",", "strerror", ")", "=", "(", "None", ",", "e", "[", "0", "]", ")", "else", ":", "(", "errno", ",", "strerror", ")", "=", "e", "log", ".", "error", "(", "'I/O error({0}): {1}'", ".", "format", "(", "errno", ",", "strerror", ")", ")", "raise", "forms", ".", "ValidationError", "(", "_", "(", "'Could not parse the manifest file.'", ")", ")", "except", "Exception", ":", "log", ".", "error", "(", "'XPI parse error'", ",", "exc_info", "=", "True", ")", "raise", "forms", ".", "ValidationError", "(", "_", "(", "'Could not parse the manifest file.'", ")", ")", "finally", ":", "rm_local_tmp_dir", "(", "path", ")", "if", "check", ":", "return", "check_xpi_info", "(", "xpi_info", ",", "addon", ")", "else", ":", "return", "xpi_info" ]
extract and parse an xpi .
train
false
45,986
def UnpackLocation(value):
    """Decode a B64Hex-packed value into a Location.

    The payload is three big-endian doubles: latitude, longitude, accuracy.
    """
    raw = base64hex.B64HexDecode(value)
    lat, lon, acc = struct.unpack('>ddd', raw)
    return Location(latitude=lat, longitude=lon, accuracy=acc)
[ "def", "UnpackLocation", "(", "value", ")", ":", "packed", "=", "base64hex", ".", "B64HexDecode", "(", "value", ")", "(", "latitude", ",", "longitude", ",", "accuracy", ")", "=", "struct", ".", "unpack", "(", "'>ddd'", ",", "packed", ")", "return", "Location", "(", "latitude", "=", "latitude", ",", "longitude", "=", "longitude", ",", "accuracy", "=", "accuracy", ")" ]
converts from a packed .
train
false
45,989
def add_to_dict(functions):
    """Return a decorator that registers the decorated function in
    *functions* under its __name__ and returns it unchanged."""
    def register(func):
        functions[func.__name__] = func
        return func
    return register
[ "def", "add_to_dict", "(", "functions", ")", ":", "def", "decorator", "(", "func", ")", ":", "functions", "[", "func", ".", "__name__", "]", "=", "func", "return", "func", "return", "decorator" ]
a decorator that adds a function to dictionary .
train
false
45,992
def path_and_line(req):
    """Return (path, line_number) of the requirements file an
    InstallRequirement came from, parsed from ``req.comes_from`` of the
    form ``-r <path> (line <n>)``."""
    match = re.match(r'-r (.*) \(line (\d+)\)$', req.comes_from)
    file_path, line_no = match.groups()
    return (file_path, int(line_no))
[ "def", "path_and_line", "(", "req", ")", ":", "(", "path", ",", "line", ")", "=", "re", ".", "match", "(", "'-r (.*) \\\\(line (\\\\d+)\\\\)$'", ",", "req", ".", "comes_from", ")", ".", "groups", "(", ")", "return", "(", "path", ",", "int", "(", "line", ")", ")" ]
return the path and line number of the file from which an installrequirement came .
train
true
45,994
def telescopic_direct(L, R, n, limits):
    """Directly sum the terms of a telescopic sum.

    For limits = (i, a, b), returns the sum over m in [0, n) of
    L with i -> a + m plus R with i -> b - m.
    """
    (i, a, b) = limits
    return sum(L.subs(i, a + m) + R.subs(i, b - m) for m in range(n))
[ "def", "telescopic_direct", "(", "L", ",", "R", ",", "n", ",", "limits", ")", ":", "(", "i", ",", "a", ",", "b", ")", "=", "limits", "s", "=", "0", "for", "m", "in", "range", "(", "n", ")", ":", "s", "+=", "(", "L", ".", "subs", "(", "i", ",", "(", "a", "+", "m", ")", ")", "+", "R", ".", "subs", "(", "i", ",", "(", "b", "-", "m", ")", ")", ")", "return", "s" ]
returns the direct summation of the terms of a telescopic sum l is the term with lower index r is the term with higher index n difference between the indexes of l and r for example: .
train
false
45,995
def model_pk_lock(func):
    """Decorator for ModelViewSet methods that serializes execution per
    object, taking an advisory lock keyed on '<typename>:<pk>'."""
    def locked(self, *args, **kwargs):
        # Imported lazily, matching the original (avoids import cycles).
        from taiga.base.utils.db import get_typename_for_model_class
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        typename = get_typename_for_model_class(self.get_queryset().model)
        lock_key = '{0}:{1}'.format(typename, pk)
        with advisory_lock(lock_key):
            return func(self, *args, **kwargs)
    return locked
[ "def", "model_pk_lock", "(", "func", ")", ":", "def", "decorator", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "from", "taiga", ".", "base", ".", "utils", ".", "db", "import", "get_typename_for_model_class", "pk", "=", "self", ".", "kwargs", ".", "get", "(", "self", ".", "pk_url_kwarg", ",", "None", ")", "tn", "=", "get_typename_for_model_class", "(", "self", ".", "get_queryset", "(", ")", ".", "model", ")", "key", "=", "'{0}:{1}'", ".", "format", "(", "tn", ",", "pk", ")", "with", "advisory_lock", "(", "key", ")", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "decorator" ]
this decorator is designed to be used in modelviewsets methods to lock them based on the model and the id of the selected object .
train
false
45,996
def _update_sample(data, socket, sample=None, project=None):
    """Recompile a sample with the latest annotations.

    When *sample* is None it is resolved from ``data['spider']`` /
    ``data['sample']`` via the project's spiders.  Both 'original_body' and
    'rendered_body' HTML are then filled in: loaded from storage when
    present, otherwise regenerated from the live tab and saved back.

    Returns the sample as a plain dict (via .dump() when available) with
    the HTML bodies set.
    """
    if (sample is None):
        project = (project or socket.spiderspec.project)
        spiders = project.spiders
        spider = spiders[data['spider']]
        samples = spider.samples
        sample = samples[data['sample']]
        # '{{}}' survives the first .format() as a '{}' placeholder so the
        # body name can be substituted later via html_path(name).
        path = 'spiders/{}/{}/{{}}.html'.format(data['spider'], data['sample'])
    else:
        path = _html_path(sample)
    if hasattr(sample, 'dump'):
        sample = sample.dump()
    html_path = path.format
    for (name, type_) in (('original_body', 'raw'), ('rendered_body', None)):
        try:
            path = html_path(name)
            html = decode(socket.storage.open(path).read())
        except IOError:
            # No cached copy: regenerate from the tab and persist it.
            html = decoded_html(socket.tab, type_)
            socket.storage.save(path, ContentFile(html, path))
        sample[name] = html
    return sample
[ "def", "_update_sample", "(", "data", ",", "socket", ",", "sample", "=", "None", ",", "project", "=", "None", ")", ":", "if", "(", "sample", "is", "None", ")", ":", "project", "=", "(", "project", "or", "socket", ".", "spiderspec", ".", "project", ")", "spiders", "=", "project", ".", "spiders", "spider", "=", "spiders", "[", "data", "[", "'spider'", "]", "]", "samples", "=", "spider", ".", "samples", "sample", "=", "samples", "[", "data", "[", "'sample'", "]", "]", "path", "=", "'spiders/{}/{}/{{}}.html'", ".", "format", "(", "data", "[", "'spider'", "]", ",", "data", "[", "'sample'", "]", ")", "else", ":", "path", "=", "_html_path", "(", "sample", ")", "if", "hasattr", "(", "sample", ",", "'dump'", ")", ":", "sample", "=", "sample", ".", "dump", "(", ")", "html_path", "=", "path", ".", "format", "for", "(", "name", ",", "type_", ")", "in", "(", "(", "'original_body'", ",", "'raw'", ")", ",", "(", "'rendered_body'", ",", "None", ")", ")", ":", "try", ":", "path", "=", "html_path", "(", "name", ")", "html", "=", "decode", "(", "socket", ".", "storage", ".", "open", "(", "path", ")", ".", "read", "(", ")", ")", "except", "IOError", ":", "html", "=", "decoded_html", "(", "socket", ".", "tab", ",", "type_", ")", "socket", ".", "storage", ".", "save", "(", "path", ",", "ContentFile", "(", "html", ",", "path", ")", ")", "sample", "[", "name", "]", "=", "html", "return", "sample" ]
recompile sample with latest annotations .
train
false
45,997
def image_update(call=None, kwargs=None):
    """Set the given properties on an OpenNebula image and update it.

    Required kwargs: 'update_type' ('replace' or 'merge'); one of
    'image_id' / 'image_name' (id takes precedence); and one of 'data' /
    'path' (data takes precedence; path names a file whose contents are
    used as the template data).

    Returns a dict with the XML-RPC response: action, updated flag,
    image_id, and error_code.

    Raises:
        SaltCloudSystemExit: when not invoked with -f/--function, or when
            a required kwarg is missing or invalid.
    """
    if (call != 'function'):
        raise SaltCloudSystemExit('The image_allocate function must be called with -f or --function.')
    if (kwargs is None):
        kwargs = {}
    image_id = kwargs.get('image_id', None)
    image_name = kwargs.get('image_name', None)
    path = kwargs.get('path', None)
    data = kwargs.get('data', None)
    update_type = kwargs.get('update_type', None)
    update_args = ['replace', 'merge']
    if (update_type is None):
        raise SaltCloudSystemExit("The image_update function requires an 'update_type' to be provided.")
    # Map the textual update type onto the numeric code the RPC expects.
    if (update_type == update_args[0]):
        update_number = 0
    elif (update_type == update_args[1]):
        update_number = 1
    else:
        raise SaltCloudSystemExit('The update_type argument must be either {0} or {1}.'.format(update_args[0], update_args[1]))
    if image_id:
        if image_name:
            log.warning("Both the 'image_id' and 'image_name' arguments were provided. 'image_id' will take precedence.")
    elif image_name:
        image_id = get_image_id(kwargs={'name': image_name})
    else:
        raise SaltCloudSystemExit("The image_update function requires either an 'image_id' or an 'image_name' to be provided.")
    if data:
        if path:
            log.warning("Both the 'data' and 'path' arguments were provided. 'data' will take precedence.")
    elif path:
        data = salt.utils.fopen(path, mode='r').read()
    else:
        raise SaltCloudSystemExit("The image_update function requires either 'data' or a file 'path' to be provided.")
    (server, user, password) = _get_xml_rpc()
    auth = ':'.join([user, password])
    response = server.one.image.update(auth, int(image_id), data, int(update_number))
    ret = {'action': 'image.update', 'updated': response[0], 'image_id': response[1], 'error_code': response[2]}
    return ret
[ "def", "image_update", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The image_allocate function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "image_id", "=", "kwargs", ".", "get", "(", "'image_id'", ",", "None", ")", "image_name", "=", "kwargs", ".", "get", "(", "'image_name'", ",", "None", ")", "path", "=", "kwargs", ".", "get", "(", "'path'", ",", "None", ")", "data", "=", "kwargs", ".", "get", "(", "'data'", ",", "None", ")", "update_type", "=", "kwargs", ".", "get", "(", "'update_type'", ",", "None", ")", "update_args", "=", "[", "'replace'", ",", "'merge'", "]", "if", "(", "update_type", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_update function requires an 'update_type' to be provided.\"", ")", "if", "(", "update_type", "==", "update_args", "[", "0", "]", ")", ":", "update_number", "=", "0", "elif", "(", "update_type", "==", "update_args", "[", "1", "]", ")", ":", "update_number", "=", "1", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The update_type argument must be either {0} or {1}.'", ".", "format", "(", "update_args", "[", "0", "]", ",", "update_args", "[", "1", "]", ")", ")", "if", "image_id", ":", "if", "image_name", ":", "log", ".", "warning", "(", "\"Both the 'image_id' and 'image_name' arguments were provided. 'image_id' will take precedence.\"", ")", "elif", "image_name", ":", "image_id", "=", "get_image_id", "(", "kwargs", "=", "{", "'name'", ":", "image_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_update function requires either an 'image_id' or an 'image_name' to be provided.\"", ")", "if", "data", ":", "if", "path", ":", "log", ".", "warning", "(", "\"Both the 'data' and 'path' arguments were provided. 
'data' will take precedence.\"", ")", "elif", "path", ":", "data", "=", "salt", ".", "utils", ".", "fopen", "(", "path", ",", "mode", "=", "'r'", ")", ".", "read", "(", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "\"The image_update function requires either 'data' or a file 'path' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "image", ".", "update", "(", "auth", ",", "int", "(", "image_id", ")", ",", "data", ",", "int", "(", "update_number", ")", ")", "ret", "=", "{", "'action'", ":", "'image.update'", ",", "'updated'", ":", "response", "[", "0", "]", ",", "'image_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "ret" ]
set the given properties on an image and update it .
train
true
45,998
def add_input(cmd, immediate=False):
    """Add command input to be processed, either right away or via the
    shared input queue."""
    if not immediate:
        mpstate.input_queue.put(cmd)
    else:
        process_stdin(cmd)
[ "def", "add_input", "(", "cmd", ",", "immediate", "=", "False", ")", ":", "if", "immediate", ":", "process_stdin", "(", "cmd", ")", "else", ":", "mpstate", ".", "input_queue", ".", "put", "(", "cmd", ")" ]
add some command input to be processed .
train
true
45,999
def register_runners(runner_dirs=None, experimental=False, fail_on_failure=True):
    """Discover runner manifests under *runner_dirs* and register them.

    Parameters:
        runner_dirs: directories to scan; defaults to the content utils'
            runner base paths.
        experimental: forwarded to register_runner() for each runner type.
        fail_on_failure: accepted but unused in this body — TODO confirm
            whether failures were meant to abort registration.

    Returns the total number of runner types registered.
    """
    LOG.debug('Start : register runners')
    runner_count = 0
    runner_loader = RunnersLoader()
    if (not runner_dirs):
        runner_dirs = content_utils.get_runners_base_paths()
    runners = runner_loader.get_runners(runner_dirs)
    # NOTE: iteritems() implies this module targets Python 2.
    for (runner, path) in runners.iteritems():
        LOG.debug(('Runner "%s"' % runner))
        runner_manifest = os.path.join(path, MANIFEST_FILE_NAME)
        meta_loader = MetaLoader()
        runner_types = meta_loader.load(runner_manifest)
        for runner_type in runner_types:
            runner_count += register_runner(runner_type, experimental)
    LOG.debug('End : register runners')
    return runner_count
[ "def", "register_runners", "(", "runner_dirs", "=", "None", ",", "experimental", "=", "False", ",", "fail_on_failure", "=", "True", ")", ":", "LOG", ".", "debug", "(", "'Start : register runners'", ")", "runner_count", "=", "0", "runner_loader", "=", "RunnersLoader", "(", ")", "if", "(", "not", "runner_dirs", ")", ":", "runner_dirs", "=", "content_utils", ".", "get_runners_base_paths", "(", ")", "runners", "=", "runner_loader", ".", "get_runners", "(", "runner_dirs", ")", "for", "(", "runner", ",", "path", ")", "in", "runners", ".", "iteritems", "(", ")", ":", "LOG", ".", "debug", "(", "(", "'Runner \"%s\"'", "%", "runner", ")", ")", "runner_manifest", "=", "os", ".", "path", ".", "join", "(", "path", ",", "MANIFEST_FILE_NAME", ")", "meta_loader", "=", "MetaLoader", "(", ")", "runner_types", "=", "meta_loader", ".", "load", "(", "runner_manifest", ")", "for", "runner_type", "in", "runner_types", ":", "runner_count", "+=", "register_runner", "(", "runner_type", ",", "experimental", ")", "LOG", ".", "debug", "(", "'End : register runners'", ")", "return", "runner_count" ]
register runners .
train
false
46,001
def get_unknown_opttrans_attr(path):
    """Collect unknown/unsupported optional-transitive path attributes.

    Returns a dict keyed by attribute type containing every
    BGPPathAttributeUnknown flagged optional|transitive, plus any
    AS4_PATH / AS4_AGGREGATOR attributes of *path*.
    """
    collected = {}
    for attr in path.pathattr_map.values():
        unknown_opt_trans = (
            isinstance(attr, BGPPathAttributeUnknown)
            and (attr.flags & (BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE))
        )
        if (unknown_opt_trans
                or isinstance(attr, BGPPathAttributeAs4Path)
                or isinstance(attr, BGPPathAttributeAs4Aggregator)):
            collected[attr.type] = attr
    return collected
[ "def", "get_unknown_opttrans_attr", "(", "path", ")", ":", "path_attrs", "=", "path", ".", "pathattr_map", "unknown_opt_tran_attrs", "=", "{", "}", "for", "(", "_", ",", "attr", ")", "in", "path_attrs", ".", "items", "(", ")", ":", "if", "(", "(", "isinstance", "(", "attr", ",", "BGPPathAttributeUnknown", ")", "and", "(", "attr", ".", "flags", "&", "(", "BGP_ATTR_FLAG_OPTIONAL", "|", "BGP_ATTR_FLAG_TRANSITIVE", ")", ")", ")", "or", "isinstance", "(", "attr", ",", "BGPPathAttributeAs4Path", ")", "or", "isinstance", "(", "attr", ",", "BGPPathAttributeAs4Aggregator", ")", ")", ":", "unknown_opt_tran_attrs", "[", "attr", ".", "type", "]", "=", "attr", "return", "unknown_opt_tran_attrs" ]
utility method that gives a dict of unknown and unsupported optional transitive path attributes of path .
train
true
46,002
def setVideoMode(videoMode):
    """Set the video mode of the Bits++ box via the DLL.

    When the DLL is unavailable, reports success (1) without doing anything.
    """
    if not haveBitsDLL:
        return 1
    return _bits.bitsSetVideoMode(videoMode)
[ "def", "setVideoMode", "(", "videoMode", ")", ":", "if", "haveBitsDLL", ":", "return", "_bits", ".", "bitsSetVideoMode", "(", "videoMode", ")", "else", ":", "return", "1" ]
set the video mode of the bits++ bits8bitpalettemode = 0x00000001 # normal vsg mode nogammacorrect = 0x00004000 # no gamma correction mode gammacorrect = 0x00008000 # gamma correction mode videoencodedcomms = 0x00080000 .
train
false
46,003
def test_iterator():
    """SparseDataset loads from a scipy CSR matrix and its sequential
    iterator yields a first batch without error."""
    matrix = csr_matrix([[1, 2, 0], [0, 0, 3], [4, 0, 5]])
    dataset = SparseDataset(from_scipy_sparse_dataset=matrix)
    batches = dataset.iterator(mode='sequential', batch_size=1)
    batches.next()
[ "def", "test_iterator", "(", ")", ":", "x", "=", "csr_matrix", "(", "[", "[", "1", ",", "2", ",", "0", "]", ",", "[", "0", ",", "0", ",", "3", "]", ",", "[", "4", ",", "0", ",", "5", "]", "]", ")", "ds", "=", "SparseDataset", "(", "from_scipy_sparse_dataset", "=", "x", ")", "it", "=", "ds", ".", "iterator", "(", "mode", "=", "'sequential'", ",", "batch_size", "=", "1", ")", "it", ".", "next", "(", ")" ]
tests whether sparsedataset can be loaded and initializes iterator .
train
false
46,004
def _is_sunder(name): return ((name[0] == name[(-1)] == '_') and (name[1:2] != '_') and (name[(-2):(-1)] != '_') and (len(name) > 2))
[ "def", "_is_sunder", "(", "name", ")", ":", "return", "(", "(", "name", "[", "0", "]", "==", "name", "[", "(", "-", "1", ")", "]", "==", "'_'", ")", "and", "(", "name", "[", "1", ":", "2", "]", "!=", "'_'", ")", "and", "(", "name", "[", "(", "-", "2", ")", ":", "(", "-", "1", ")", "]", "!=", "'_'", ")", "and", "(", "len", "(", "name", ")", ">", "2", ")", ")" ]
returns true if a _sunder_ name .
train
false
46,005
def widget_display_undecorated(request, user_to_display__username):
    """Left undecorated (no @view) so widget_display_string can reference
    it; returns the render triple for the profile widget."""
    user = get_object_or_404(User, username=user_to_display__username)
    person = get_object_or_404(Person, user=user)
    context = get_personal_data(person)
    uri_meta = mysite.base.view_helpers.get_uri_metadata_for_generating_absolute_links(request)
    context.update(uri_meta)
    return (request, 'profile/widget.html', context)
[ "def", "widget_display_undecorated", "(", "request", ",", "user_to_display__username", ")", ":", "user", "=", "get_object_or_404", "(", "User", ",", "username", "=", "user_to_display__username", ")", "person", "=", "get_object_or_404", "(", "Person", ",", "user", "=", "user", ")", "data", "=", "get_personal_data", "(", "person", ")", "data", ".", "update", "(", "mysite", ".", "base", ".", "view_helpers", ".", "get_uri_metadata_for_generating_absolute_links", "(", "request", ")", ")", "return", "(", "request", ",", "'profile/widget.html'", ",", "data", ")" ]
we leave this function unwrapped by @view .
train
false
46,006
def unary_operator(op):
    """Factory for unary-operator methods on Factor classes.

    Parameters:
        op: operator symbol; only '-' is accepted.

    Returns the method, which builds a NumExprFactor applying *op* —
    flattening into the existing expression when called on a
    NumericalExpression instance.

    Raises:
        ValueError: for an unsupported operator symbol (at factory time).
    """
    valid_ops = {'-'}
    if (op not in valid_ops):
        raise ValueError(('Invalid unary operator %s.' % op))

    @with_doc(("Unary Operator: '%s'" % op))
    @with_name(unary_op_name(op))
    def unary_operator(self):
        # Unary math is only defined for float64 factors.
        if (self.dtype != float64_dtype):
            raise TypeError("Can't apply unary operator {op!r} to instance of {typename!r} with dtype {dtypename!r}.\n{op!r} is only supported for Factors of dtype 'float64'.".format(op=op, typename=type(self).__name__, dtypename=self.dtype.name))
        if isinstance(self, NumericalExpression):
            # Fold the operator into the existing expression rather than
            # nesting another factor around it.
            return NumExprFactor('{op}({expr})'.format(op=op, expr=self._expr), self.inputs, dtype=float64_dtype)
        else:
            return NumExprFactor('{op}x_0'.format(op=op), (self,), dtype=float64_dtype)
    return unary_operator
[ "def", "unary_operator", "(", "op", ")", ":", "valid_ops", "=", "{", "'-'", "}", "if", "(", "op", "not", "in", "valid_ops", ")", ":", "raise", "ValueError", "(", "(", "'Invalid unary operator %s.'", "%", "op", ")", ")", "@", "with_doc", "(", "(", "\"Unary Operator: '%s'\"", "%", "op", ")", ")", "@", "with_name", "(", "unary_op_name", "(", "op", ")", ")", "def", "unary_operator", "(", "self", ")", ":", "if", "(", "self", ".", "dtype", "!=", "float64_dtype", ")", ":", "raise", "TypeError", "(", "\"Can't apply unary operator {op!r} to instance of {typename!r} with dtype {dtypename!r}.\\n{op!r} is only supported for Factors of dtype 'float64'.\"", ".", "format", "(", "op", "=", "op", ",", "typename", "=", "type", "(", "self", ")", ".", "__name__", ",", "dtypename", "=", "self", ".", "dtype", ".", "name", ")", ")", "if", "isinstance", "(", "self", ",", "NumericalExpression", ")", ":", "return", "NumExprFactor", "(", "'{op}({expr})'", ".", "format", "(", "op", "=", "op", ",", "expr", "=", "self", ".", "_expr", ")", ",", "self", ".", "inputs", ",", "dtype", "=", "float64_dtype", ")", "else", ":", "return", "NumExprFactor", "(", "'{op}x_0'", ".", "format", "(", "op", "=", "op", ")", ",", "(", "self", ",", ")", ",", "dtype", "=", "float64_dtype", ")", "return", "unary_operator" ]
factory function for making unary operator methods for factors .
train
true
46,008
def validate_filemask(val):
    """Validate that a file mask contains '*' as the language placeholder."""
    if '*' in val:
        return
    raise ValidationError(_('File mask does not contain * as a language placeholder!'))
[ "def", "validate_filemask", "(", "val", ")", ":", "if", "(", "'*'", "not", "in", "val", ")", ":", "raise", "ValidationError", "(", "_", "(", "'File mask does not contain * as a language placeholder!'", ")", ")" ]
validates file mask that it contains * .
train
false
46,009
def error_summary(error_dict):
    """Build an i18n-friendly {field label: message} summary from a
    validation error dict, prettifying plain field keys."""
    def prettify(raw):
        # Capitalize, de-underscore, and upcase standalone 'url'/'Url'.
        label = re.sub('(?<!\\w)[Uu]rl(?!\\w)', 'URL', raw.replace('_', ' ').capitalize())
        return _(label.replace('_', ' '))
    summary = {}
    for (key, error) in error_dict.iteritems():
        if key == 'resources':
            summary[_('Resources')] = _('Package resource(s) invalid')
        elif key == 'extras':
            summary[_('Extras')] = _('Missing Value')
        elif key == 'extras_validation':
            summary[_('Extras')] = error[0]
        else:
            summary[_(prettify(key))] = error[0]
    return summary
[ "def", "error_summary", "(", "error_dict", ")", ":", "def", "prettify", "(", "field_name", ")", ":", "field_name", "=", "re", ".", "sub", "(", "'(?<!\\\\w)[Uu]rl(?!\\\\w)'", ",", "'URL'", ",", "field_name", ".", "replace", "(", "'_'", ",", "' '", ")", ".", "capitalize", "(", ")", ")", "return", "_", "(", "field_name", ".", "replace", "(", "'_'", ",", "' '", ")", ")", "summary", "=", "{", "}", "for", "(", "key", ",", "error", ")", "in", "error_dict", ".", "iteritems", "(", ")", ":", "if", "(", "key", "==", "'resources'", ")", ":", "summary", "[", "_", "(", "'Resources'", ")", "]", "=", "_", "(", "'Package resource(s) invalid'", ")", "elif", "(", "key", "==", "'extras'", ")", ":", "summary", "[", "_", "(", "'Extras'", ")", "]", "=", "_", "(", "'Missing Value'", ")", "elif", "(", "key", "==", "'extras_validation'", ")", ":", "summary", "[", "_", "(", "'Extras'", ")", "]", "=", "error", "[", "0", "]", "else", ":", "summary", "[", "_", "(", "prettify", "(", "key", ")", ")", "]", "=", "error", "[", "0", "]", "return", "summary" ]
do some i18n stuff on the error_dict keys .
train
false
46,010
@XFAIL
def test_mul2():
    """Marked XFAIL: when this assertion starts passing, drop the marker."""
    assert (2 * (x + 1)).is_Mul
[ "@", "XFAIL", "def", "test_mul2", "(", ")", ":", "assert", "(", "2", "*", "(", "x", "+", "1", ")", ")", ".", "is_Mul" ]
when this fails .
train
false
46,011
def GenerateOAuthAccessTokenUrl(authorized_request_token, oauth_input_params, access_token_url='https://www.google.com/accounts/OAuthGetAccessToken', oauth_version='1.0', oauth_verifier=None):
    """Build the signed URL for exchanging an authorized request token.

    Parameters:
        authorized_request_token: token exposing .key and .secret.
        oauth_input_params: supplies the consumer and signature method.
        access_token_url: access-token exchange endpoint.
        oauth_version: OAuth protocol version string.
        oauth_verifier: included as 'oauth_verifier' when not None
            (presumably the OAuth 1.0a verifier — confirm against callers).

    Returns the parsed URL (atom.url) of the signed request.
    """
    oauth_token = oauth.OAuthToken(authorized_request_token.key, authorized_request_token.secret)
    parameters = {'oauth_version': oauth_version}
    if (oauth_verifier is not None):
        parameters['oauth_verifier'] = oauth_verifier
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(oauth_input_params.GetConsumer(), token=oauth_token, http_url=access_token_url, parameters=parameters)
    # Signing must happen after all parameters are set.
    oauth_request.sign_request(oauth_input_params.GetSignatureMethod(), oauth_input_params.GetConsumer(), oauth_token)
    return atom.url.parse_url(oauth_request.to_url())
[ "def", "GenerateOAuthAccessTokenUrl", "(", "authorized_request_token", ",", "oauth_input_params", ",", "access_token_url", "=", "'https://www.google.com/accounts/OAuthGetAccessToken'", ",", "oauth_version", "=", "'1.0'", ",", "oauth_verifier", "=", "None", ")", ":", "oauth_token", "=", "oauth", ".", "OAuthToken", "(", "authorized_request_token", ".", "key", ",", "authorized_request_token", ".", "secret", ")", "parameters", "=", "{", "'oauth_version'", ":", "oauth_version", "}", "if", "(", "oauth_verifier", "is", "not", "None", ")", ":", "parameters", "[", "'oauth_verifier'", "]", "=", "oauth_verifier", "oauth_request", "=", "oauth", ".", "OAuthRequest", ".", "from_consumer_and_token", "(", "oauth_input_params", ".", "GetConsumer", "(", ")", ",", "token", "=", "oauth_token", ",", "http_url", "=", "access_token_url", ",", "parameters", "=", "parameters", ")", "oauth_request", ".", "sign_request", "(", "oauth_input_params", ".", "GetSignatureMethod", "(", ")", ",", "oauth_input_params", ".", "GetConsumer", "(", ")", ",", "oauth_token", ")", "return", "atom", ".", "url", ".", "parse_url", "(", "oauth_request", ".", "to_url", "(", ")", ")" ]
generates url at which user will login to authorize the request token .
train
false
46,012
def quarantine_renamer(device_path, corrupted_file_path):
    """Move a corrupted file's directory into the device's quarantine area.

    The containing hash dir is invalidated first.  If the destination
    already exists (or is non-empty), a uuid hex suffix is appended and the
    rename retried.  Returns the quarantine directory actually used.
    """
    src_dir = dirname(corrupted_file_path)
    dest_dir = join(device_path, 'quarantined', 'objects', basename(src_dir))
    invalidate_hash(dirname(src_dir))
    try:
        renamer(src_dir, dest_dir)
    except OSError as err:
        if err.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        dest_dir = '%s-%s' % (dest_dir, uuid.uuid4().hex)
        renamer(src_dir, dest_dir)
    return dest_dir
[ "def", "quarantine_renamer", "(", "device_path", ",", "corrupted_file_path", ")", ":", "from_dir", "=", "dirname", "(", "corrupted_file_path", ")", "to_dir", "=", "join", "(", "device_path", ",", "'quarantined'", ",", "'objects'", ",", "basename", "(", "from_dir", ")", ")", "invalidate_hash", "(", "dirname", "(", "from_dir", ")", ")", "try", ":", "renamer", "(", "from_dir", ",", "to_dir", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "not", "in", "(", "errno", ".", "EEXIST", ",", "errno", ".", "ENOTEMPTY", ")", ")", ":", "raise", "to_dir", "=", "(", "'%s-%s'", "%", "(", "to_dir", ",", "uuid", ".", "uuid4", "(", ")", ".", "hex", ")", ")", "renamer", "(", "from_dir", ",", "to_dir", ")", "return", "to_dir" ]
in the case that a file is corrupted .
train
false
46,013
def s3_format_fullname(fname=None, mname=None, lname=None, truncate=True): name = '' if (fname or mname or lname): if (not fname): fname = '' if (not mname): mname = '' if (not lname): lname = '' if truncate: fname = ('%s' % s3_truncate(fname, 24)) mname = ('%s' % s3_truncate(mname, 24)) lname = ('%s' % s3_truncate(lname, 24, nice=False)) name_format = current.deployment_settings.get_pr_name_format() name = (name_format % dict(first_name=fname, middle_name=mname, last_name=lname)) name = name.replace(' ', ' ').rstrip() if truncate: name = s3_truncate(name, 24, nice=False) return name
[ "def", "s3_format_fullname", "(", "fname", "=", "None", ",", "mname", "=", "None", ",", "lname", "=", "None", ",", "truncate", "=", "True", ")", ":", "name", "=", "''", "if", "(", "fname", "or", "mname", "or", "lname", ")", ":", "if", "(", "not", "fname", ")", ":", "fname", "=", "''", "if", "(", "not", "mname", ")", ":", "mname", "=", "''", "if", "(", "not", "lname", ")", ":", "lname", "=", "''", "if", "truncate", ":", "fname", "=", "(", "'%s'", "%", "s3_truncate", "(", "fname", ",", "24", ")", ")", "mname", "=", "(", "'%s'", "%", "s3_truncate", "(", "mname", ",", "24", ")", ")", "lname", "=", "(", "'%s'", "%", "s3_truncate", "(", "lname", ",", "24", ",", "nice", "=", "False", ")", ")", "name_format", "=", "current", ".", "deployment_settings", ".", "get_pr_name_format", "(", ")", "name", "=", "(", "name_format", "%", "dict", "(", "first_name", "=", "fname", ",", "middle_name", "=", "mname", ",", "last_name", "=", "lname", ")", ")", "name", "=", "name", ".", "replace", "(", "' '", ",", "' '", ")", ".", "rstrip", "(", ")", "if", "truncate", ":", "name", "=", "s3_truncate", "(", "name", ",", "24", ",", "nice", "=", "False", ")", "return", "name" ]
formats the full name of a person .
train
false
46,014
def _tgrep_exprs_action(_s, _l, tokens): if (len(tokens) == 1): return (lambda n, m=None, l=None: tokens[0](n, None, {})) tokens = [x for x in tokens if (x != u';')] macro_dict = {} macro_defs = [tok for tok in tokens if isinstance(tok, dict)] for macro_def in macro_defs: macro_dict.update(macro_def) tgrep_exprs = [tok for tok in tokens if (not isinstance(tok, dict))] def top_level_pred(n, m=macro_dict, l=None): label_dict = {} return any((predicate(n, m, label_dict) for predicate in tgrep_exprs)) return top_level_pred
[ "def", "_tgrep_exprs_action", "(", "_s", ",", "_l", ",", "tokens", ")", ":", "if", "(", "len", "(", "tokens", ")", "==", "1", ")", ":", "return", "(", "lambda", "n", ",", "m", "=", "None", ",", "l", "=", "None", ":", "tokens", "[", "0", "]", "(", "n", ",", "None", ",", "{", "}", ")", ")", "tokens", "=", "[", "x", "for", "x", "in", "tokens", "if", "(", "x", "!=", "u';'", ")", "]", "macro_dict", "=", "{", "}", "macro_defs", "=", "[", "tok", "for", "tok", "in", "tokens", "if", "isinstance", "(", "tok", ",", "dict", ")", "]", "for", "macro_def", "in", "macro_defs", ":", "macro_dict", ".", "update", "(", "macro_def", ")", "tgrep_exprs", "=", "[", "tok", "for", "tok", "in", "tokens", "if", "(", "not", "isinstance", "(", "tok", ",", "dict", ")", ")", "]", "def", "top_level_pred", "(", "n", ",", "m", "=", "macro_dict", ",", "l", "=", "None", ")", ":", "label_dict", "=", "{", "}", "return", "any", "(", "(", "predicate", "(", "n", ",", "m", ",", "label_dict", ")", "for", "predicate", "in", "tgrep_exprs", ")", ")", "return", "top_level_pred" ]
this is the top-lebel node in a tgrep2 search string; the predicate function it returns binds together all the state of a tgrep2 search string .
train
false
46,015
@pytest.fixture(scope='session') def dcos_launchpad(dcos_api_session): if ('AWS_STACK_NAME' not in os.environ): pytest.skip('Must use a AWS Cloudformation to run test') stack_name = os.environ['AWS_STACK_NAME'] aws_region = os.environ['AWS_REGION'] aws_access_key_id = os.environ['AWS_ACCESS_KEY_ID'] aws_secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY'] bw = test_util.aws.BotoWrapper(aws_region, aws_access_key_id, aws_secret_access_key) return test_util.aws.DcosCfSimple(stack_name, bw)
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'session'", ")", "def", "dcos_launchpad", "(", "dcos_api_session", ")", ":", "if", "(", "'AWS_STACK_NAME'", "not", "in", "os", ".", "environ", ")", ":", "pytest", ".", "skip", "(", "'Must use a AWS Cloudformation to run test'", ")", "stack_name", "=", "os", ".", "environ", "[", "'AWS_STACK_NAME'", "]", "aws_region", "=", "os", ".", "environ", "[", "'AWS_REGION'", "]", "aws_access_key_id", "=", "os", ".", "environ", "[", "'AWS_ACCESS_KEY_ID'", "]", "aws_secret_access_key", "=", "os", ".", "environ", "[", "'AWS_SECRET_ACCESS_KEY'", "]", "bw", "=", "test_util", ".", "aws", ".", "BotoWrapper", "(", "aws_region", ",", "aws_access_key_id", ",", "aws_secret_access_key", ")", "return", "test_util", ".", "aws", ".", "DcosCfSimple", "(", "stack_name", ",", "bw", ")" ]
interface for direct integration to dcos_launchpad hardware currently only supports aws cf with aws vpc coming soon .
train
false
46,016
def _set_wmi_setting(wmi_class_name, setting, value, server): with salt.utils.winapi.Com(): try: connection = wmi.WMI(namespace=_WMI_NAMESPACE) wmi_class = getattr(connection, wmi_class_name) objs = wmi_class(Name=server)[0] except wmi.x_wmi as error: _LOG.error('Encountered WMI error: %s', error.com_error) except (AttributeError, IndexError) as error: _LOG.error('Error getting %s: %s', wmi_class_name, error) try: setattr(objs, setting, value) return True except wmi.x_wmi as error: _LOG.error('Encountered WMI error: %s', error.com_error) except AttributeError as error: _LOG.error('Error setting %s: %s', setting, error) return False
[ "def", "_set_wmi_setting", "(", "wmi_class_name", ",", "setting", ",", "value", ",", "server", ")", ":", "with", "salt", ".", "utils", ".", "winapi", ".", "Com", "(", ")", ":", "try", ":", "connection", "=", "wmi", ".", "WMI", "(", "namespace", "=", "_WMI_NAMESPACE", ")", "wmi_class", "=", "getattr", "(", "connection", ",", "wmi_class_name", ")", "objs", "=", "wmi_class", "(", "Name", "=", "server", ")", "[", "0", "]", "except", "wmi", ".", "x_wmi", "as", "error", ":", "_LOG", ".", "error", "(", "'Encountered WMI error: %s'", ",", "error", ".", "com_error", ")", "except", "(", "AttributeError", ",", "IndexError", ")", "as", "error", ":", "_LOG", ".", "error", "(", "'Error getting %s: %s'", ",", "wmi_class_name", ",", "error", ")", "try", ":", "setattr", "(", "objs", ",", "setting", ",", "value", ")", "return", "True", "except", "wmi", ".", "x_wmi", "as", "error", ":", "_LOG", ".", "error", "(", "'Encountered WMI error: %s'", ",", "error", ".", "com_error", ")", "except", "AttributeError", "as", "error", ":", "_LOG", ".", "error", "(", "'Error setting %s: %s'", ",", "setting", ",", "error", ")", "return", "False" ]
set the value of the setting for the provided class .
train
true
46,019
def unindent_dict(docdict): can_dict = {} for (name, dstr) in docdict.items(): can_dict[name] = unindent_string(dstr) return can_dict
[ "def", "unindent_dict", "(", "docdict", ")", ":", "can_dict", "=", "{", "}", "for", "(", "name", ",", "dstr", ")", "in", "docdict", ".", "items", "(", ")", ":", "can_dict", "[", "name", "]", "=", "unindent_string", "(", "dstr", ")", "return", "can_dict" ]
unindent all strings in a docdict .
train
false
46,020
def p_command_if(p): p[0] = ('IF', p[2], int(p[4]))
[ "def", "p_command_if", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "'IF'", ",", "p", "[", "2", "]", ",", "int", "(", "p", "[", "4", "]", ")", ")" ]
command : if relexpr then integer .
train
false
46,021
@contextmanager def set_locale(new_locale, lc_var=locale.LC_ALL): current_locale = locale.getlocale() try: locale.setlocale(lc_var, new_locale) try: normalized_locale = locale.getlocale() except ValueError: (yield new_locale) else: if all(((lc is not None) for lc in normalized_locale)): (yield '.'.join(normalized_locale)) else: (yield new_locale) finally: locale.setlocale(lc_var, current_locale)
[ "@", "contextmanager", "def", "set_locale", "(", "new_locale", ",", "lc_var", "=", "locale", ".", "LC_ALL", ")", ":", "current_locale", "=", "locale", ".", "getlocale", "(", ")", "try", ":", "locale", ".", "setlocale", "(", "lc_var", ",", "new_locale", ")", "try", ":", "normalized_locale", "=", "locale", ".", "getlocale", "(", ")", "except", "ValueError", ":", "(", "yield", "new_locale", ")", "else", ":", "if", "all", "(", "(", "(", "lc", "is", "not", "None", ")", "for", "lc", "in", "normalized_locale", ")", ")", ":", "(", "yield", "'.'", ".", "join", "(", "normalized_locale", ")", ")", "else", ":", "(", "yield", "new_locale", ")", "finally", ":", "locale", ".", "setlocale", "(", "lc_var", ",", "current_locale", ")" ]
retrieve locale from a prioritized list of sources and then set locale and save it cls: self object force: a locale to force set return: locale as string or none if i18n should be disabled .
train
false
46,022
@register.simple_tag(takes_context=True) def render_product(context, product): if (not product): return '' names = [('catalogue/partials/product/upc-%s.html' % product.upc), ('catalogue/partials/product/class-%s.html' % product.get_product_class().slug), 'catalogue/partials/product.html'] template_ = select_template(names) context['product'] = product return template_.render(context)
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "render_product", "(", "context", ",", "product", ")", ":", "if", "(", "not", "product", ")", ":", "return", "''", "names", "=", "[", "(", "'catalogue/partials/product/upc-%s.html'", "%", "product", ".", "upc", ")", ",", "(", "'catalogue/partials/product/class-%s.html'", "%", "product", ".", "get_product_class", "(", ")", ".", "slug", ")", ",", "'catalogue/partials/product.html'", "]", "template_", "=", "select_template", "(", "names", ")", "context", "[", "'product'", "]", "=", "product", "return", "template_", ".", "render", "(", "context", ")" ]
render a product snippet as you would see in a browsing display .
train
false
46,023
def _replAllXMLRef(match): ref = match.group(1) value = everyentcharrefsget(ref) if (value is None): if (ref[0] == '#'): return unichr(int(ref[1:])) else: return ref return value
[ "def", "_replAllXMLRef", "(", "match", ")", ":", "ref", "=", "match", ".", "group", "(", "1", ")", "value", "=", "everyentcharrefsget", "(", "ref", ")", "if", "(", "value", "is", "None", ")", ":", "if", "(", "ref", "[", "0", "]", "==", "'#'", ")", ":", "return", "unichr", "(", "int", "(", "ref", "[", "1", ":", "]", ")", ")", "else", ":", "return", "ref", "return", "value" ]
replace the matched xml reference .
train
false
46,024
@pytest.mark.parametrize(u'text', [u'3/4/2012', u'01/12/1900']) def test_issue740(en_tokenizer, text): tokens = en_tokenizer(text) assert (len(tokens) == 1)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'text'", ",", "[", "u'3/4/2012'", ",", "u'01/12/1900'", "]", ")", "def", "test_issue740", "(", "en_tokenizer", ",", "text", ")", ":", "tokens", "=", "en_tokenizer", "(", "text", ")", "assert", "(", "len", "(", "tokens", ")", "==", "1", ")" ]
test that dates are not split and kept as one token .
train
false
46,025
def monitor_except_format(logical_line): if logical_line.startswith('except:'): (yield (6, "ENERGY N201: no 'except:' at least use 'except Exception:'"))
[ "def", "monitor_except_format", "(", "logical_line", ")", ":", "if", "logical_line", ".", "startswith", "(", "'except:'", ")", ":", "(", "yield", "(", "6", ",", "\"ENERGY N201: no 'except:' at least use 'except Exception:'\"", ")", ")" ]
check for except: .
train
false
46,026
def logical_volume_info(path): (out, err) = execute('lvs', '-o', 'vg_all,lv_all', '--separator', '|', path, run_as_root=True) info = [line.split('|') for line in out.splitlines()] if (len(info) != 2): raise RuntimeError((_('Path %s must be LVM logical volume') % path)) return dict(zip(*info))
[ "def", "logical_volume_info", "(", "path", ")", ":", "(", "out", ",", "err", ")", "=", "execute", "(", "'lvs'", ",", "'-o'", ",", "'vg_all,lv_all'", ",", "'--separator'", ",", "'|'", ",", "path", ",", "run_as_root", "=", "True", ")", "info", "=", "[", "line", ".", "split", "(", "'|'", ")", "for", "line", "in", "out", ".", "splitlines", "(", ")", "]", "if", "(", "len", "(", "info", ")", "!=", "2", ")", ":", "raise", "RuntimeError", "(", "(", "_", "(", "'Path %s must be LVM logical volume'", ")", "%", "path", ")", ")", "return", "dict", "(", "zip", "(", "*", "info", ")", ")" ]
get logical volume info .
train
false
46,028
def upsampling_2d(x, indexes, ksize, stride=None, pad=0, outsize=None, cover_all=True): return Upsampling2D(indexes, ksize, stride, pad, outsize, cover_all)(x)
[ "def", "upsampling_2d", "(", "x", ",", "indexes", ",", "ksize", ",", "stride", "=", "None", ",", "pad", "=", "0", ",", "outsize", "=", "None", ",", "cover_all", "=", "True", ")", ":", "return", "Upsampling2D", "(", "indexes", ",", "ksize", ",", "stride", ",", "pad", ",", "outsize", ",", "cover_all", ")", "(", "x", ")" ]
upsampling using pooling indices .
train
false
46,029
def get_crashinfo_dir(host): host_resultdir = getattr(getattr(host, 'job', None), 'resultdir', None) if host_resultdir: infodir = host_resultdir else: infodir = os.path.abspath(os.getcwd()) infodir = os.path.join(infodir, ('crashinfo.%s' % host.hostname)) if (not os.path.exists(infodir)): os.mkdir(infodir) return infodir
[ "def", "get_crashinfo_dir", "(", "host", ")", ":", "host_resultdir", "=", "getattr", "(", "getattr", "(", "host", ",", "'job'", ",", "None", ")", ",", "'resultdir'", ",", "None", ")", "if", "host_resultdir", ":", "infodir", "=", "host_resultdir", "else", ":", "infodir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "getcwd", "(", ")", ")", "infodir", "=", "os", ".", "path", ".", "join", "(", "infodir", ",", "(", "'crashinfo.%s'", "%", "host", ".", "hostname", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "infodir", ")", ")", ":", "os", ".", "mkdir", "(", "infodir", ")", "return", "infodir" ]
find and if necessary create a directory to store crashinfo in .
train
false
46,030
def is_running(proxyname): return {'result': _is_proxy_running(proxyname)}
[ "def", "is_running", "(", "proxyname", ")", ":", "return", "{", "'result'", ":", "_is_proxy_running", "(", "proxyname", ")", "}" ]
check if the specified container is running container container id returns true if container is running otherwise returns false cli example: .
train
false
46,031
def str_strip(arr, to_strip=None, side='both'): if (side == 'both'): f = (lambda x: x.strip(to_strip)) elif (side == 'left'): f = (lambda x: x.lstrip(to_strip)) elif (side == 'right'): f = (lambda x: x.rstrip(to_strip)) else: raise ValueError('Invalid side') return _na_map(f, arr)
[ "def", "str_strip", "(", "arr", ",", "to_strip", "=", "None", ",", "side", "=", "'both'", ")", ":", "if", "(", "side", "==", "'both'", ")", ":", "f", "=", "(", "lambda", "x", ":", "x", ".", "strip", "(", "to_strip", ")", ")", "elif", "(", "side", "==", "'left'", ")", ":", "f", "=", "(", "lambda", "x", ":", "x", ".", "lstrip", "(", "to_strip", ")", ")", "elif", "(", "side", "==", "'right'", ")", ":", "f", "=", "(", "lambda", "x", ":", "x", ".", "rstrip", "(", "to_strip", ")", ")", "else", ":", "raise", "ValueError", "(", "'Invalid side'", ")", "return", "_na_map", "(", "f", ",", "arr", ")" ]
strip whitespace from each string in the series/index .
train
true
46,033
def _sym_ortho(a, b): if (b == 0): return (np.sign(a), 0, abs(a)) elif (a == 0): return (0, np.sign(b), abs(b)) elif (abs(b) > abs(a)): tau = (a / b) s = (np.sign(b) / sqrt((1 + (tau * tau)))) c = (s * tau) r = (b / s) else: tau = (b / a) c = (np.sign(a) / sqrt((1 + (tau * tau)))) s = (c * tau) r = (a / c) return (c, s, r)
[ "def", "_sym_ortho", "(", "a", ",", "b", ")", ":", "if", "(", "b", "==", "0", ")", ":", "return", "(", "np", ".", "sign", "(", "a", ")", ",", "0", ",", "abs", "(", "a", ")", ")", "elif", "(", "a", "==", "0", ")", ":", "return", "(", "0", ",", "np", ".", "sign", "(", "b", ")", ",", "abs", "(", "b", ")", ")", "elif", "(", "abs", "(", "b", ")", ">", "abs", "(", "a", ")", ")", ":", "tau", "=", "(", "a", "/", "b", ")", "s", "=", "(", "np", ".", "sign", "(", "b", ")", "/", "sqrt", "(", "(", "1", "+", "(", "tau", "*", "tau", ")", ")", ")", ")", "c", "=", "(", "s", "*", "tau", ")", "r", "=", "(", "b", "/", "s", ")", "else", ":", "tau", "=", "(", "b", "/", "a", ")", "c", "=", "(", "np", ".", "sign", "(", "a", ")", "/", "sqrt", "(", "(", "1", "+", "(", "tau", "*", "tau", ")", ")", ")", ")", "s", "=", "(", "c", "*", "tau", ")", "r", "=", "(", "a", "/", "c", ")", "return", "(", "c", ",", "s", ",", "r", ")" ]
stable implementation of givens rotation .
train
false
46,036
def _find_address_range(addresses): first = last = addresses[0] for ip in addresses[1:]: if (ip._ip == (last._ip + 1)): last = ip else: break return (first, last)
[ "def", "_find_address_range", "(", "addresses", ")", ":", "first", "=", "last", "=", "addresses", "[", "0", "]", "for", "ip", "in", "addresses", "[", "1", ":", "]", ":", "if", "(", "ip", ".", "_ip", "==", "(", "last", ".", "_ip", "+", "1", ")", ")", ":", "last", "=", "ip", "else", ":", "break", "return", "(", "first", ",", "last", ")" ]
find a sequence of sorted deduplicated ipv#address .
train
true
46,037
def test_prewitt_mask(): np.random.seed(0) result = filters.prewitt(np.random.uniform(size=(10, 10)), np.zeros((10, 10), bool)) assert_allclose(np.abs(result), 0)
[ "def", "test_prewitt_mask", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "0", ")", "result", "=", "filters", ".", "prewitt", "(", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "10", ",", "10", ")", ")", ",", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose", "(", "np", ".", "abs", "(", "result", ")", ",", "0", ")" ]
prewitt on a masked array should be zero .
train
false
46,038
def distinct(iterable): seen = set() for item in iter(iterable): if (item not in seen): (yield item) seen.add(item)
[ "def", "distinct", "(", "iterable", ")", ":", "seen", "=", "set", "(", ")", "for", "item", "in", "iter", "(", "iterable", ")", ":", "if", "(", "item", "not", "in", "seen", ")", ":", "(", "yield", "item", ")", "seen", ".", "add", "(", "item", ")" ]
yield all items in an iterable collection that are distinct .
train
false
46,040
def all_bases_valid(seq): valid_bases = ['a', 'A', 'c', 'C', 'g', 'G', 't', 'T', 'N'] for base in seq: if (base not in valid_bases): return False return True
[ "def", "all_bases_valid", "(", "seq", ")", ":", "valid_bases", "=", "[", "'a'", ",", "'A'", ",", "'c'", ",", "'C'", ",", "'g'", ",", "'G'", ",", "'t'", ",", "'T'", ",", "'N'", "]", "for", "base", "in", "seq", ":", "if", "(", "base", "not", "in", "valid_bases", ")", ":", "return", "False", "return", "True" ]
confirm that the sequence contains only bases .
train
false
46,043
def filename_from_content_disposition(content_disposition): msg = Message(('Content-Disposition: %s' % content_disposition)) filename = msg.get_filename() if filename: filename = os.path.basename(filename).lstrip('.').strip() if filename: return filename
[ "def", "filename_from_content_disposition", "(", "content_disposition", ")", ":", "msg", "=", "Message", "(", "(", "'Content-Disposition: %s'", "%", "content_disposition", ")", ")", "filename", "=", "msg", ".", "get_filename", "(", ")", "if", "filename", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", ".", "lstrip", "(", "'.'", ")", ".", "strip", "(", ")", "if", "filename", ":", "return", "filename" ]
extract and validate filename from a content-disposition header .
train
false
46,044
def get_brick(var): return get_annotation(var, Brick)
[ "def", "get_brick", "(", "var", ")", ":", "return", "get_annotation", "(", "var", ",", "Brick", ")" ]
retrieves the brick that created this variable .
train
false
46,045
def downloadURL(url, fname): fpIn = urllib_request.urlopen(url) fpOut = open(fname, 'wb') block = fpIn.read(10240) try: while block: fpOut.write(block) block = fpIn.read(10240) fpIn.close() fpOut.close() except: try: os.unlink(fname) except: pass
[ "def", "downloadURL", "(", "url", ",", "fname", ")", ":", "fpIn", "=", "urllib_request", ".", "urlopen", "(", "url", ")", "fpOut", "=", "open", "(", "fname", ",", "'wb'", ")", "block", "=", "fpIn", ".", "read", "(", "10240", ")", "try", ":", "while", "block", ":", "fpOut", ".", "write", "(", "block", ")", "block", "=", "fpIn", ".", "read", "(", "10240", ")", "fpIn", ".", "close", "(", ")", "fpOut", ".", "close", "(", ")", "except", ":", "try", ":", "os", ".", "unlink", "(", "fname", ")", "except", ":", "pass" ]
download the contents of the url into the file .
train
false
46,046
def read_keys(base, key): try: handle = RegOpenKeyEx(base, key) except RegError: return None L = [] i = 0 while 1: try: k = RegEnumKey(handle, i) except RegError: break L.append(k) i = (i + 1) return L
[ "def", "read_keys", "(", "base", ",", "key", ")", ":", "try", ":", "handle", "=", "RegOpenKeyEx", "(", "base", ",", "key", ")", "except", "RegError", ":", "return", "None", "L", "=", "[", "]", "i", "=", "0", "while", "1", ":", "try", ":", "k", "=", "RegEnumKey", "(", "handle", ",", "i", ")", "except", "RegError", ":", "break", "L", ".", "append", "(", "k", ")", "i", "=", "(", "i", "+", "1", ")", "return", "L" ]
return list of registry keys .
train
false
46,047
def _init_log(): global log orig_logger_cls = logging.getLoggerClass() logging.setLoggerClass(AstropyLogger) try: log = logging.getLogger('astropy') log._set_defaults() finally: logging.setLoggerClass(orig_logger_cls) return log
[ "def", "_init_log", "(", ")", ":", "global", "log", "orig_logger_cls", "=", "logging", ".", "getLoggerClass", "(", ")", "logging", ".", "setLoggerClass", "(", "AstropyLogger", ")", "try", ":", "log", "=", "logging", ".", "getLogger", "(", "'astropy'", ")", "log", ".", "_set_defaults", "(", ")", "finally", ":", "logging", ".", "setLoggerClass", "(", "orig_logger_cls", ")", "return", "log" ]
initializes the astropy log--in most circumstances this is called automatically when importing astropy .
train
false
46,048
def is_(a, b): return (a is b)
[ "def", "is_", "(", "a", ",", "b", ")", ":", "return", "(", "a", "is", "b", ")" ]
assert a is b .
train
false
46,049
def p_enum_seq(p): _parse_seq(p)
[ "def", "p_enum_seq", "(", "p", ")", ":", "_parse_seq", "(", "p", ")" ]
enum_seq : enum_item sep enum_seq | enum_item enum_seq .
train
false
46,052
def replace_methodname(format_string, methodname): methodnamehyphen = methodname.replace('_', '-') ret = format_string ret = ret.replace('{methodname}', methodname) ret = ret.replace('{methodnamehyphen}', methodnamehyphen) return ret
[ "def", "replace_methodname", "(", "format_string", ",", "methodname", ")", ":", "methodnamehyphen", "=", "methodname", ".", "replace", "(", "'_'", ",", "'-'", ")", "ret", "=", "format_string", "ret", "=", "ret", ".", "replace", "(", "'{methodname}'", ",", "methodname", ")", "ret", "=", "ret", ".", "replace", "(", "'{methodnamehyphen}'", ",", "methodnamehyphen", ")", "return", "ret" ]
partially format a format_string .
train
true
46,053
def get_occurrence(event_id, occurrence_id=None, year=None, month=None, day=None, hour=None, minute=None, second=None): if occurrence_id: occurrence = get_object_or_404(Occurrence, id=occurrence_id) event = occurrence.event elif all((year, month, day, hour, minute, second)): event = get_object_or_404(Event, id=event_id) occurrence = event.get_occurrence(datetime.datetime(int(year), int(month), int(day), int(hour), int(minute), int(second))) if (occurrence is None): raise Http404 else: raise Http404 return (event, occurrence)
[ "def", "get_occurrence", "(", "event_id", ",", "occurrence_id", "=", "None", ",", "year", "=", "None", ",", "month", "=", "None", ",", "day", "=", "None", ",", "hour", "=", "None", ",", "minute", "=", "None", ",", "second", "=", "None", ")", ":", "if", "occurrence_id", ":", "occurrence", "=", "get_object_or_404", "(", "Occurrence", ",", "id", "=", "occurrence_id", ")", "event", "=", "occurrence", ".", "event", "elif", "all", "(", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", ")", ":", "event", "=", "get_object_or_404", "(", "Event", ",", "id", "=", "event_id", ")", "occurrence", "=", "event", ".", "get_occurrence", "(", "datetime", ".", "datetime", "(", "int", "(", "year", ")", ",", "int", "(", "month", ")", ",", "int", "(", "day", ")", ",", "int", "(", "hour", ")", ",", "int", "(", "minute", ")", ",", "int", "(", "second", ")", ")", ")", "if", "(", "occurrence", "is", "None", ")", ":", "raise", "Http404", "else", ":", "raise", "Http404", "return", "(", "event", ",", "occurrence", ")" ]
because occurrences dont have to be persisted .
train
false
46,055
def protected_resource(scopes=None, validator_cls=OAuth2Validator, server_cls=Server): _scopes = (scopes or []) def decorator(view_func): @wraps(view_func) def _validate(request, *args, **kwargs): validator = validator_cls() core = OAuthLibCore(server_cls(validator)) (valid, oauthlib_req) = core.verify_request(request, scopes=_scopes) if valid: request.resource_owner = oauthlib_req.user return view_func(request, *args, **kwargs) return HttpResponseForbidden() return _validate return decorator
[ "def", "protected_resource", "(", "scopes", "=", "None", ",", "validator_cls", "=", "OAuth2Validator", ",", "server_cls", "=", "Server", ")", ":", "_scopes", "=", "(", "scopes", "or", "[", "]", ")", "def", "decorator", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ")", "def", "_validate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "validator", "=", "validator_cls", "(", ")", "core", "=", "OAuthLibCore", "(", "server_cls", "(", "validator", ")", ")", "(", "valid", ",", "oauthlib_req", ")", "=", "core", ".", "verify_request", "(", "request", ",", "scopes", "=", "_scopes", ")", "if", "valid", ":", "request", ".", "resource_owner", "=", "oauthlib_req", ".", "user", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "HttpResponseForbidden", "(", ")", "return", "_validate", "return", "decorator" ]
decorator to protect views by providing oauth2 authentication out of the box .
train
false
46,056
def filter_by_key_prefix(dict, prefix): out_dict = {} for (key, value) in dict.items(): if key.startswith(prefix): out_dict[key] = value return out_dict
[ "def", "filter_by_key_prefix", "(", "dict", ",", "prefix", ")", ":", "out_dict", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "dict", ".", "items", "(", ")", ":", "if", "key", ".", "startswith", "(", "prefix", ")", ":", "out_dict", "[", "key", "]", "=", "value", "return", "out_dict" ]
return those and only those items in a dictionary whose keys have the given prefix .
train
false
46,057
def connects_to_emergency_number(number, region_code): return _matches_emergency_number_helper(number, region_code, True)
[ "def", "connects_to_emergency_number", "(", "number", ",", "region_code", ")", ":", "return", "_matches_emergency_number_helper", "(", "number", ",", "region_code", ",", "True", ")" ]
returns whether the given number .
train
false
46,058
def _hid_match(event_id, keys): keys = ([keys] if (not isinstance(keys, (list, tuple))) else keys) use_keys = [] for key in keys: if (not isinstance(key, string_types)): raise KeyError(('keys must be strings, got %s (%s)' % (type(key), key))) use_keys.extend((k for k in event_id.keys() if set(key.split('/')).issubset(k.split('/')))) if (len(use_keys) == 0): raise KeyError(('Event "%s" is not in Epochs.' % key)) use_keys = list(set(use_keys)) return use_keys
[ "def", "_hid_match", "(", "event_id", ",", "keys", ")", ":", "keys", "=", "(", "[", "keys", "]", "if", "(", "not", "isinstance", "(", "keys", ",", "(", "list", ",", "tuple", ")", ")", ")", "else", "keys", ")", "use_keys", "=", "[", "]", "for", "key", "in", "keys", ":", "if", "(", "not", "isinstance", "(", "key", ",", "string_types", ")", ")", ":", "raise", "KeyError", "(", "(", "'keys must be strings, got %s (%s)'", "%", "(", "type", "(", "key", ")", ",", "key", ")", ")", ")", "use_keys", ".", "extend", "(", "(", "k", "for", "k", "in", "event_id", ".", "keys", "(", ")", "if", "set", "(", "key", ".", "split", "(", "'/'", ")", ")", ".", "issubset", "(", "k", ".", "split", "(", "'/'", ")", ")", ")", ")", "if", "(", "len", "(", "use_keys", ")", "==", "0", ")", ":", "raise", "KeyError", "(", "(", "'Event \"%s\" is not in Epochs.'", "%", "key", ")", ")", "use_keys", "=", "list", "(", "set", "(", "use_keys", ")", ")", "return", "use_keys" ]
match event ids using hid selection .
train
false
46,060
def letter_form_to_array_form(array_form, group): a = list(array_form[:]) new_array = [] n = 1 symbols = group.symbols for i in range(len(a)): if (i == (len(a) - 1)): if (a[i] == a[(i - 1)]): if ((- a[i]) in symbols): new_array.append(((- a[i]), (- n))) else: new_array.append((a[i], n)) elif ((- a[i]) in symbols): new_array.append(((- a[i]), (-1))) else: new_array.append((a[i], 1)) return new_array elif (a[i] == a[(i + 1)]): n += 1 else: if ((- a[i]) in symbols): new_array.append(((- a[i]), (- n))) else: new_array.append((a[i], n)) n = 1
[ "def", "letter_form_to_array_form", "(", "array_form", ",", "group", ")", ":", "a", "=", "list", "(", "array_form", "[", ":", "]", ")", "new_array", "=", "[", "]", "n", "=", "1", "symbols", "=", "group", ".", "symbols", "for", "i", "in", "range", "(", "len", "(", "a", ")", ")", ":", "if", "(", "i", "==", "(", "len", "(", "a", ")", "-", "1", ")", ")", ":", "if", "(", "a", "[", "i", "]", "==", "a", "[", "(", "i", "-", "1", ")", "]", ")", ":", "if", "(", "(", "-", "a", "[", "i", "]", ")", "in", "symbols", ")", ":", "new_array", ".", "append", "(", "(", "(", "-", "a", "[", "i", "]", ")", ",", "(", "-", "n", ")", ")", ")", "else", ":", "new_array", ".", "append", "(", "(", "a", "[", "i", "]", ",", "n", ")", ")", "elif", "(", "(", "-", "a", "[", "i", "]", ")", "in", "symbols", ")", ":", "new_array", ".", "append", "(", "(", "(", "-", "a", "[", "i", "]", ")", ",", "(", "-", "1", ")", ")", ")", "else", ":", "new_array", ".", "append", "(", "(", "a", "[", "i", "]", ",", "1", ")", ")", "return", "new_array", "elif", "(", "a", "[", "i", "]", "==", "a", "[", "(", "i", "+", "1", ")", "]", ")", ":", "n", "+=", "1", "else", ":", "if", "(", "(", "-", "a", "[", "i", "]", ")", "in", "symbols", ")", ":", "new_array", ".", "append", "(", "(", "(", "-", "a", "[", "i", "]", ")", ",", "(", "-", "n", ")", ")", ")", "else", ":", "new_array", ".", "append", "(", "(", "a", "[", "i", "]", ",", "n", ")", ")", "n", "=", "1" ]
this method converts a list given with possible repetitions of elements in it .
train
false
46,061
def current_stream(): _lazy_init() return torch.cuda.Stream(_cdata=torch._C._cuda_getCurrentStream())
[ "def", "current_stream", "(", ")", ":", "_lazy_init", "(", ")", "return", "torch", ".", "cuda", ".", "Stream", "(", "_cdata", "=", "torch", ".", "_C", ".", "_cuda_getCurrentStream", "(", ")", ")" ]
returns a currently selected :class:stream .
train
false
46,062
def disable_color(): global LIGHT_GREEN LIGHT_GREEN = '' global LIGHT_RED LIGHT_RED = '' global LIGHT_BLUE LIGHT_BLUE = '' global DARK_RED DARK_RED = '' global END_COLOR END_COLOR = ''
[ "def", "disable_color", "(", ")", ":", "global", "LIGHT_GREEN", "LIGHT_GREEN", "=", "''", "global", "LIGHT_RED", "LIGHT_RED", "=", "''", "global", "LIGHT_BLUE", "LIGHT_BLUE", "=", "''", "global", "DARK_RED", "DARK_RED", "=", "''", "global", "END_COLOR", "END_COLOR", "=", "''" ]
disable colours by setting colour code constants to empty strings .
train
false
46,065
@hug.get() def hello(request): return 'Hello World!'
[ "@", "hug", ".", "get", "(", ")", "def", "hello", "(", "request", ")", ":", "return", "'Hello World!'" ]
sample flask api view that ruturns json with some data from config .
train
false
46,066
def add_whitelist_module(name, module=None): for (i, (_name, _module)) in enumerate(internal._WHITELIST_ADDED): if (_name == name): if (_module == module): return internal._WHITELIST_ADDED[i] = (_name, module) return internal._WHITELIST_ADDED.append((name, module))
[ "def", "add_whitelist_module", "(", "name", ",", "module", "=", "None", ")", ":", "for", "(", "i", ",", "(", "_name", ",", "_module", ")", ")", "in", "enumerate", "(", "internal", ".", "_WHITELIST_ADDED", ")", ":", "if", "(", "_name", "==", "name", ")", ":", "if", "(", "_module", "==", "module", ")", ":", "return", "internal", ".", "_WHITELIST_ADDED", "[", "i", "]", "=", "(", "_name", ",", "module", ")", "return", "internal", ".", "_WHITELIST_ADDED", ".", "append", "(", "(", "name", ",", "module", ")", ")" ]
api function to ensure that a certain module is made available to any plugins .
train
false
46,067
@register.tag(u'for') def do_for(parser, token): bits = token.split_contents() if (len(bits) < 4): raise TemplateSyntaxError((u"'for' statements should have at least four words: %s" % token.contents)) is_reversed = (bits[(-1)] == u'reversed') in_index = ((-3) if is_reversed else (-2)) if (bits[in_index] != u'in'): raise TemplateSyntaxError((u"'for' statements should use the format 'for x in y': %s" % token.contents)) loopvars = re.split(u' *, *', u' '.join(bits[1:in_index])) for var in loopvars: if ((not var) or (u' ' in var)): raise TemplateSyntaxError((u"'for' tag received an invalid argument: %s" % token.contents)) sequence = parser.compile_filter(bits[(in_index + 1)]) nodelist_loop = parser.parse((u'empty', u'endfor')) token = parser.next_token() if (token.contents == u'empty'): nodelist_empty = parser.parse((u'endfor',)) parser.delete_first_token() else: nodelist_empty = None return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
[ "@", "register", ".", "tag", "(", "u'for'", ")", "def", "do_for", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "<", "4", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "u\"'for' statements should have at least four words: %s\"", "%", "token", ".", "contents", ")", ")", "is_reversed", "=", "(", "bits", "[", "(", "-", "1", ")", "]", "==", "u'reversed'", ")", "in_index", "=", "(", "(", "-", "3", ")", "if", "is_reversed", "else", "(", "-", "2", ")", ")", "if", "(", "bits", "[", "in_index", "]", "!=", "u'in'", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "u\"'for' statements should use the format 'for x in y': %s\"", "%", "token", ".", "contents", ")", ")", "loopvars", "=", "re", ".", "split", "(", "u' *, *'", ",", "u' '", ".", "join", "(", "bits", "[", "1", ":", "in_index", "]", ")", ")", "for", "var", "in", "loopvars", ":", "if", "(", "(", "not", "var", ")", "or", "(", "u' '", "in", "var", ")", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "u\"'for' tag received an invalid argument: %s\"", "%", "token", ".", "contents", ")", ")", "sequence", "=", "parser", ".", "compile_filter", "(", "bits", "[", "(", "in_index", "+", "1", ")", "]", ")", "nodelist_loop", "=", "parser", ".", "parse", "(", "(", "u'empty'", ",", "u'endfor'", ")", ")", "token", "=", "parser", ".", "next_token", "(", ")", "if", "(", "token", ".", "contents", "==", "u'empty'", ")", ":", "nodelist_empty", "=", "parser", ".", "parse", "(", "(", "u'endfor'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "else", ":", "nodelist_empty", "=", "None", "return", "ForNode", "(", "loopvars", ",", "sequence", ",", "is_reversed", ",", "nodelist_loop", ",", "nodelist_empty", ")" ]
loop over each item in an array .
train
false
46,068
def truncate_name(name, length=None, hash_len=4): if ((length is None) or (len(name) <= length)): return name hash = md5_constructor(name).hexdigest()[:hash_len] return ('%s%s' % (name[:(length - hash_len)], hash))
[ "def", "truncate_name", "(", "name", ",", "length", "=", "None", ",", "hash_len", "=", "4", ")", ":", "if", "(", "(", "length", "is", "None", ")", "or", "(", "len", "(", "name", ")", "<=", "length", ")", ")", ":", "return", "name", "hash", "=", "md5_constructor", "(", "name", ")", ".", "hexdigest", "(", ")", "[", ":", "hash_len", "]", "return", "(", "'%s%s'", "%", "(", "name", "[", ":", "(", "length", "-", "hash_len", ")", "]", ",", "hash", ")", ")" ]
shortens a string to a repeatable mangled version with the given length .
train
false
46,069
def get_default_keychain(user=None, domain='user'): cmd = 'security default-keychain -d {0}'.format(domain) return __salt__['cmd.run'](cmd, runas=user)
[ "def", "get_default_keychain", "(", "user", "=", "None", ",", "domain", "=", "'user'", ")", ":", "cmd", "=", "'security default-keychain -d {0}'", ".", "format", "(", "domain", ")", "return", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "runas", "=", "user", ")" ]
get the default keychain user the user to check the default keychain of domain the domain to use valid values are user|system|common|dynamic .
train
true
46,070
def server_enable(s_name, **connection_args): ret = True server = _server_get(s_name, **connection_args) if (server is None): return False if (server.get_state() == 'ENABLED'): return True nitro = _connect(**connection_args) if (nitro is None): return False try: NSServer.enable(nitro, server) except NSNitroError as error: log.debug('netscaler module error - NSServer.enable() failed: {0}'.format(error)) ret = False _disconnect(nitro) return ret
[ "def", "server_enable", "(", "s_name", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "server", "=", "_server_get", "(", "s_name", ",", "**", "connection_args", ")", "if", "(", "server", "is", "None", ")", ":", "return", "False", "if", "(", "server", ".", "get_state", "(", ")", "==", "'ENABLED'", ")", ":", "return", "True", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "try", ":", "NSServer", ".", "enable", "(", "nitro", ",", "server", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSServer.enable() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
enables a server globally cli example: .
train
true
46,071
def get_row(line_contents, column_names): row = [] for column_name in column_names: line_value = get_nested_value(line_contents, column_name) if isinstance(line_value, unicode): row.append('{0}'.format(line_value.encode('utf-8'))) elif (line_value is not None): row.append('{0}'.format(line_value)) else: row.append('') return row
[ "def", "get_row", "(", "line_contents", ",", "column_names", ")", ":", "row", "=", "[", "]", "for", "column_name", "in", "column_names", ":", "line_value", "=", "get_nested_value", "(", "line_contents", ",", "column_name", ")", "if", "isinstance", "(", "line_value", ",", "unicode", ")", ":", "row", ".", "append", "(", "'{0}'", ".", "format", "(", "line_value", ".", "encode", "(", "'utf-8'", ")", ")", ")", "elif", "(", "line_value", "is", "not", "None", ")", ":", "row", ".", "append", "(", "'{0}'", ".", "format", "(", "line_value", ")", ")", "else", ":", "row", ".", "append", "(", "''", ")", "return", "row" ]
return a csv compatible row given column names and a dict .
train
false
46,072
def stopTouchApp(): if (EventLoop is None): return if (EventLoop.status != 'started'): return Logger.info('Base: Leaving application in progress...') EventLoop.close()
[ "def", "stopTouchApp", "(", ")", ":", "if", "(", "EventLoop", "is", "None", ")", ":", "return", "if", "(", "EventLoop", ".", "status", "!=", "'started'", ")", ":", "return", "Logger", ".", "info", "(", "'Base: Leaving application in progress...'", ")", "EventLoop", ".", "close", "(", ")" ]
stop the current application by leaving the main loop .
train
false
46,073
def _linesearch_powell(func, p, xi, tol=0.001): def myfunc(alpha): return func((p + (alpha * xi))) (alpha_min, fret, iter, num) = brent(myfunc, full_output=1, tol=tol) xi = (alpha_min * xi) return (squeeze(fret), (p + xi), xi)
[ "def", "_linesearch_powell", "(", "func", ",", "p", ",", "xi", ",", "tol", "=", "0.001", ")", ":", "def", "myfunc", "(", "alpha", ")", ":", "return", "func", "(", "(", "p", "+", "(", "alpha", "*", "xi", ")", ")", ")", "(", "alpha_min", ",", "fret", ",", "iter", ",", "num", ")", "=", "brent", "(", "myfunc", ",", "full_output", "=", "1", ",", "tol", "=", "tol", ")", "xi", "=", "(", "alpha_min", "*", "xi", ")", "return", "(", "squeeze", "(", "fret", ")", ",", "(", "p", "+", "xi", ")", ",", "xi", ")" ]
line-search algorithm using fminbound .
train
false
46,074
def transform_line(line): if (line == 'import '): return '' for (old_module, new_module) in REPLACEMENTS.iteritems(): result = transform_old_to_new(line, old_module, new_module) if (result is not None): return result return line
[ "def", "transform_line", "(", "line", ")", ":", "if", "(", "line", "==", "'import '", ")", ":", "return", "''", "for", "(", "old_module", ",", "new_module", ")", "in", "REPLACEMENTS", ".", "iteritems", "(", ")", ":", "result", "=", "transform_old_to_new", "(", "line", ",", "old_module", ",", "new_module", ")", "if", "(", "result", "is", "not", "None", ")", ":", "return", "result", "return", "line" ]
transforms an import line in a pb2 module .
train
false
46,075
def cms_verify(formatted, signing_cert_file_name, ca_file_name): _ensure_subprocess() process = subprocess.Popen(['openssl', 'cms', '-verify', '-certfile', signing_cert_file_name, '-CAfile', ca_file_name, '-inform', 'PEM', '-nosmimecap', '-nodetach', '-nocerts', '-noattr'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (output, err) = process.communicate(formatted) retcode = process.poll() if retcode: LOG.error((_('Verify error: %s') % err)) raise subprocess.CalledProcessError(retcode, 'openssl', output=err) return output
[ "def", "cms_verify", "(", "formatted", ",", "signing_cert_file_name", ",", "ca_file_name", ")", ":", "_ensure_subprocess", "(", ")", "process", "=", "subprocess", ".", "Popen", "(", "[", "'openssl'", ",", "'cms'", ",", "'-verify'", ",", "'-certfile'", ",", "signing_cert_file_name", ",", "'-CAfile'", ",", "ca_file_name", ",", "'-inform'", ",", "'PEM'", ",", "'-nosmimecap'", ",", "'-nodetach'", ",", "'-nocerts'", ",", "'-noattr'", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "output", ",", "err", ")", "=", "process", ".", "communicate", "(", "formatted", ")", "retcode", "=", "process", ".", "poll", "(", ")", "if", "retcode", ":", "LOG", ".", "error", "(", "(", "_", "(", "'Verify error: %s'", ")", "%", "err", ")", ")", "raise", "subprocess", ".", "CalledProcessError", "(", "retcode", ",", "'openssl'", ",", "output", "=", "err", ")", "return", "output" ]
verifies the signature of the contents iaw cms syntax .
train
false
46,076
def VisualWait(verbose, *args, **kwargs): return (_VisualWait(*args, **kwargs) if (verbose > 1) else _NotVisualWait())
[ "def", "VisualWait", "(", "verbose", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "(", "_VisualWait", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "verbose", ">", "1", ")", "else", "_NotVisualWait", "(", ")", ")" ]
wonderful visual progress indication .
train
false
46,077
def unquote_string(quoted): if (len(quoted) < 2): return quoted elif ((quoted[0] != '"') or (quoted[(-1)] != '"')): return quoted tmp_quoted = quoted[1:(-1)] if ('\\' not in tmp_quoted): return tmp_quoted elif ('\\\\' not in tmp_quoted): return tmp_quoted.replace('\\', '') else: return '\\'.join([q.replace('\\', '') for q in tmp_quoted.split('\\\\')])
[ "def", "unquote_string", "(", "quoted", ")", ":", "if", "(", "len", "(", "quoted", ")", "<", "2", ")", ":", "return", "quoted", "elif", "(", "(", "quoted", "[", "0", "]", "!=", "'\"'", ")", "or", "(", "quoted", "[", "(", "-", "1", ")", "]", "!=", "'\"'", ")", ")", ":", "return", "quoted", "tmp_quoted", "=", "quoted", "[", "1", ":", "(", "-", "1", ")", "]", "if", "(", "'\\\\'", "not", "in", "tmp_quoted", ")", ":", "return", "tmp_quoted", "elif", "(", "'\\\\\\\\'", "not", "in", "tmp_quoted", ")", ":", "return", "tmp_quoted", ".", "replace", "(", "'\\\\'", ",", "''", ")", "else", ":", "return", "'\\\\'", ".", "join", "(", "[", "q", ".", "replace", "(", "'\\\\'", ",", "''", ")", "for", "q", "in", "tmp_quoted", ".", "split", "(", "'\\\\\\\\'", ")", "]", ")" ]
unquote an rfc 7320 "quoted-string" .
train
false
46,080
def clean_dir(directory): sh('find {dir} -type f -delete'.format(dir=directory))
[ "def", "clean_dir", "(", "directory", ")", ":", "sh", "(", "'find {dir} -type f -delete'", ".", "format", "(", "dir", "=", "directory", ")", ")" ]
delete all the files from the specified directory .
train
false
46,082
@cmd def build_exe(): build() sh(('%s setup.py bdist_wininst' % PYTHON))
[ "@", "cmd", "def", "build_exe", "(", ")", ":", "build", "(", ")", "sh", "(", "(", "'%s setup.py bdist_wininst'", "%", "PYTHON", ")", ")" ]
create exe file .
train
false
46,083
def read_history(): try: readline.read_history_file(c['HISTORY_FILENAME']) except: pass
[ "def", "read_history", "(", ")", ":", "try", ":", "readline", ".", "read_history_file", "(", "c", "[", "'HISTORY_FILENAME'", "]", ")", "except", ":", "pass" ]
read history file .
train
false
46,086
def gidcounter(): global GCOUNT GCOUNT += 1 return ('%s-%s' % (time.strftime(DATESTRING), GCOUNT))
[ "def", "gidcounter", "(", ")", ":", "global", "GCOUNT", "GCOUNT", "+=", "1", "return", "(", "'%s-%s'", "%", "(", "time", ".", "strftime", "(", "DATESTRING", ")", ",", "GCOUNT", ")", ")" ]
makes globally unique ids .
train
false
46,087
def base64decode(value): return base64.b64decode(value)
[ "def", "base64decode", "(", "value", ")", ":", "return", "base64", ".", "b64decode", "(", "value", ")" ]
decodes string value from base64 to plain format .
train
false
46,088
def log_to_file(filename, level=DEBUG): l = logging.getLogger('paramiko') if (len(l.handlers) > 0): return l.setLevel(level) f = open(filename, 'w') lh = logging.StreamHandler(f) lh.setFormatter(logging.Formatter('%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d %(name)s: %(message)s', '%Y%m%d-%H:%M:%S')) l.addHandler(lh)
[ "def", "log_to_file", "(", "filename", ",", "level", "=", "DEBUG", ")", ":", "l", "=", "logging", ".", "getLogger", "(", "'paramiko'", ")", "if", "(", "len", "(", "l", ".", "handlers", ")", ">", "0", ")", ":", "return", "l", ".", "setLevel", "(", "level", ")", "f", "=", "open", "(", "filename", ",", "'w'", ")", "lh", "=", "logging", ".", "StreamHandler", "(", "f", ")", "lh", ".", "setFormatter", "(", "logging", ".", "Formatter", "(", "'%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d %(name)s: %(message)s'", ",", "'%Y%m%d-%H:%M:%S'", ")", ")", "l", ".", "addHandler", "(", "lh", ")" ]
send paramiko logs to a logfile .
train
true
46,089
def _create_profile_d_file(prefix): from fabtools.require.files import file as require_file require_file('/etc/profile.d/java.sh', contents=dedent((' export JAVA_HOME="%s/jdk"\n export PATH="$JAVA_HOME/bin:$PATH"\n ' % prefix)), mode='0755', use_sudo=True)
[ "def", "_create_profile_d_file", "(", "prefix", ")", ":", "from", "fabtools", ".", "require", ".", "files", "import", "file", "as", "require_file", "require_file", "(", "'/etc/profile.d/java.sh'", ",", "contents", "=", "dedent", "(", "(", "' export JAVA_HOME=\"%s/jdk\"\\n export PATH=\"$JAVA_HOME/bin:$PATH\"\\n '", "%", "prefix", ")", ")", ",", "mode", "=", "'0755'", ",", "use_sudo", "=", "True", ")" ]
create profile .
train
false
46,091
def test_config_alexa_entity_id_to_number(): conf = Config(None, {'type': 'alexa'}) number = conf.entity_id_to_number('light.test') assert (number == 'light.test') number = conf.entity_id_to_number('light.test') assert (number == 'light.test') number = conf.entity_id_to_number('light.test2') assert (number == 'light.test2') entity_id = conf.number_to_entity_id('light.test') assert (entity_id == 'light.test')
[ "def", "test_config_alexa_entity_id_to_number", "(", ")", ":", "conf", "=", "Config", "(", "None", ",", "{", "'type'", ":", "'alexa'", "}", ")", "number", "=", "conf", ".", "entity_id_to_number", "(", "'light.test'", ")", "assert", "(", "number", "==", "'light.test'", ")", "number", "=", "conf", ".", "entity_id_to_number", "(", "'light.test'", ")", "assert", "(", "number", "==", "'light.test'", ")", "number", "=", "conf", ".", "entity_id_to_number", "(", "'light.test2'", ")", "assert", "(", "number", "==", "'light.test2'", ")", "entity_id", "=", "conf", ".", "number_to_entity_id", "(", "'light.test'", ")", "assert", "(", "entity_id", "==", "'light.test'", ")" ]
test config adheres to the type .
train
false
46,092
def resolve_attrs(test_method): def _wrapper(self, *args): new_args = [getattr(self, arg) for arg in args] return test_method(self, *new_args) return _wrapper
[ "def", "resolve_attrs", "(", "test_method", ")", ":", "def", "_wrapper", "(", "self", ",", "*", "args", ")", ":", "new_args", "=", "[", "getattr", "(", "self", ",", "arg", ")", "for", "arg", "in", "args", "]", "return", "test_method", "(", "self", ",", "*", "new_args", ")", "return", "_wrapper" ]
helper function used with ddt .
train
false
46,093
@requires_application() def test_circle_draw(): with TestingCanvas() as c: ellipse = visuals.Ellipse(center=(75, 35, 0), radius=20, color=(1, 0, 0, 1), parent=c.scene) assert_image_approved(c.render(), 'visuals/circle1.png') ellipse.parent = None ellipse = visuals.Ellipse(center=(75, 35, 0), radius=20, color=(1, 0, 0, 1), border_color=(0, 1, 1, 1), parent=c.scene) assert_image_approved(c.render(), 'visuals/circle2.png') ellipse.parent = None ellipse = visuals.Ellipse(center=(75, 35, 0), radius=20, border_color=(0, 1, 1, 1), parent=c.scene) assert_image_approved(c.render(), 'visuals/circle3.png', min_corr=0.7)
[ "@", "requires_application", "(", ")", "def", "test_circle_draw", "(", ")", ":", "with", "TestingCanvas", "(", ")", "as", "c", ":", "ellipse", "=", "visuals", ".", "Ellipse", "(", "center", "=", "(", "75", ",", "35", ",", "0", ")", ",", "radius", "=", "20", ",", "color", "=", "(", "1", ",", "0", ",", "0", ",", "1", ")", ",", "parent", "=", "c", ".", "scene", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/circle1.png'", ")", "ellipse", ".", "parent", "=", "None", "ellipse", "=", "visuals", ".", "Ellipse", "(", "center", "=", "(", "75", ",", "35", ",", "0", ")", ",", "radius", "=", "20", ",", "color", "=", "(", "1", ",", "0", ",", "0", ",", "1", ")", ",", "border_color", "=", "(", "0", ",", "1", ",", "1", ",", "1", ")", ",", "parent", "=", "c", ".", "scene", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/circle2.png'", ")", "ellipse", ".", "parent", "=", "None", "ellipse", "=", "visuals", ".", "Ellipse", "(", "center", "=", "(", "75", ",", "35", ",", "0", ")", ",", "radius", "=", "20", ",", "border_color", "=", "(", "0", ",", "1", ",", "1", ",", "1", ")", ",", "parent", "=", "c", ".", "scene", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "'visuals/circle3.png'", ",", "min_corr", "=", "0.7", ")" ]
test drawing circles without transform using ellipsevisual .
train
false
46,095
def deprecatedDeferredGenerator(f): return runWithWarningsSuppressed([SUPPRESS(message='twisted.internet.defer.deferredGenerator was deprecated')], deferredGenerator, f)
[ "def", "deprecatedDeferredGenerator", "(", "f", ")", ":", "return", "runWithWarningsSuppressed", "(", "[", "SUPPRESS", "(", "message", "=", "'twisted.internet.defer.deferredGenerator was deprecated'", ")", "]", ",", "deferredGenerator", ",", "f", ")" ]
calls l{deferredgenerator} while suppressing the deprecation warning .
train
false
46,097
def extract_id(url): if (not isinstance(url, basestring)): return m = re.search(u'((?:nm|tt)[\\d]{7})', url) if m: return m.group(1)
[ "def", "extract_id", "(", "url", ")", ":", "if", "(", "not", "isinstance", "(", "url", ",", "basestring", ")", ")", ":", "return", "m", "=", "re", ".", "search", "(", "u'((?:nm|tt)[\\\\d]{7})'", ",", "url", ")", "if", "m", ":", "return", "m", ".", "group", "(", "1", ")" ]
return imdb id of the given url .
train
false
46,098
def model_ngettext(obj, n=None): if isinstance(obj, models.query.QuerySet): if (n is None): n = obj.count() obj = obj.model d = model_format_dict(obj) (singular, plural) = (d['verbose_name'], d['verbose_name_plural']) return ungettext(singular, plural, (n or 0))
[ "def", "model_ngettext", "(", "obj", ",", "n", "=", "None", ")", ":", "if", "isinstance", "(", "obj", ",", "models", ".", "query", ".", "QuerySet", ")", ":", "if", "(", "n", "is", "None", ")", ":", "n", "=", "obj", ".", "count", "(", ")", "obj", "=", "obj", ".", "model", "d", "=", "model_format_dict", "(", "obj", ")", "(", "singular", ",", "plural", ")", "=", "(", "d", "[", "'verbose_name'", "]", ",", "d", "[", "'verbose_name_plural'", "]", ")", "return", "ungettext", "(", "singular", ",", "plural", ",", "(", "n", "or", "0", ")", ")" ]
return the appropriate verbose_name or verbose_name_plural value for obj depending on the count n .
train
false
46,101
def _iterate_mri_slices(name, ind, global_id, slides_klass, data, cmap, image_format='png'): img_klass = ('slideimg-%s' % name) caption = (u'Slice %s %s' % (name, ind)) slice_id = ('%s-%s-%s' % (name, global_id, ind)) div_klass = ('span12 %s' % slides_klass) img = _build_image(data, cmap=cmap) first = (True if (ind == 0) else False) html = _build_html_image(img, slice_id, div_klass, img_klass, caption, first) return (ind, html)
[ "def", "_iterate_mri_slices", "(", "name", ",", "ind", ",", "global_id", ",", "slides_klass", ",", "data", ",", "cmap", ",", "image_format", "=", "'png'", ")", ":", "img_klass", "=", "(", "'slideimg-%s'", "%", "name", ")", "caption", "=", "(", "u'Slice %s %s'", "%", "(", "name", ",", "ind", ")", ")", "slice_id", "=", "(", "'%s-%s-%s'", "%", "(", "name", ",", "global_id", ",", "ind", ")", ")", "div_klass", "=", "(", "'span12 %s'", "%", "slides_klass", ")", "img", "=", "_build_image", "(", "data", ",", "cmap", "=", "cmap", ")", "first", "=", "(", "True", "if", "(", "ind", "==", "0", ")", "else", "False", ")", "html", "=", "_build_html_image", "(", "img", ",", "slice_id", ",", "div_klass", ",", "img_klass", ",", "caption", ",", "first", ")", "return", "(", "ind", ",", "html", ")" ]
auxiliary function for parallel processing of mri slices .
train
false
46,102
@downgrades(2) def _downgrade_v2(op): op.drop_index('ix_equities_fuzzy_symbol') op.drop_index('ix_equities_company_symbol') with op.batch_alter_table('equities') as batch_op: batch_op.drop_column('auto_close_date') op.create_index('ix_equities_fuzzy_symbol', table_name='equities', columns=['fuzzy_symbol']) op.create_index('ix_equities_company_symbol', table_name='equities', columns=['company_symbol'])
[ "@", "downgrades", "(", "2", ")", "def", "_downgrade_v2", "(", "op", ")", ":", "op", ".", "drop_index", "(", "'ix_equities_fuzzy_symbol'", ")", "op", ".", "drop_index", "(", "'ix_equities_company_symbol'", ")", "with", "op", ".", "batch_alter_table", "(", "'equities'", ")", "as", "batch_op", ":", "batch_op", ".", "drop_column", "(", "'auto_close_date'", ")", "op", ".", "create_index", "(", "'ix_equities_fuzzy_symbol'", ",", "table_name", "=", "'equities'", ",", "columns", "=", "[", "'fuzzy_symbol'", "]", ")", "op", ".", "create_index", "(", "'ix_equities_company_symbol'", ",", "table_name", "=", "'equities'", ",", "columns", "=", "[", "'company_symbol'", "]", ")" ]
downgrade assets db by removing the auto_close_date column .
train
true
46,103
def LU_solve(matlist, variable, constant, K): new_matlist = copy.deepcopy(matlist) nrow = len(new_matlist) (L, U) = LU(new_matlist, K) y = [[i] for i in symbols(('y:%i' % nrow))] forward_substitution(L, y, constant, K) backward_substitution(U, variable, y, K) return variable
[ "def", "LU_solve", "(", "matlist", ",", "variable", ",", "constant", ",", "K", ")", ":", "new_matlist", "=", "copy", ".", "deepcopy", "(", "matlist", ")", "nrow", "=", "len", "(", "new_matlist", ")", "(", "L", ",", "U", ")", "=", "LU", "(", "new_matlist", ",", "K", ")", "y", "=", "[", "[", "i", "]", "for", "i", "in", "symbols", "(", "(", "'y:%i'", "%", "nrow", ")", ")", "]", "forward_substitution", "(", "L", ",", "y", ",", "constant", ",", "K", ")", "backward_substitution", "(", "U", ",", "variable", ",", "y", ",", "K", ")", "return", "variable" ]
solves a system of equations using lu decomposition given a matrix of coefficients .
train
false
46,104
def coerce_numbers_to_my_dtype(f): @wraps(f) def method(self, other): if isinstance(other, Number): other = coerce_to_dtype(self.dtype, other) return f(self, other) return method
[ "def", "coerce_numbers_to_my_dtype", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "method", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "Number", ")", ":", "other", "=", "coerce_to_dtype", "(", "self", ".", "dtype", ",", "other", ")", "return", "f", "(", "self", ",", "other", ")", "return", "method" ]
a decorator for methods whose signature is f that coerces other to self .
train
true
46,105
def first_match(predicate, iterable): for item in iterable: if predicate(item): return item return None
[ "def", "first_match", "(", "predicate", ",", "iterable", ")", ":", "for", "item", "in", "iterable", ":", "if", "predicate", "(", "item", ")", ":", "return", "item", "return", "None" ]
gets the first element matched by the predicate in the iterable .
train
false
46,106
def delete_settings_from_fixture(fixture): deleted_settings = {} for settingsgroup in fixture: group = SettingsGroup.query.filter_by(key=settingsgroup[0]).first() deleted_settings[group] = [] for settings in settingsgroup[1][u'settings']: setting = Setting.query.filter_by(key=settings[0]).first() if setting: deleted_settings[group].append(setting) setting.delete() group.delete() return deleted_settings
[ "def", "delete_settings_from_fixture", "(", "fixture", ")", ":", "deleted_settings", "=", "{", "}", "for", "settingsgroup", "in", "fixture", ":", "group", "=", "SettingsGroup", ".", "query", ".", "filter_by", "(", "key", "=", "settingsgroup", "[", "0", "]", ")", ".", "first", "(", ")", "deleted_settings", "[", "group", "]", "=", "[", "]", "for", "settings", "in", "settingsgroup", "[", "1", "]", "[", "u'settings'", "]", ":", "setting", "=", "Setting", ".", "query", ".", "filter_by", "(", "key", "=", "settings", "[", "0", "]", ")", ".", "first", "(", ")", "if", "setting", ":", "deleted_settings", "[", "group", "]", ".", "append", "(", "setting", ")", "setting", ".", "delete", "(", ")", "group", ".", "delete", "(", ")", "return", "deleted_settings" ]
deletes the settings from a fixture from the database .
train
false
46,107
def get_auth_username(): return LDAP_USERNAME.get()
[ "def", "get_auth_username", "(", ")", ":", "return", "LDAP_USERNAME", ".", "get", "(", ")" ]
backward compatibility .
train
false
46,108
def featurenormal(X, axis=0): mu = np.array(X).mean((not axis)) X_norm = (X - mu.reshape(X.shape[0], (-1))) sigma = np.std(X_norm, axis=(not axis)) X_norm = (X_norm / sigma.reshape(X.shape[0], (-1))) return (X_norm, mu, sigma)
[ "def", "featurenormal", "(", "X", ",", "axis", "=", "0", ")", ":", "mu", "=", "np", ".", "array", "(", "X", ")", ".", "mean", "(", "(", "not", "axis", ")", ")", "X_norm", "=", "(", "X", "-", "mu", ".", "reshape", "(", "X", ".", "shape", "[", "0", "]", ",", "(", "-", "1", ")", ")", ")", "sigma", "=", "np", ".", "std", "(", "X_norm", ",", "axis", "=", "(", "not", "axis", ")", ")", "X_norm", "=", "(", "X_norm", "/", "sigma", ".", "reshape", "(", "X", ".", "shape", "[", "0", "]", ",", "(", "-", "1", ")", ")", ")", "return", "(", "X_norm", ",", "mu", ",", "sigma", ")" ]
x is n*m axis==0: columns axis==1: rows .
train
false
46,109
def html_unquote(s, encoding=None): if isinstance(s, six.binary_type): s = s.decode((encoding or default_encoding)) return _unquote_re.sub(_entity_subber, s)
[ "def", "html_unquote", "(", "s", ",", "encoding", "=", "None", ")", ":", "if", "isinstance", "(", "s", ",", "six", ".", "binary_type", ")", ":", "s", "=", "s", ".", "decode", "(", "(", "encoding", "or", "default_encoding", ")", ")", "return", "_unquote_re", ".", "sub", "(", "_entity_subber", ",", "s", ")" ]
unquote entities in html .
train
false
46,110
def srcroute_disable(rt_table, ipaddr): run(settings.ip, 'rule', 'del', 'from', ipaddr, 'table', rt_table) run(settings.ip, 'route', 'flush', 'cache')
[ "def", "srcroute_disable", "(", "rt_table", ",", "ipaddr", ")", ":", "run", "(", "settings", ".", "ip", ",", "'rule'", ",", "'del'", ",", "'from'", ",", "ipaddr", ",", "'table'", ",", "rt_table", ")", "run", "(", "settings", ".", "ip", ",", "'route'", ",", "'flush'", ",", "'cache'", ")" ]
disable routing policy for specified source ip address .
train
false