Column                Type            Min        Max
id_within_dataset     int64           1          55.5k
snippet               stringlengths   19         14.2k
tokens                listlengths     6          1.63k
nl                    stringlengths   6          352
split_within_dataset  stringclasses   1 value
is_duplicated         bool            2 classes
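A minimal sketch of how records with this schema might be loaded and inspected, assuming the corpus is published on the Hugging Face Hub; the identifier "owner/code-docstring-corpus" is a hypothetical placeholder, not the dataset's real name, and only the column names are taken from the schema above.

    # Hypothetical usage sketch; "owner/code-docstring-corpus" is a placeholder id.
    from datasets import load_dataset

    ds = load_dataset("owner/code-docstring-corpus", split="train")

    for row in ds.select(range(3)):
        # Each record pairs a code snippet with its token list and a
        # natural-language summary, per the schema above.
        print(row["id_within_dataset"], "|", row["nl"])
        print(row["snippet"].splitlines()[0])
        print(len(row["tokens"]), "tokens; duplicated:", row["is_duplicated"])

Each row below lists, in order: id_within_dataset, snippet, tokens, nl, split_within_dataset, and is_duplicated.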
3,095
def create_address(kwargs=None, call=None):
    if (call != 'function'):
        raise SaltCloudSystemExit('The create_address function must be called with -f or --function.')
    if ((not kwargs) or ('name' not in kwargs)):
        log.error('A name must be specified when creating an address.')
        return False
    if ('region' not in kwargs):
        log.error('A region must be specified for the address.')
        return False
    name = kwargs['name']
    ex_region = kwargs['region']
    ex_address = kwargs.get('address', None)
    conn = get_conn()
    __utils__['cloud.fire_event']('event', 'create address', 'salt/cloud/address/creating', args=kwargs, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
    addy = conn.ex_create_address(name, ex_region, ex_address)
    __utils__['cloud.fire_event']('event', 'created address', 'salt/cloud/address/created', args=kwargs, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
    log.info(('Created GCE Address ' + name))
    return _expand_address(addy)
[ "def", "create_address", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The create_address function must be called with -f or --function.'", ")", "if", "(", "(", "not", "kwargs", ")", "or", "(", "'name'", "not", "in", "kwargs", ")", ")", ":", "log", ".", "error", "(", "'A name must be specified when creating an address.'", ")", "return", "False", "if", "(", "'region'", "not", "in", "kwargs", ")", ":", "log", ".", "error", "(", "'A region must be specified for the address.'", ")", "return", "False", "name", "=", "kwargs", "[", "'name'", "]", "ex_region", "=", "kwargs", "[", "'region'", "]", "ex_address", "=", "kwargs", ".", "get", "(", "'address'", ",", "None", ")", "conn", "=", "get_conn", "(", ")", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'create address'", ",", "'salt/cloud/address/creating'", ",", "args", "=", "kwargs", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "addy", "=", "conn", ".", "ex_create_address", "(", "name", ",", "ex_region", ",", "ex_address", ")", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'created address'", ",", "'salt/cloud/address/created'", ",", "args", "=", "kwargs", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "log", ".", "info", "(", "(", "'Created GCE Address '", "+", "name", ")", ")", "return", "_expand_address", "(", "addy", ")" ]
create a static address in a region .
train
true
3,096
def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python', 'distribute'), include_editables=True, editables_only=False):
    if local_only:
        local_test = dist_is_local
    else:
        local_test = (lambda d: True)
    if include_editables:
        editable_test = (lambda d: True)
    else:
        editable_test = (lambda d: (not dist_is_editable(d)))
    if editables_only:
        editables_only_test = (lambda d: dist_is_editable(d))
    else:
        editables_only_test = (lambda d: True)
    return [d for d in pkg_resources.working_set if (local_test(d) and (d.key not in skip) and editable_test(d) and editables_only_test(d))]
[ "def", "get_installed_distributions", "(", "local_only", "=", "True", ",", "skip", "=", "(", "'setuptools'", ",", "'pip'", ",", "'python'", ",", "'distribute'", ")", ",", "include_editables", "=", "True", ",", "editables_only", "=", "False", ")", ":", "if", "local_only", ":", "local_test", "=", "dist_is_local", "else", ":", "local_test", "=", "(", "lambda", "d", ":", "True", ")", "if", "include_editables", ":", "editable_test", "=", "(", "lambda", "d", ":", "True", ")", "else", ":", "editable_test", "=", "(", "lambda", "d", ":", "(", "not", "dist_is_editable", "(", "d", ")", ")", ")", "if", "editables_only", ":", "editables_only_test", "=", "(", "lambda", "d", ":", "dist_is_editable", "(", "d", ")", ")", "else", ":", "editables_only_test", "=", "(", "lambda", "d", ":", "True", ")", "return", "[", "d", "for", "d", "in", "pkg_resources", ".", "working_set", "if", "(", "local_test", "(", "d", ")", "and", "(", "d", ".", "key", "not", "in", "skip", ")", "and", "editable_test", "(", "d", ")", "and", "editables_only_test", "(", "d", ")", ")", "]" ]
return a list of installed distribution objects .
train
true
3,097
def FlagCxx11Features(filename, clean_lines, linenum, error):
    line = clean_lines.elided[linenum]
    include = Match('\\s*#\\s*include\\s+[<"]([^<"]+)[">]', line)
    if (include and include.group(1).startswith('tr1/')):
        error(filename, linenum, 'build/c++tr1', 5, ('C++ TR1 headers such as <%s> are unapproved.' % include.group(1)))
    if (include and (include.group(1) in ('cfenv', 'condition_variable', 'fenv.h', 'future', 'mutex', 'thread', 'chrono', 'ratio', 'regex', 'system_error'))):
        error(filename, linenum, 'build/c++11', 5, ('<%s> is an unapproved C++11 header.' % include.group(1)))
    if (Match('\\s*#', line) and (not Match('\\s*#\\s*define\\b', line))):
        return
    for top_name in ('alignment_of', 'aligned_union'):
        if Search(('\\bstd::%s\\b' % top_name), line):
            error(filename, linenum, 'build/c++11', 5, ('std::%s is an unapproved C++11 class or function. Send c-style an example of where it would make your code more readable, and they may let you use it.' % top_name))
[ "def", "FlagCxx11Features", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "include", "=", "Match", "(", "'\\\\s*#\\\\s*include\\\\s+[<\"]([^<\"]+)[\">]'", ",", "line", ")", "if", "(", "include", "and", "include", ".", "group", "(", "1", ")", ".", "startswith", "(", "'tr1/'", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++tr1'", ",", "5", ",", "(", "'C++ TR1 headers such as <%s> are unapproved.'", "%", "include", ".", "group", "(", "1", ")", ")", ")", "if", "(", "include", "and", "(", "include", ".", "group", "(", "1", ")", "in", "(", "'cfenv'", ",", "'condition_variable'", ",", "'fenv.h'", ",", "'future'", ",", "'mutex'", ",", "'thread'", ",", "'chrono'", ",", "'ratio'", ",", "'regex'", ",", "'system_error'", ")", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'<%s> is an unapproved C++11 header.'", "%", "include", ".", "group", "(", "1", ")", ")", ")", "if", "(", "Match", "(", "'\\\\s*#'", ",", "line", ")", "and", "(", "not", "Match", "(", "'\\\\s*#\\\\s*define\\\\b'", ",", "line", ")", ")", ")", ":", "return", "for", "top_name", "in", "(", "'alignment_of'", ",", "'aligned_union'", ")", ":", "if", "Search", "(", "(", "'\\\\bstd::%s\\\\b'", "%", "top_name", ")", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'std::%s is an unapproved C++11 class or function. Send c-style an example of where it would make your code more readable, and they may let you use it.'", "%", "top_name", ")", ")" ]
flag those c++11 features that we only allow in certain places .
train
false
3,098
def get_recording_dirs(data_dir):
    filtered_recording_dirs = []
    if is_pupil_rec_dir(data_dir):
        filtered_recording_dirs.append(data_dir)
    for (root, dirs, files) in os.walk(data_dir):
        filtered_recording_dirs += [os.path.join(root, d) for d in dirs if ((not d.startswith('.')) and is_pupil_rec_dir(os.path.join(root, d)))]
    logger.debug('Filtered Recording Dirs: {}'.format(filtered_recording_dirs))
    return filtered_recording_dirs
[ "def", "get_recording_dirs", "(", "data_dir", ")", ":", "filtered_recording_dirs", "=", "[", "]", "if", "is_pupil_rec_dir", "(", "data_dir", ")", ":", "filtered_recording_dirs", ".", "append", "(", "data_dir", ")", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "data_dir", ")", ":", "filtered_recording_dirs", "+=", "[", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", "for", "d", "in", "dirs", "if", "(", "(", "not", "d", ".", "startswith", "(", "'.'", ")", ")", "and", "is_pupil_rec_dir", "(", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", ")", ")", "]", "logger", ".", "debug", "(", "'Filtered Recording Dirs: {}'", ".", "format", "(", "filtered_recording_dirs", ")", ")", "return", "filtered_recording_dirs" ]
you can supply a data folder or any folder - all folders within will be checked for necessary files - in order to make a visualization .
train
false
3,099
def dt_to_filetime(dt, delta_from_utc):
    dt += delta_from_utc
    ft = (EPOCH_AS_FILETIME + (timegm(dt.timetuple()) * HUNDREDS_OF_NANOSECONDS))
    return (ft + (dt.microsecond * 10))
[ "def", "dt_to_filetime", "(", "dt", ",", "delta_from_utc", ")", ":", "dt", "+=", "delta_from_utc", "ft", "=", "(", "EPOCH_AS_FILETIME", "+", "(", "timegm", "(", "dt", ".", "timetuple", "(", ")", ")", "*", "HUNDREDS_OF_NANOSECONDS", ")", ")", "return", "(", "ft", "+", "(", "dt", ".", "microsecond", "*", "10", ")", ")" ]
converts a datetime to microsoft filetime format .
train
false
3,100
def reset():
    _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
reset a vm by performing a hard shutdown and then a restart .
train
false
3,101
def cho_solve_banded(cb_and_lower, b, overwrite_b=False, check_finite=True):
    (cb, lower) = cb_and_lower
    if check_finite:
        cb = asarray_chkfinite(cb)
        b = asarray_chkfinite(b)
    else:
        cb = asarray(cb)
        b = asarray(b)
    if (cb.shape[(-1)] != b.shape[0]):
        raise ValueError('shapes of cb and b are not compatible.')
    (pbtrs,) = get_lapack_funcs(('pbtrs',), (cb, b))
    (x, info) = pbtrs(cb, b, lower=lower, overwrite_b=overwrite_b)
    if (info > 0):
        raise LinAlgError(('%d-th leading minor not positive definite' % info))
    if (info < 0):
        raise ValueError(('illegal value in %d-th argument of internal pbtrs' % (- info)))
    return x
[ "def", "cho_solve_banded", "(", "cb_and_lower", ",", "b", ",", "overwrite_b", "=", "False", ",", "check_finite", "=", "True", ")", ":", "(", "cb", ",", "lower", ")", "=", "cb_and_lower", "if", "check_finite", ":", "cb", "=", "asarray_chkfinite", "(", "cb", ")", "b", "=", "asarray_chkfinite", "(", "b", ")", "else", ":", "cb", "=", "asarray", "(", "cb", ")", "b", "=", "asarray", "(", "b", ")", "if", "(", "cb", ".", "shape", "[", "(", "-", "1", ")", "]", "!=", "b", ".", "shape", "[", "0", "]", ")", ":", "raise", "ValueError", "(", "'shapes of cb and b are not compatible.'", ")", "(", "pbtrs", ",", ")", "=", "get_lapack_funcs", "(", "(", "'pbtrs'", ",", ")", ",", "(", "cb", ",", "b", ")", ")", "(", "x", ",", "info", ")", "=", "pbtrs", "(", "cb", ",", "b", ",", "lower", "=", "lower", ",", "overwrite_b", "=", "overwrite_b", ")", "if", "(", "info", ">", "0", ")", ":", "raise", "LinAlgError", "(", "(", "'%d-th leading minor not positive definite'", "%", "info", ")", ")", "if", "(", "info", "<", "0", ")", ":", "raise", "ValueError", "(", "(", "'illegal value in %d-th argument of internal pbtrs'", "%", "(", "-", "info", ")", ")", ")", "return", "x" ]
solve the linear equations a x = b .
train
false
3,102
def nl2br(string):
    return unicodifier(string).replace(u'\n', u'<br>\n')
[ "def", "nl2br", "(", "string", ")", ":", "return", "unicodifier", "(", "string", ")", ".", "replace", "(", "u'\\n'", ",", "u'<br>\\n'", ")" ]
converts newlines to html linebreaks in string .
train
false
3,103
def bind_arguments(func, args, kwargs):
    (args, kwargs, missing, extra, extra_positional, arg_spec, vararg_var, kwarg_var) = _parse_signature(func)(args, kwargs)
    values = {}
    for ((name, has_default, default), value) in zip(arg_spec, args):
        values[name] = value
    if (vararg_var is not None):
        values[vararg_var] = tuple(extra_positional)
    elif extra_positional:
        raise TypeError('too many positional arguments')
    if (kwarg_var is not None):
        multikw = (set(extra) & set([x[0] for x in arg_spec]))
        if multikw:
            raise TypeError(('got multiple values for keyword argument ' + repr(next(iter(multikw)))))
        values[kwarg_var] = extra
    elif extra:
        raise TypeError(('got unexpected keyword argument ' + repr(next(iter(extra)))))
    return values
[ "def", "bind_arguments", "(", "func", ",", "args", ",", "kwargs", ")", ":", "(", "args", ",", "kwargs", ",", "missing", ",", "extra", ",", "extra_positional", ",", "arg_spec", ",", "vararg_var", ",", "kwarg_var", ")", "=", "_parse_signature", "(", "func", ")", "(", "args", ",", "kwargs", ")", "values", "=", "{", "}", "for", "(", "(", "name", ",", "has_default", ",", "default", ")", ",", "value", ")", "in", "zip", "(", "arg_spec", ",", "args", ")", ":", "values", "[", "name", "]", "=", "value", "if", "(", "vararg_var", "is", "not", "None", ")", ":", "values", "[", "vararg_var", "]", "=", "tuple", "(", "extra_positional", ")", "elif", "extra_positional", ":", "raise", "TypeError", "(", "'too many positional arguments'", ")", "if", "(", "kwarg_var", "is", "not", "None", ")", ":", "multikw", "=", "(", "set", "(", "extra", ")", "&", "set", "(", "[", "x", "[", "0", "]", "for", "x", "in", "arg_spec", "]", ")", ")", "if", "multikw", ":", "raise", "TypeError", "(", "(", "'got multiple values for keyword argument '", "+", "repr", "(", "next", "(", "iter", "(", "multikw", ")", ")", ")", ")", ")", "values", "[", "kwarg_var", "]", "=", "extra", "elif", "extra", ":", "raise", "TypeError", "(", "(", "'got unexpected keyword argument '", "+", "repr", "(", "next", "(", "iter", "(", "extra", ")", ")", ")", ")", ")", "return", "values" ]
bind the arguments provided into a dict .
train
true
3,105
def test_x_flattened():
    assert_equal(lae_b.X.shape[(-1)], 1)
    assert_equal(lae_a.X.shape[(-1)], 1)
[ "def", "test_x_flattened", "(", ")", ":", "assert_equal", "(", "lae_b", ".", "X", ".", "shape", "[", "(", "-", "1", ")", "]", ",", "1", ")", "assert_equal", "(", "lae_a", ".", "X", ".", "shape", "[", "(", "-", "1", ")", "]", ",", "1", ")" ]
lae: is x flattened and reshaped .
train
false
3,106
def get_stock_information_html(supplier, product):
    stock = StockCount.objects.filter(product=product, supplier=supplier).first()
    context = {'div_id': get_stock_information_div_id(supplier, product), 'sales_decimals': (product.sales_unit.decimals if product.sales_unit else 0), 'sales_unit': (product.sales_unit.short_name if product.sales_unit else ''), 'stock': stock}
    if ('shuup.notify' in settings.INSTALLED_APPS):
        context['alert_limit'] = True
    return render_to_string('shuup/simple_supplier/admin/stock_information.jinja', context)
[ "def", "get_stock_information_html", "(", "supplier", ",", "product", ")", ":", "stock", "=", "StockCount", ".", "objects", ".", "filter", "(", "product", "=", "product", ",", "supplier", "=", "supplier", ")", ".", "first", "(", ")", "context", "=", "{", "'div_id'", ":", "get_stock_information_div_id", "(", "supplier", ",", "product", ")", ",", "'sales_decimals'", ":", "(", "product", ".", "sales_unit", ".", "decimals", "if", "product", ".", "sales_unit", "else", "0", ")", ",", "'sales_unit'", ":", "(", "product", ".", "sales_unit", ".", "short_name", "if", "product", ".", "sales_unit", "else", "''", ")", ",", "'stock'", ":", "stock", "}", "if", "(", "'shuup.notify'", "in", "settings", ".", "INSTALLED_APPS", ")", ":", "context", "[", "'alert_limit'", "]", "=", "True", "return", "render_to_string", "(", "'shuup/simple_supplier/admin/stock_information.jinja'", ",", "context", ")" ]
get html string to show current stock information for product .
train
false
3,107
def sighandler(sig, stack):
    syslog.syslog(syslog.LOG_WARNING, ('Exiting on signal = %d..' % (sig,)))
    sys.exit((128 + signal.SIGTERM))
[ "def", "sighandler", "(", "sig", ",", "stack", ")", ":", "syslog", ".", "syslog", "(", "syslog", ".", "LOG_WARNING", ",", "(", "'Exiting on signal = %d..'", "%", "(", "sig", ",", ")", ")", ")", "sys", ".", "exit", "(", "(", "128", "+", "signal", ".", "SIGTERM", ")", ")" ]
a signal handler for the daemon .
train
false
3,108
def validate_files(pelican):
    for (dirpath, _, filenames) in os.walk(pelican.settings['OUTPUT_PATH']):
        for name in filenames:
            if should_validate(name):
                filepath = os.path.join(dirpath, name)
                validate(filepath)
[ "def", "validate_files", "(", "pelican", ")", ":", "for", "(", "dirpath", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "pelican", ".", "settings", "[", "'OUTPUT_PATH'", "]", ")", ":", "for", "name", "in", "filenames", ":", "if", "should_validate", "(", "name", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "name", ")", "validate", "(", "filepath", ")" ]
validate a generated html file .
train
true
3,109
def module_imports_on_top_of_file(logical_line, indent_level, checker_state, noqa):
    def is_string_literal(line):
        if (line[0] in 'uUbB'):
            line = line[1:]
        if (line and (line[0] in 'rR')):
            line = line[1:]
        return (line and ((line[0] == '"') or (line[0] == "'")))
    allowed_try_keywords = ('try', 'except', 'else', 'finally')
    if indent_level:
        return
    if (not logical_line):
        return
    if noqa:
        return
    line = logical_line
    if (line.startswith('import ') or line.startswith('from ')):
        if checker_state.get('seen_non_imports', False):
            (yield (0, 'E402 module level import not at top of file'))
    elif any((line.startswith(kw) for kw in allowed_try_keywords)):
        return
    elif is_string_literal(line):
        if checker_state.get('seen_docstring', False):
            checker_state['seen_non_imports'] = True
        else:
            checker_state['seen_docstring'] = True
    else:
        checker_state['seen_non_imports'] = True
[ "def", "module_imports_on_top_of_file", "(", "logical_line", ",", "indent_level", ",", "checker_state", ",", "noqa", ")", ":", "def", "is_string_literal", "(", "line", ")", ":", "if", "(", "line", "[", "0", "]", "in", "'uUbB'", ")", ":", "line", "=", "line", "[", "1", ":", "]", "if", "(", "line", "and", "(", "line", "[", "0", "]", "in", "'rR'", ")", ")", ":", "line", "=", "line", "[", "1", ":", "]", "return", "(", "line", "and", "(", "(", "line", "[", "0", "]", "==", "'\"'", ")", "or", "(", "line", "[", "0", "]", "==", "\"'\"", ")", ")", ")", "allowed_try_keywords", "=", "(", "'try'", ",", "'except'", ",", "'else'", ",", "'finally'", ")", "if", "indent_level", ":", "return", "if", "(", "not", "logical_line", ")", ":", "return", "if", "noqa", ":", "return", "line", "=", "logical_line", "if", "(", "line", ".", "startswith", "(", "'import '", ")", "or", "line", ".", "startswith", "(", "'from '", ")", ")", ":", "if", "checker_state", ".", "get", "(", "'seen_non_imports'", ",", "False", ")", ":", "(", "yield", "(", "0", ",", "'E402 module level import not at top of file'", ")", ")", "elif", "any", "(", "(", "line", ".", "startswith", "(", "kw", ")", "for", "kw", "in", "allowed_try_keywords", ")", ")", ":", "return", "elif", "is_string_literal", "(", "line", ")", ":", "if", "checker_state", ".", "get", "(", "'seen_docstring'", ",", "False", ")", ":", "checker_state", "[", "'seen_non_imports'", "]", "=", "True", "else", ":", "checker_state", "[", "'seen_docstring'", "]", "=", "True", "else", ":", "checker_state", "[", "'seen_non_imports'", "]", "=", "True" ]
place imports at the top of the file .
train
true
3,111
def relationship_diff(current_items, new_items):
    return {'add': {k: new_items[k] for k in (set(new_items.keys()) - set(current_items.keys()))},
            'remove': {k: current_items[k] for k in (set(current_items.keys()) - set(new_items.keys()))}}
[ "def", "relationship_diff", "(", "current_items", ",", "new_items", ")", ":", "return", "{", "'add'", ":", "{", "k", ":", "new_items", "[", "k", "]", "for", "k", "in", "(", "set", "(", "new_items", ".", "keys", "(", ")", ")", "-", "set", "(", "current_items", ".", "keys", "(", ")", ")", ")", "}", ",", "'remove'", ":", "{", "k", ":", "current_items", "[", "k", "]", "for", "k", "in", "(", "set", "(", "current_items", ".", "keys", "(", ")", ")", "-", "set", "(", "new_items", ".", "keys", "(", ")", ")", ")", "}", "}" ]
to be used in post and put/patch relationship requests .
train
false
3,112
@contextlib.contextmanager
def create_file_backed_module(code):
    with create_tempfile() as temp:
        module = test_utils.build_module(code)
        module.file = temp
        (yield module)
[ "@", "contextlib", ".", "contextmanager", "def", "create_file_backed_module", "(", "code", ")", ":", "with", "create_tempfile", "(", ")", "as", "temp", ":", "module", "=", "test_utils", ".", "build_module", "(", "code", ")", "module", ".", "file", "=", "temp", "(", "yield", "module", ")" ]
create an astroid module for the given code .
train
false
3,113
def getNewRepository():
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
3,114
def _clean_salt_variables(params, variable_prefix='__'):
    list(list(map(params.pop, [k for k in params if k.startswith(variable_prefix)])))
    return params
[ "def", "_clean_salt_variables", "(", "params", ",", "variable_prefix", "=", "'__'", ")", ":", "list", "(", "list", "(", "map", "(", "params", ".", "pop", ",", "[", "k", "for", "k", "in", "params", "if", "k", ".", "startswith", "(", "variable_prefix", ")", "]", ")", ")", ")", "return", "params" ]
pops out variables from params which starts with variable_prefix .
train
true
3,116
def templates_for_device(request, templates):
    from mezzanine.conf import settings
    if (not isinstance(templates, (list, tuple))):
        templates = [templates]
    device = device_from_request(request)
    device_templates = []
    for template in templates:
        if device:
            device_templates.append((u'%s/%s' % (device, template)))
        if (settings.DEVICE_DEFAULT and (settings.DEVICE_DEFAULT != device)):
            default = (u'%s/%s' % (settings.DEVICE_DEFAULT, template))
            device_templates.append(default)
        device_templates.append(template)
    return device_templates
[ "def", "templates_for_device", "(", "request", ",", "templates", ")", ":", "from", "mezzanine", ".", "conf", "import", "settings", "if", "(", "not", "isinstance", "(", "templates", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "templates", "=", "[", "templates", "]", "device", "=", "device_from_request", "(", "request", ")", "device_templates", "=", "[", "]", "for", "template", "in", "templates", ":", "if", "device", ":", "device_templates", ".", "append", "(", "(", "u'%s/%s'", "%", "(", "device", ",", "template", ")", ")", ")", "if", "(", "settings", ".", "DEVICE_DEFAULT", "and", "(", "settings", ".", "DEVICE_DEFAULT", "!=", "device", ")", ")", ":", "default", "=", "(", "u'%s/%s'", "%", "(", "settings", ".", "DEVICE_DEFAULT", ",", "template", ")", ")", "device_templates", ".", "append", "(", "default", ")", "device_templates", ".", "append", "(", "template", ")", "return", "device_templates" ]
given a template name .
train
true
3,117
def topology(func, *args):
    argtypes = [GEOM_PTR]
    if args:
        argtypes += args
    func.argtypes = argtypes
    func.restype = GEOM_PTR
    func.errcheck = check_geom
    return func
[ "def", "topology", "(", "func", ",", "*", "args", ")", ":", "argtypes", "=", "[", "GEOM_PTR", "]", "if", "args", ":", "argtypes", "+=", "args", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "GEOM_PTR", "func", ".", "errcheck", "=", "check_geom", "return", "func" ]
for geos unary topology functions .
train
false
3,119
@testing.requires_testing_data
def test_make_inverse_operator_fixed():
    fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False, force_fixed=False)
    fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False, force_fixed=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_1, noise_cov, depth=0.8, loose=None, fixed=True)
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2, noise_cov, depth=0.8, loose=None, fixed=True)
    inv_op = make_inverse_operator(evoked.info, fwd_2, noise_cov, depth=None, loose=None, fixed=True)
    inverse_operator_nodepth = read_inverse_operator(fname_inv_fixed_nodepth)
    _compare_inverses_approx(inverse_operator_nodepth, inv_op, evoked, 0, 0.01)
    assert_true((compute_rank_inverse(inverse_operator_nodepth) == 302))
[ "@", "testing", ".", "requires_testing_data", "def", "test_make_inverse_operator_fixed", "(", ")", ":", "fwd_1", "=", "read_forward_solution_meg", "(", "fname_fwd", ",", "surf_ori", "=", "False", ",", "force_fixed", "=", "False", ")", "fwd_2", "=", "read_forward_solution_meg", "(", "fname_fwd", ",", "surf_ori", "=", "False", ",", "force_fixed", "=", "True", ")", "evoked", "=", "_get_evoked", "(", ")", "noise_cov", "=", "read_cov", "(", "fname_cov", ")", "assert_raises", "(", "ValueError", ",", "make_inverse_operator", ",", "evoked", ".", "info", ",", "fwd_1", ",", "noise_cov", ",", "depth", "=", "0.8", ",", "loose", "=", "None", ",", "fixed", "=", "True", ")", "assert_raises", "(", "ValueError", ",", "make_inverse_operator", ",", "evoked", ".", "info", ",", "fwd_2", ",", "noise_cov", ",", "depth", "=", "0.8", ",", "loose", "=", "None", ",", "fixed", "=", "True", ")", "inv_op", "=", "make_inverse_operator", "(", "evoked", ".", "info", ",", "fwd_2", ",", "noise_cov", ",", "depth", "=", "None", ",", "loose", "=", "None", ",", "fixed", "=", "True", ")", "inverse_operator_nodepth", "=", "read_inverse_operator", "(", "fname_inv_fixed_nodepth", ")", "_compare_inverses_approx", "(", "inverse_operator_nodepth", ",", "inv_op", ",", "evoked", ",", "0", ",", "0.01", ")", "assert_true", "(", "(", "compute_rank_inverse", "(", "inverse_operator_nodepth", ")", "==", "302", ")", ")" ]
test mne inverse computation .
train
false
3,121
def hookspath():
    return [curdir]
[ "def", "hookspath", "(", ")", ":", "return", "[", "curdir", "]" ]
returns a list with the directory that contains the alternate pyinstaller hook for kivy .
train
false
3,122
def partial_product(start, stop):
    numfactors = ((stop - start) >> 1)
    if (not numfactors):
        return 1
    elif (numfactors == 1):
        return start
    else:
        mid = ((start + numfactors) | 1)
        return (partial_product(start, mid) * partial_product(mid, stop))
[ "def", "partial_product", "(", "start", ",", "stop", ")", ":", "numfactors", "=", "(", "(", "stop", "-", "start", ")", ">>", "1", ")", "if", "(", "not", "numfactors", ")", ":", "return", "1", "elif", "(", "numfactors", "==", "1", ")", ":", "return", "start", "else", ":", "mid", "=", "(", "(", "start", "+", "numfactors", ")", "|", "1", ")", "return", "(", "partial_product", "(", "start", ",", "mid", ")", "*", "partial_product", "(", "mid", ",", "stop", ")", ")" ]
product of integers in range .
train
false
3,123
def openshift_img_streams(registry, xml_parent, data):
    scm = XML.SubElement(xml_parent, 'scm', {'class': 'com.openshift.jenkins.plugins.pipeline.OpenShiftImageStreams'})
    mapping = [('image-stream-name', 'imageStreamName', 'nodejs-010-centos7'), ('tag', 'tag', 'latest'), ('api-url', 'apiURL', 'https://openshift.default.svc.cluster.local'), ('namespace', 'namespace', 'test'), ('auth-token', 'authToken', ''), ('verbose', 'verbose', False)]
    convert_mapping_to_xml(scm, data, mapping, fail_required=True)
[ "def", "openshift_img_streams", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "scm", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'scm'", ",", "{", "'class'", ":", "'com.openshift.jenkins.plugins.pipeline.OpenShiftImageStreams'", "}", ")", "mapping", "=", "[", "(", "'image-stream-name'", ",", "'imageStreamName'", ",", "'nodejs-010-centos7'", ")", ",", "(", "'tag'", ",", "'tag'", ",", "'latest'", ")", ",", "(", "'api-url'", ",", "'apiURL'", ",", "'https://openshift.default.svc.cluster.local'", ")", ",", "(", "'namespace'", ",", "'namespace'", ",", "'test'", ")", ",", "(", "'auth-token'", ",", "'authToken'", ",", "''", ")", ",", "(", "'verbose'", ",", "'verbose'", ",", "False", ")", "]", "convert_mapping_to_xml", "(", "scm", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")" ]
yaml: openshift-img-streams rather than a build step extension plugin .
train
false
3,124
def syntax_file(gcs_uri):
    language_client = language.Client()
    document = language_client.document_from_url(gcs_uri)
    tokens = document.analyze_syntax()
    for token in tokens:
        print '{}: {}'.format(token.part_of_speech, token.text_content)
[ "def", "syntax_file", "(", "gcs_uri", ")", ":", "language_client", "=", "language", ".", "Client", "(", ")", "document", "=", "language_client", ".", "document_from_url", "(", "gcs_uri", ")", "tokens", "=", "document", ".", "analyze_syntax", "(", ")", "for", "token", "in", "tokens", ":", "print", "'{}: {}'", ".", "format", "(", "token", ".", "part_of_speech", ",", "token", ".", "text_content", ")" ]
detects syntax in the file located in google cloud storage .
train
false
3,126
def disable_task(name):
    return _run_cmd('kapacitor disable {0}'.format(name))
[ "def", "disable_task", "(", "name", ")", ":", "return", "_run_cmd", "(", "'kapacitor disable {0}'", ".", "format", "(", "name", ")", ")" ]
disable a kapacitor task .
train
false
3,127
@composite
def docker_image_strategy(draw, repository_strategy=unique_name_strategy(), tag_strategy=unique_name_strategy()):
    return DockerImage(repository=draw(repository_strategy), tag=draw(tag_strategy))
[ "@", "composite", "def", "docker_image_strategy", "(", "draw", ",", "repository_strategy", "=", "unique_name_strategy", "(", ")", ",", "tag_strategy", "=", "unique_name_strategy", "(", ")", ")", ":", "return", "DockerImage", "(", "repository", "=", "draw", "(", "repository_strategy", ")", ",", "tag", "=", "draw", "(", "tag_strategy", ")", ")" ]
a hypothesis strategy to generate a dockerimage .
train
false
3,130
def log_nodes_cb(node, status):
    import datetime
    import logging
    import json
    if (node.result is not None):
        try:
            runtime = node.result.runtime
            runtime_memory_gb = runtime.runtime_memory_gb
            runtime_threads = runtime.runtime_threads
        except AttributeError:
            runtime_memory_gb = runtime_threads = u'Unknown'
    else:
        runtime_memory_gb = runtime_threads = u'N/A'
    logger = logging.getLogger(u'callback')
    status_dict = {u'name': node.name, u'id': node._id, u'estimated_memory_gb': node._interface.estimated_memory_gb, u'num_threads': node._interface.num_threads}
    if (status == u'start'):
        status_dict[u'start'] = str(datetime.datetime.now())
    elif (status == u'end'):
        status_dict[u'finish'] = str(datetime.datetime.now())
        status_dict[u'runtime_threads'] = runtime_threads
        status_dict[u'runtime_memory_gb'] = runtime_memory_gb
    else:
        status_dict[u'finish'] = str(datetime.datetime.now())
        status_dict[u'error'] = True
    logger.debug(json.dumps(status_dict))
[ "def", "log_nodes_cb", "(", "node", ",", "status", ")", ":", "import", "datetime", "import", "logging", "import", "json", "if", "(", "node", ".", "result", "is", "not", "None", ")", ":", "try", ":", "runtime", "=", "node", ".", "result", ".", "runtime", "runtime_memory_gb", "=", "runtime", ".", "runtime_memory_gb", "runtime_threads", "=", "runtime", ".", "runtime_threads", "except", "AttributeError", ":", "runtime_memory_gb", "=", "runtime_threads", "=", "u'Unknown'", "else", ":", "runtime_memory_gb", "=", "runtime_threads", "=", "u'N/A'", "logger", "=", "logging", ".", "getLogger", "(", "u'callback'", ")", "status_dict", "=", "{", "u'name'", ":", "node", ".", "name", ",", "u'id'", ":", "node", ".", "_id", ",", "u'estimated_memory_gb'", ":", "node", ".", "_interface", ".", "estimated_memory_gb", ",", "u'num_threads'", ":", "node", ".", "_interface", ".", "num_threads", "}", "if", "(", "status", "==", "u'start'", ")", ":", "status_dict", "[", "u'start'", "]", "=", "str", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ")", "elif", "(", "status", "==", "u'end'", ")", ":", "status_dict", "[", "u'finish'", "]", "=", "str", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ")", "status_dict", "[", "u'runtime_threads'", "]", "=", "runtime_threads", "status_dict", "[", "u'runtime_memory_gb'", "]", "=", "runtime_memory_gb", "else", ":", "status_dict", "[", "u'finish'", "]", "=", "str", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ")", "status_dict", "[", "u'error'", "]", "=", "True", "logger", ".", "debug", "(", "json", ".", "dumps", "(", "status_dict", ")", ")" ]
function to record node run statistics to a log file as json dictionaries parameters node : nipype .
train
false
3,131
def guard_memory_error(context, builder, pointer, msg=None):
    assert isinstance(pointer.type, ir.PointerType), pointer.type
    exc_args = ((msg,) if msg else ())
    with builder.if_then(is_null(builder, pointer), likely=False):
        context.call_conv.return_user_exc(builder, MemoryError, exc_args)
[ "def", "guard_memory_error", "(", "context", ",", "builder", ",", "pointer", ",", "msg", "=", "None", ")", ":", "assert", "isinstance", "(", "pointer", ".", "type", ",", "ir", ".", "PointerType", ")", ",", "pointer", ".", "type", "exc_args", "=", "(", "(", "msg", ",", ")", "if", "msg", "else", "(", ")", ")", "with", "builder", ".", "if_then", "(", "is_null", "(", "builder", ",", "pointer", ")", ",", "likely", "=", "False", ")", ":", "context", ".", "call_conv", ".", "return_user_exc", "(", "builder", ",", "MemoryError", ",", "exc_args", ")" ]
guard against *pointer* being null .
train
false
3,132
def _greenthread_yield(dbapi_con, con_record):
    greenthread.sleep(0)
[ "def", "_greenthread_yield", "(", "dbapi_con", ",", "con_record", ")", ":", "greenthread", ".", "sleep", "(", "0", ")" ]
ensure other greenthreads get a chance to be executed .
train
false
3,133
def _len_guards(M):
    if ((int(M) != M) or (M < 0)):
        raise ValueError('Window length M must be a non-negative integer')
    return (M <= 1)
[ "def", "_len_guards", "(", "M", ")", ":", "if", "(", "(", "int", "(", "M", ")", "!=", "M", ")", "or", "(", "M", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'Window length M must be a non-negative integer'", ")", "return", "(", "M", "<=", "1", ")" ]
handle small or incorrect window lengths .
train
false
3,134
def _fit_and_predict(estimator, X, y, train, test, verbose, fit_params, method):
    fit_params = (fit_params if (fit_params is not None) else {})
    fit_params = dict([(k, _index_param_value(X, v, train)) for (k, v) in fit_params.items()])
    (X_train, y_train) = _safe_split(estimator, X, y, train)
    (X_test, _) = _safe_split(estimator, X, y, test, train)
    if (y_train is None):
        estimator.fit(X_train, **fit_params)
    else:
        estimator.fit(X_train, y_train, **fit_params)
    func = getattr(estimator, method)
    predictions = func(X_test)
    if (method in ['decision_function', 'predict_proba', 'predict_log_proba']):
        n_classes = len(set(y))
        predictions_ = np.zeros((X_test.shape[0], n_classes))
        if ((method == 'decision_function') and (len(estimator.classes_) == 2)):
            predictions_[:, estimator.classes_[(-1)]] = predictions
        else:
            predictions_[:, estimator.classes_] = predictions
        predictions = predictions_
    return (predictions, test)
[ "def", "_fit_and_predict", "(", "estimator", ",", "X", ",", "y", ",", "train", ",", "test", ",", "verbose", ",", "fit_params", ",", "method", ")", ":", "fit_params", "=", "(", "fit_params", "if", "(", "fit_params", "is", "not", "None", ")", "else", "{", "}", ")", "fit_params", "=", "dict", "(", "[", "(", "k", ",", "_index_param_value", "(", "X", ",", "v", ",", "train", ")", ")", "for", "(", "k", ",", "v", ")", "in", "fit_params", ".", "items", "(", ")", "]", ")", "(", "X_train", ",", "y_train", ")", "=", "_safe_split", "(", "estimator", ",", "X", ",", "y", ",", "train", ")", "(", "X_test", ",", "_", ")", "=", "_safe_split", "(", "estimator", ",", "X", ",", "y", ",", "test", ",", "train", ")", "if", "(", "y_train", "is", "None", ")", ":", "estimator", ".", "fit", "(", "X_train", ",", "**", "fit_params", ")", "else", ":", "estimator", ".", "fit", "(", "X_train", ",", "y_train", ",", "**", "fit_params", ")", "func", "=", "getattr", "(", "estimator", ",", "method", ")", "predictions", "=", "func", "(", "X_test", ")", "if", "(", "method", "in", "[", "'decision_function'", ",", "'predict_proba'", ",", "'predict_log_proba'", "]", ")", ":", "n_classes", "=", "len", "(", "set", "(", "y", ")", ")", "predictions_", "=", "np", ".", "zeros", "(", "(", "X_test", ".", "shape", "[", "0", "]", ",", "n_classes", ")", ")", "if", "(", "(", "method", "==", "'decision_function'", ")", "and", "(", "len", "(", "estimator", ".", "classes_", ")", "==", "2", ")", ")", ":", "predictions_", "[", ":", ",", "estimator", ".", "classes_", "[", "(", "-", "1", ")", "]", "]", "=", "predictions", "else", ":", "predictions_", "[", ":", ",", "estimator", ".", "classes_", "]", "=", "predictions", "predictions", "=", "predictions_", "return", "(", "predictions", ",", "test", ")" ]
fit estimator and predict values for a given dataset split .
train
false
3,135
def layer_option(option, opt, value, parser):
    try:
        dest = int(value)
    except ValueError:
        dest = value
    setattr(parser.values, option.dest, dest)
[ "def", "layer_option", "(", "option", ",", "opt", ",", "value", ",", "parser", ")", ":", "try", ":", "dest", "=", "int", "(", "value", ")", "except", "ValueError", ":", "dest", "=", "value", "setattr", "(", "parser", ".", "values", ",", "option", ".", "dest", ",", "dest", ")" ]
callback for make_option for the ogrinspect layer_key keyword option which may be an integer or a string .
train
false
3,136
def create_nzb_string(file_elements, xmlns):
    root_element = ETree.Element('nzb')
    if xmlns:
        root_element.set('xmlns', xmlns)
    for cur_file in file_elements:
        root_element.append(strip_xmlns(cur_file, xmlns))
    return ETree.tostring(ss(root_element))
[ "def", "create_nzb_string", "(", "file_elements", ",", "xmlns", ")", ":", "root_element", "=", "ETree", ".", "Element", "(", "'nzb'", ")", "if", "xmlns", ":", "root_element", ".", "set", "(", "'xmlns'", ",", "xmlns", ")", "for", "cur_file", "in", "file_elements", ":", "root_element", ".", "append", "(", "strip_xmlns", "(", "cur_file", ",", "xmlns", ")", ")", "return", "ETree", ".", "tostring", "(", "ss", "(", "root_element", ")", ")" ]
extract extra info from file_elements .
train
false
3,137
def require_perms(view_func, required):
    from horizon.exceptions import NotAuthorized
    current_perms = getattr(view_func, '_required_perms', set([]))
    view_func._required_perms = (current_perms | set(required))
    @functools.wraps(view_func, assigned=available_attrs(view_func))
    def dec(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.has_perms(view_func._required_perms):
                return view_func(request, *args, **kwargs)
        raise NotAuthorized((_('You are not authorized to access %s') % request.path))
    if required:
        return dec
    else:
        return view_func
[ "def", "require_perms", "(", "view_func", ",", "required", ")", ":", "from", "horizon", ".", "exceptions", "import", "NotAuthorized", "current_perms", "=", "getattr", "(", "view_func", ",", "'_required_perms'", ",", "set", "(", "[", "]", ")", ")", "view_func", ".", "_required_perms", "=", "(", "current_perms", "|", "set", "(", "required", ")", ")", "@", "functools", ".", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "dec", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "if", "request", ".", "user", ".", "has_perms", "(", "view_func", ".", "_required_perms", ")", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "raise", "NotAuthorized", "(", "(", "_", "(", "'You are not authorized to access %s'", ")", "%", "request", ".", "path", ")", ")", "if", "required", ":", "return", "dec", "else", ":", "return", "view_func" ]
enforces permission-based access controls .
train
true
3,138
def fisher_z_transform(r):
    if (abs(r) >= 1):
        return nan
    return (0.5 * log(((1.0 + r) / (1.0 - r))))
[ "def", "fisher_z_transform", "(", "r", ")", ":", "if", "(", "abs", "(", "r", ")", ">=", "1", ")", ":", "return", "nan", "return", "(", "0.5", "*", "log", "(", "(", "(", "1.0", "+", "r", ")", "/", "(", "1.0", "-", "r", ")", ")", ")", ")" ]
calculate the fisher z transform of a correlation coefficient .
train
false
3,139
def test_nm1_fit():
    ratio = 'auto'
    nm1 = NearMiss(ratio=ratio, random_state=RND_SEED, version=VERSION_NEARMISS)
    nm1.fit(X, Y)
    assert_equal(nm1.min_c_, 0)
    assert_equal(nm1.maj_c_, 2)
    assert_equal(nm1.stats_c_[0], 3)
    assert_equal(nm1.stats_c_[1], 5)
    assert_equal(nm1.stats_c_[2], 7)
[ "def", "test_nm1_fit", "(", ")", ":", "ratio", "=", "'auto'", "nm1", "=", "NearMiss", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ",", "version", "=", "VERSION_NEARMISS", ")", "nm1", ".", "fit", "(", "X", ",", "Y", ")", "assert_equal", "(", "nm1", ".", "min_c_", ",", "0", ")", "assert_equal", "(", "nm1", ".", "maj_c_", ",", "2", ")", "assert_equal", "(", "nm1", ".", "stats_c_", "[", "0", "]", ",", "3", ")", "assert_equal", "(", "nm1", ".", "stats_c_", "[", "1", "]", ",", "5", ")", "assert_equal", "(", "nm1", ".", "stats_c_", "[", "2", "]", ",", "7", ")" ]
test the fitting method .
train
false
3,140
def get_default_project():
    result = check_run_quick('gcloud config list', echo=False)
    return re.search('project = (.*)\n', result.stdout).group(1)
[ "def", "get_default_project", "(", ")", ":", "result", "=", "check_run_quick", "(", "'gcloud config list'", ",", "echo", "=", "False", ")", "return", "re", ".", "search", "(", "'project = (.*)\\n'", ",", "result", ".", "stdout", ")", ".", "group", "(", "1", ")" ]
determine the default project name .
train
false
3,141
def NamedTuple(typename, fields):
    fields = [(n, t) for (n, t) in fields]
    cls = collections.namedtuple(typename, [n for (n, t) in fields])
    cls._field_types = dict(fields)
    try:
        cls.__module__ = sys._getframe(1).f_globals.get(u'__name__', u'__main__')
    except (AttributeError, ValueError):
        pass
    return cls
[ "def", "NamedTuple", "(", "typename", ",", "fields", ")", ":", "fields", "=", "[", "(", "n", ",", "t", ")", "for", "(", "n", ",", "t", ")", "in", "fields", "]", "cls", "=", "collections", ".", "namedtuple", "(", "typename", ",", "[", "n", "for", "(", "n", ",", "t", ")", "in", "fields", "]", ")", "cls", ".", "_field_types", "=", "dict", "(", "fields", ")", "try", ":", "cls", ".", "__module__", "=", "sys", ".", "_getframe", "(", "1", ")", ".", "f_globals", ".", "get", "(", "u'__name__'", ",", "u'__main__'", ")", "except", "(", "AttributeError", ",", "ValueError", ")", ":", "pass", "return", "cls" ]
typed version of namedtuple .
train
false
3,142
def common_environment():
    env = dict(LC_ALL='en_US.UTF-8', PATH=os.environ.get('PATH', os.defpath))
    required = ('HOME',)
    optional = ('HTTPTESTER', 'SSH_AUTH_SOCK')
    env.update(pass_vars(required=required, optional=optional))
    return env
[ "def", "common_environment", "(", ")", ":", "env", "=", "dict", "(", "LC_ALL", "=", "'en_US.UTF-8'", ",", "PATH", "=", "os", ".", "environ", ".", "get", "(", "'PATH'", ",", "os", ".", "defpath", ")", ")", "required", "=", "(", "'HOME'", ",", ")", "optional", "=", "(", "'HTTPTESTER'", ",", "'SSH_AUTH_SOCK'", ")", "env", ".", "update", "(", "pass_vars", "(", "required", "=", "required", ",", "optional", "=", "optional", ")", ")", "return", "env" ]
common environment used for executing all programs .
train
false
3,143
def _parse_kern_pairs(fh):
    line = fh.readline()
    if (not line.startswith('StartKernPairs')):
        raise RuntimeError(('Bad start of kern pairs data: %s' % line))
    d = {}
    while 1:
        line = fh.readline()
        if (not line):
            break
        line = line.rstrip()
        if (len(line) == 0):
            continue
        if line.startswith('EndKernPairs'):
            fh.readline()
            return d
        vals = line.split()
        if ((len(vals) != 4) or (vals[0] != 'KPX')):
            raise RuntimeError(('Bad kern pairs line: %s' % line))
        (c1, c2, val) = (vals[1], vals[2], _to_float(vals[3]))
        d[(c1, c2)] = val
    raise RuntimeError('Bad kern pairs parse')
[ "def", "_parse_kern_pairs", "(", "fh", ")", ":", "line", "=", "fh", ".", "readline", "(", ")", "if", "(", "not", "line", ".", "startswith", "(", "'StartKernPairs'", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Bad start of kern pairs data: %s'", "%", "line", ")", ")", "d", "=", "{", "}", "while", "1", ":", "line", "=", "fh", ".", "readline", "(", ")", "if", "(", "not", "line", ")", ":", "break", "line", "=", "line", ".", "rstrip", "(", ")", "if", "(", "len", "(", "line", ")", "==", "0", ")", ":", "continue", "if", "line", ".", "startswith", "(", "'EndKernPairs'", ")", ":", "fh", ".", "readline", "(", ")", "return", "d", "vals", "=", "line", ".", "split", "(", ")", "if", "(", "(", "len", "(", "vals", ")", "!=", "4", ")", "or", "(", "vals", "[", "0", "]", "!=", "'KPX'", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Bad kern pairs line: %s'", "%", "line", ")", ")", "(", "c1", ",", "c2", ",", "val", ")", "=", "(", "vals", "[", "1", "]", ",", "vals", "[", "2", "]", ",", "_to_float", "(", "vals", "[", "3", "]", ")", ")", "d", "[", "(", "c1", ",", "c2", ")", "]", "=", "val", "raise", "RuntimeError", "(", "'Bad kern pairs parse'", ")" ]
return a kern pairs dictionary; keys are tuples and values are the kern pair value .
train
false
3,144
def fileBitFilter(mode):
    for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
        if (mode & bit):
            mode -= bit
    return mode
[ "def", "fileBitFilter", "(", "mode", ")", ":", "for", "bit", "in", "[", "stat", ".", "S_IXUSR", ",", "stat", ".", "S_IXGRP", ",", "stat", ".", "S_IXOTH", ",", "stat", ".", "S_ISUID", ",", "stat", ".", "S_ISGID", "]", ":", "if", "(", "mode", "&", "bit", ")", ":", "mode", "-=", "bit", "return", "mode" ]
strip special filesystem bits from file .
train
false
3,145
@sopel.module.commands(u'update')
def pm_f_update(bot, trigger):
    if trigger.is_privmsg:
        f_update(bot, trigger)
[ "@", "sopel", ".", "module", ".", "commands", "(", "u'update'", ")", "def", "pm_f_update", "(", "bot", ",", "trigger", ")", ":", "if", "trigger", ".", "is_privmsg", ":", "f_update", "(", "bot", ",", "trigger", ")" ]
wrapper for allowing delivery of .
train
false
3,147
def _iter_modules(path):
    import os
    import pkgutil
    if hasattr(pkgutil, 'iter_modules'):
        for (importer, modname, ispkg) in pkgutil.iter_modules(path):
            (yield (modname, ispkg))
        return
    from inspect import getmodulename
    from pydoc import ispackage
    found = set()
    for path in path:
        for filename in os.listdir(path):
            p = os.path.join(path, filename)
            modname = getmodulename(filename)
            if (modname and (modname != '__init__')):
                if (modname not in found):
                    found.add(modname)
                    (yield (modname, ispackage(modname)))
[ "def", "_iter_modules", "(", "path", ")", ":", "import", "os", "import", "pkgutil", "if", "hasattr", "(", "pkgutil", ",", "'iter_modules'", ")", ":", "for", "(", "importer", ",", "modname", ",", "ispkg", ")", "in", "pkgutil", ".", "iter_modules", "(", "path", ")", ":", "(", "yield", "(", "modname", ",", "ispkg", ")", ")", "return", "from", "inspect", "import", "getmodulename", "from", "pydoc", "import", "ispackage", "found", "=", "set", "(", ")", "for", "path", "in", "path", ":", "for", "filename", "in", "os", ".", "listdir", "(", "path", ")", ":", "p", "=", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", "modname", "=", "getmodulename", "(", "filename", ")", "if", "(", "modname", "and", "(", "modname", "!=", "'__init__'", ")", ")", ":", "if", "(", "modname", "not", "in", "found", ")", ":", "found", ".", "add", "(", "modname", ")", "(", "yield", "(", "modname", ",", "ispackage", "(", "modname", ")", ")", ")" ]
iterate over all modules in a package .
train
false
3,148
def binSearch(arr, val):
    i = bisect_left(arr, val)
    if ((i != len(arr)) and (arr[i] == val)):
        return i
    return (-1)
[ "def", "binSearch", "(", "arr", ",", "val", ")", ":", "i", "=", "bisect_left", "(", "arr", ",", "val", ")", "if", "(", "(", "i", "!=", "len", "(", "arr", ")", ")", "and", "(", "arr", "[", "i", "]", "==", "val", ")", ")", ":", "return", "i", "return", "(", "-", "1", ")" ]
function for running binary search on a sorted list .
train
true
3,149
def shift_time(dtime, shift):
    return (dtime + timedelta(seconds=shift))
[ "def", "shift_time", "(", "dtime", ",", "shift", ")", ":", "return", "(", "dtime", "+", "timedelta", "(", "seconds", "=", "shift", ")", ")" ]
adds/deletes an integer amount of seconds from a datetime specification .
train
false
3,151
def meshgrid_triangles(n):
    tri = []
    for i in range((n - 1)):
        for j in range((n - 1)):
            a = (i + (j * n))
            b = ((i + 1) + (j * n))
            c = (i + ((j + 1) * n))
            d = ((i + 1) + ((j + 1) * n))
            tri += [[a, b, d], [a, d, c]]
    return np.array(tri, dtype=np.int32)
[ "def", "meshgrid_triangles", "(", "n", ")", ":", "tri", "=", "[", "]", "for", "i", "in", "range", "(", "(", "n", "-", "1", ")", ")", ":", "for", "j", "in", "range", "(", "(", "n", "-", "1", ")", ")", ":", "a", "=", "(", "i", "+", "(", "j", "*", "n", ")", ")", "b", "=", "(", "(", "i", "+", "1", ")", "+", "(", "j", "*", "n", ")", ")", "c", "=", "(", "i", "+", "(", "(", "j", "+", "1", ")", "*", "n", ")", ")", "d", "=", "(", "(", "i", "+", "1", ")", "+", "(", "(", "j", "+", "1", ")", "*", "n", ")", ")", "tri", "+=", "[", "[", "a", ",", "b", ",", "d", "]", ",", "[", "a", ",", "d", ",", "c", "]", "]", "return", "np", ".", "array", "(", "tri", ",", "dtype", "=", "np", ".", "int32", ")" ]
utility function .
train
false
3,152
def stable_cumsum(arr, axis=None, rtol=1e-05, atol=1e-08):
    if (np_version < (1, 9)):
        return np.cumsum(arr, axis=axis, dtype=np.float64)
    out = np.cumsum(arr, axis=axis, dtype=np.float64)
    expected = np.sum(arr, axis=axis, dtype=np.float64)
    if (not np.all(np.isclose(out.take((-1), axis=axis), expected, rtol=rtol, atol=atol, equal_nan=True))):
        warnings.warn('cumsum was found to be unstable: its last element does not correspond to sum', RuntimeWarning)
    return out
[ "def", "stable_cumsum", "(", "arr", ",", "axis", "=", "None", ",", "rtol", "=", "1e-05", ",", "atol", "=", "1e-08", ")", ":", "if", "(", "np_version", "<", "(", "1", ",", "9", ")", ")", ":", "return", "np", ".", "cumsum", "(", "arr", ",", "axis", "=", "axis", ",", "dtype", "=", "np", ".", "float64", ")", "out", "=", "np", ".", "cumsum", "(", "arr", ",", "axis", "=", "axis", ",", "dtype", "=", "np", ".", "float64", ")", "expected", "=", "np", ".", "sum", "(", "arr", ",", "axis", "=", "axis", ",", "dtype", "=", "np", ".", "float64", ")", "if", "(", "not", "np", ".", "all", "(", "np", ".", "isclose", "(", "out", ".", "take", "(", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", ",", "expected", ",", "rtol", "=", "rtol", ",", "atol", "=", "atol", ",", "equal_nan", "=", "True", ")", ")", ")", ":", "warnings", ".", "warn", "(", "'cumsum was found to be unstable: its last element does not correspond to sum'", ",", "RuntimeWarning", ")", "return", "out" ]
use high precision for cumsum and check that final value matches sum parameters arr : array-like to be cumulatively summed as flat axis : int .
train
false
3,154
def funshion_download_by_drama_url(url, output_dir='.', merge=False, info_only=False):
    id = r1('http://www.fun.tv/vplay/.*g-(\\d+)', url)
    video_list = funshion_drama_id_to_vid(id)
    for video in video_list:
        funshion_download_by_id((video[0], id), output_dir=output_dir, merge=merge, info_only=info_only)
[ "def", "funshion_download_by_drama_url", "(", "url", ",", "output_dir", "=", "'.'", ",", "merge", "=", "False", ",", "info_only", "=", "False", ")", ":", "id", "=", "r1", "(", "'http://www.fun.tv/vplay/.*g-(\\\\d+)'", ",", "url", ")", "video_list", "=", "funshion_drama_id_to_vid", "(", "id", ")", "for", "video", "in", "video_list", ":", "funshion_download_by_id", "(", "(", "video", "[", "0", "]", ",", "id", ")", ",", "output_dir", "=", "output_dir", ",", "merge", "=", "merge", ",", "info_only", "=", "info_only", ")" ]
str->none url = URL .
train
false
3,155
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
@utils.arg('console_type', metavar='<console-type>', help=_('Type of rdp console ("rdp-html5").'))
def do_get_rdp_console(cs, args):
    server = _find_server(cs, args.server)
    data = server.get_rdp_console(args.console_type)
    print_console(cs, data)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'console_type'", ",", "metavar", "=", "'<console-type>'", ",", "help", "=", "_", "(", "'Type of rdp console (\"rdp-html5\").'", ")", ")", "def", "do_get_rdp_console", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "data", "=", "server", ".", "get_rdp_console", "(", "args", ".", "console_type", ")", "print_console", "(", "cs", ",", "data", ")" ]
get a rdp console to a server .
train
false
3,156
def build_single_handler_application(path, argv=None):
    argv = (argv or [])
    path = os.path.abspath(path)
    if os.path.isdir(path):
        handler = DirectoryHandler(filename=path, argv=argv)
    elif path.endswith('.ipynb'):
        handler = NotebookHandler(filename=path, argv=argv)
    elif path.endswith('.py'):
        if path.endswith('main.py'):
            warnings.warn(DIRSTYLE_MAIN_WARNING)
        handler = ScriptHandler(filename=path, argv=argv)
    else:
        raise ValueError(("Expected a '.py' script or '.ipynb' notebook, got: '%s'" % path))
    if handler.failed:
        raise RuntimeError(('Error loading %s:\n\n%s\n%s ' % (path, handler.error, handler.error_detail)))
    application = Application(handler)
    return application
[ "def", "build_single_handler_application", "(", "path", ",", "argv", "=", "None", ")", ":", "argv", "=", "(", "argv", "or", "[", "]", ")", "path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "handler", "=", "DirectoryHandler", "(", "filename", "=", "path", ",", "argv", "=", "argv", ")", "elif", "path", ".", "endswith", "(", "'.ipynb'", ")", ":", "handler", "=", "NotebookHandler", "(", "filename", "=", "path", ",", "argv", "=", "argv", ")", "elif", "path", ".", "endswith", "(", "'.py'", ")", ":", "if", "path", ".", "endswith", "(", "'main.py'", ")", ":", "warnings", ".", "warn", "(", "DIRSTYLE_MAIN_WARNING", ")", "handler", "=", "ScriptHandler", "(", "filename", "=", "path", ",", "argv", "=", "argv", ")", "else", ":", "raise", "ValueError", "(", "(", "\"Expected a '.py' script or '.ipynb' notebook, got: '%s'\"", "%", "path", ")", ")", "if", "handler", ".", "failed", ":", "raise", "RuntimeError", "(", "(", "'Error loading %s:\\n\\n%s\\n%s '", "%", "(", "path", ",", "handler", ".", "error", ",", "handler", ".", "error_detail", ")", ")", ")", "application", "=", "Application", "(", "handler", ")", "return", "application" ]
return a bokeh application built using a single handler for a file or directory .
train
true
3,157
def x10_command(command):
    return check_output((['heyu'] + command.split(' ')), stderr=STDOUT)
[ "def", "x10_command", "(", "command", ")", ":", "return", "check_output", "(", "(", "[", "'heyu'", "]", "+", "command", ".", "split", "(", "' '", ")", ")", ",", "stderr", "=", "STDOUT", ")" ]
execute x10 command and check output .
train
false
3,158
def iddr_asvd(A, k):
    A = np.asfortranarray(A)
    (m, n) = A.shape
    w = np.empty(((((((2 * k) + 28) * m) + (((6 * k) + 21) * n)) + (25 * (k ** 2))) + 100), order='F')
    w_ = iddr_aidi(m, n, k)
    w[:w_.size] = w_
    (U, V, S, ier) = _id.iddr_asvd(A, k, w)
    if (ier != 0):
        raise _RETCODE_ERROR
    return (U, V, S)
[ "def", "iddr_asvd", "(", "A", ",", "k", ")", ":", "A", "=", "np", ".", "asfortranarray", "(", "A", ")", "(", "m", ",", "n", ")", "=", "A", ".", "shape", "w", "=", "np", ".", "empty", "(", "(", "(", "(", "(", "(", "(", "2", "*", "k", ")", "+", "28", ")", "*", "m", ")", "+", "(", "(", "(", "6", "*", "k", ")", "+", "21", ")", "*", "n", ")", ")", "+", "(", "25", "*", "(", "k", "**", "2", ")", ")", ")", "+", "100", ")", ",", "order", "=", "'F'", ")", "w_", "=", "iddr_aidi", "(", "m", ",", "n", ",", "k", ")", "w", "[", ":", "w_", ".", "size", "]", "=", "w_", "(", "U", ",", "V", ",", "S", ",", "ier", ")", "=", "_id", ".", "iddr_asvd", "(", "A", ",", "k", ",", "w", ")", "if", "(", "ier", "!=", "0", ")", ":", "raise", "_RETCODE_ERROR", "return", "(", "U", ",", "V", ",", "S", ")" ]
compute svd of a real matrix to a specified rank using random sampling .
train
false
3,159
def bzr_wc_target_exists_local_mods_no_force():
    test = 'bzr_wc_target_exists_local_mods_no_force'
    wt = ('%s-test-%s' % (DIR, test))
    puts(magenta(('Executing test: %s' % test)))
    from fabric.api import cd, run
    from fabtools.files import is_dir
    from fabtools import require
    require.bazaar.working_copy(REMOTE_URL, wt)
    assert is_dir(wt)
    with cd(wt):
        assert (run('bzr status') == '')
        run('echo "# a new comment" >> __init__.py')
        assert (run('bzr status') != '')
    try:
        require.bazaar.working_copy(REMOTE_URL, wt)
    except SystemExit:
        pass
    else:
        assert False, "working_copy didn't raise exception"
[ "def", "bzr_wc_target_exists_local_mods_no_force", "(", ")", ":", "test", "=", "'bzr_wc_target_exists_local_mods_no_force'", "wt", "=", "(", "'%s-test-%s'", "%", "(", "DIR", ",", "test", ")", ")", "puts", "(", "magenta", "(", "(", "'Executing test: %s'", "%", "test", ")", ")", ")", "from", "fabric", ".", "api", "import", "cd", ",", "run", "from", "fabtools", ".", "files", "import", "is_dir", "from", "fabtools", "import", "require", "require", ".", "bazaar", ".", "working_copy", "(", "REMOTE_URL", ",", "wt", ")", "assert", "is_dir", "(", "wt", ")", "with", "cd", "(", "wt", ")", ":", "assert", "(", "run", "(", "'bzr status'", ")", "==", "''", ")", "run", "(", "'echo \"# a new comment\" >> __init__.py'", ")", "assert", "(", "run", "(", "'bzr status'", ")", "!=", "''", ")", "try", ":", "require", ".", "bazaar", ".", "working_copy", "(", "REMOTE_URL", ",", "wt", ")", "except", "SystemExit", ":", "pass", "else", ":", "assert", "False", ",", "\"working_copy didn't raise exception\"" ]
test working copy when a target already exists and has local modifications but force was not specified .
train
false
3,161
@pytest.fixture def js_tester(webview, qtbot): return JSTester(webview, qtbot)
[ "@", "pytest", ".", "fixture", "def", "js_tester", "(", "webview", ",", "qtbot", ")", ":", "return", "JSTester", "(", "webview", ",", "qtbot", ")" ]
fixture to test javascript snippets .
train
false
3,162
def log_cursor(f): def wrapper(*a, **kw): try: SimpleCursorBase.execute = _execute ret = f(*a, **kw) finally: del SimpleCursorBase.execute return ret return wrapper
[ "def", "log_cursor", "(", "f", ")", ":", "def", "wrapper", "(", "*", "a", ",", "**", "kw", ")", ":", "try", ":", "SimpleCursorBase", ".", "execute", "=", "_execute", "ret", "=", "f", "(", "*", "a", ",", "**", "kw", ")", "finally", ":", "del", "SimpleCursorBase", ".", "execute", "return", "ret", "return", "wrapper" ]
prints sql and params to stdout .
train
false
3,164
def fix_eols(s): s = re.sub('(?<!\\r)\\n', CRLF, s) s = re.sub('\\r(?!\\n)', CRLF, s) return s
[ "def", "fix_eols", "(", "s", ")", ":", "s", "=", "re", ".", "sub", "(", "'(?<!\\\\r)\\\\n'", ",", "CRLF", ",", "s", ")", "s", "=", "re", ".", "sub", "(", "'\\\\r(?!\\\\n)'", ",", "CRLF", ",", "s", ")", "return", "s" ]
replace all line-ending characters with crlf .
train
false
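A minimal runnable sketch of the record above, assuming the module-level CRLF constant it references equals '\r\n' (as in the stdlib email package this helper resembles):

import re

CRLF = '\r\n'  # assumed value of the module-level constant

def fix_eols(s):
    s = re.sub(r'(?<!\r)\n', CRLF, s)  # bare \n becomes \r\n
    s = re.sub(r'\r(?!\n)', CRLF, s)   # bare \r becomes \r\n
    return s

assert fix_eols('a\nb\rc\r\nd') == 'a\r\nb\r\nc\r\nd'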
3,165
@app.route('/library') def library(): papers = papers_from_library() ret = encode_json(papers, 500) if g.user: msg = ('%d papers in your library:' % (len(ret),)) else: msg = 'You must be logged in. Once you are, you can save papers to your library (with the save icon on the right of each paper) and they will show up here.' ctx = default_context(papers, render_format='library', msg=msg) return render_template('main.html', **ctx)
[ "@", "app", ".", "route", "(", "'/library'", ")", "def", "library", "(", ")", ":", "papers", "=", "papers_from_library", "(", ")", "ret", "=", "encode_json", "(", "papers", ",", "500", ")", "if", "g", ".", "user", ":", "msg", "=", "(", "'%d papers in your library:'", "%", "(", "len", "(", "ret", ")", ",", ")", ")", "else", ":", "msg", "=", "'You must be logged in. Once you are, you can save papers to your library (with the save icon on the right of each paper) and they will show up here.'", "ctx", "=", "default_context", "(", "papers", ",", "render_format", "=", "'library'", ",", "msg", "=", "msg", ")", "return", "render_template", "(", "'main.html'", ",", "**", "ctx", ")" ]
render the user's library .
train
false
3,169
def teardown_module(): reload_module(db_replicator)
[ "def", "teardown_module", "(", ")", ":", "reload_module", "(", "db_replicator", ")" ]
tear down the module .
train
false
3,171
def unpooling_2d(x, ksize, stride=None, pad=0, outsize=None, cover_all=True): return Unpooling2D(ksize, stride, pad, outsize, cover_all)(x)
[ "def", "unpooling_2d", "(", "x", ",", "ksize", ",", "stride", "=", "None", ",", "pad", "=", "0", ",", "outsize", "=", "None", ",", "cover_all", "=", "True", ")", ":", "return", "Unpooling2D", "(", "ksize", ",", "stride", ",", "pad", ",", "outsize", ",", "cover_all", ")", "(", "x", ")" ]
inverse operation of pooling for 2d array .
train
false
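A hedged usage sketch; Chainer exposes this wrapper as chainer.functions.unpooling_2d, and with cover_all=False each input element is tiled into a ksize-by-ksize block:

import numpy as np
import chainer.functions as F

x = np.arange(16, dtype=np.float32).reshape(1, 1, 4, 4)
y = F.unpooling_2d(x, ksize=2, cover_all=False)
print(y.shape)  # (1, 1, 8, 8): stride defaults to ksize, so each axis doubles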
3,172
@task @timed def i18n_fastgenerate(): sh('i18n_tool generate')
[ "@", "task", "@", "timed", "def", "i18n_fastgenerate", "(", ")", ":", "sh", "(", "'i18n_tool generate'", ")" ]
compile localizable strings from sources without re-extracting strings first .
train
false
3,173
def _check_to_bytes(self, in_string, encoding, expected): self.assertEqual(to_bytes(in_string, encoding), expected)
[ "def", "_check_to_bytes", "(", "self", ",", "in_string", ",", "encoding", ",", "expected", ")", ":", "self", ".", "assertEqual", "(", "to_bytes", "(", "in_string", ",", "encoding", ")", ",", "expected", ")" ]
test happy path of encoding to bytes .
train
false
3,174
def getAttributeDictionaryString(attributeDictionary): attributeDictionaryString = '' attributeDictionaryKeys = attributeDictionary.keys() attributeDictionaryKeys.sort(compareAttributeKeyAscending) for attributeDictionaryKey in attributeDictionaryKeys: valueString = str(attributeDictionary[attributeDictionaryKey]) if ('"' in valueString): attributeDictionaryString += (" %s='%s'" % (attributeDictionaryKey, valueString)) else: attributeDictionaryString += (' %s="%s"' % (attributeDictionaryKey, valueString)) return attributeDictionaryString
[ "def", "getAttributeDictionaryString", "(", "attributeDictionary", ")", ":", "attributeDictionaryString", "=", "''", "attributeDictionaryKeys", "=", "attributeDictionary", ".", "keys", "(", ")", "attributeDictionaryKeys", ".", "sort", "(", "compareAttributeKeyAscending", ")", "for", "attributeDictionaryKey", "in", "attributeDictionaryKeys", ":", "valueString", "=", "str", "(", "attributeDictionary", "[", "attributeDictionaryKey", "]", ")", "if", "(", "'\"'", "in", "valueString", ")", ":", "attributeDictionaryString", "+=", "(", "\" %s='%s'\"", "%", "(", "attributeDictionaryKey", ",", "valueString", ")", ")", "else", ":", "attributeDictionaryString", "+=", "(", "' %s=\"%s\"'", "%", "(", "attributeDictionaryKey", ",", "valueString", ")", ")", "return", "attributeDictionaryString" ]
get the attribute dictionary as a sorted xml attribute string .
train
false
3,175
def split_code_at_show(text): parts = [] is_doctest = contains_doctest(text) part = [] for line in text.split(u'\n'): if (((not is_doctest) and (line.strip() == u'plt.show()')) or (is_doctest and (line.strip() == u'>>> plt.show()'))): part.append(line) parts.append(u'\n'.join(part)) part = [] else: part.append(line) if u'\n'.join(part).strip(): parts.append(u'\n'.join(part)) return parts
[ "def", "split_code_at_show", "(", "text", ")", ":", "parts", "=", "[", "]", "is_doctest", "=", "contains_doctest", "(", "text", ")", "part", "=", "[", "]", "for", "line", "in", "text", ".", "split", "(", "u'\\n'", ")", ":", "if", "(", "(", "(", "not", "is_doctest", ")", "and", "(", "line", ".", "strip", "(", ")", "==", "u'plt.show()'", ")", ")", "or", "(", "is_doctest", "and", "(", "line", ".", "strip", "(", ")", "==", "u'>>> plt.show()'", ")", ")", ")", ":", "part", ".", "append", "(", "line", ")", "parts", ".", "append", "(", "u'\\n'", ".", "join", "(", "part", ")", ")", "part", "=", "[", "]", "else", ":", "part", ".", "append", "(", "line", ")", "if", "u'\\n'", ".", "join", "(", "part", ")", ".", "strip", "(", ")", ":", "parts", ".", "append", "(", "u'\\n'", ".", "join", "(", "part", ")", ")", "return", "parts" ]
split code at plt.show() calls .
train
false
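An illustrative call, assuming split_code_at_show from the record above is in scope, with a minimal stand-in for the contains_doctest helper it depends on:

def contains_doctest(text):
    # stand-in: the real helper looks for doctest-style '>>>' prompts
    return any(line.lstrip().startswith(u'>>>') for line in text.split(u'\n'))

code = u'import matplotlib.pyplot as plt\nplt.plot([1, 2])\nplt.show()\nplt.hist([3, 4])\nplt.show()'
parts = split_code_at_show(code)
print(len(parts))  # 2: one block per plt.show()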
3,178
def test_deleted_folder_on_fetch(monkeypatch, generic_client, constants): def raise_invalid_uid_exc(*args, **kwargs): raise imapclient.IMAPClient.Error('[UNAVAILABLE] UID FETCH Server error while fetching messages') monkeypatch.setattr('imapclient.IMAPClient.fetch', raise_invalid_uid_exc) generic_client.uids(['125'])
[ "def", "test_deleted_folder_on_fetch", "(", "monkeypatch", ",", "generic_client", ",", "constants", ")", ":", "def", "raise_invalid_uid_exc", "(", "*", "args", ",", "**", "kwargs", ")", ":", "raise", "imapclient", ".", "IMAPClient", ".", "Error", "(", "'[UNAVAILABLE] UID FETCH Server error while fetching messages'", ")", "monkeypatch", ".", "setattr", "(", "'imapclient.IMAPClient.fetch'", ",", "raise_invalid_uid_exc", ")", "generic_client", ".", "uids", "(", "[", "'125'", "]", ")" ]
test that a select failed examine error specifying that a folder doesn't exist is converted into a foldermissingerror .
train
false
3,179
def _get_dev2_hostname(backend, instance=None): try: return servers.get_hostname(server=backend, instance=instance) except servers.InvalidServerError: raise InvalidBackendError() except servers.InvalidInstancesError: raise InvalidInstanceError()
[ "def", "_get_dev2_hostname", "(", "backend", ",", "instance", "=", "None", ")", ":", "try", ":", "return", "servers", ".", "get_hostname", "(", "server", "=", "backend", ",", "instance", "=", "instance", ")", "except", "servers", ".", "InvalidServerError", ":", "raise", "InvalidBackendError", "(", ")", "except", "servers", ".", "InvalidInstancesError", ":", "raise", "InvalidInstanceError", "(", ")" ]
returns the hostname of a backend [instance] in devappserver2 .
train
false
3,180
def test_range(Chart): if ((Chart in (Pie, Treemap, Dot, SolidGauge)) or issubclass(Chart, BaseMap)): return chart = Chart() chart.range = (0, 100) chart.add('', [1, 2, 10]) q = chart.render_pyquery() axis = map(str, range(0, 101, 10)) if (Chart == Radar): axis = map(str, range(100, (-1), (-20))) z = ('x' if (getattr(chart, 'horizontal', False) or (Chart == Gauge)) else 'y') assert ([t.text for t in q(('.axis.%s .guides text' % z))] == list(axis))
[ "def", "test_range", "(", "Chart", ")", ":", "if", "(", "(", "Chart", "in", "(", "Pie", ",", "Treemap", ",", "Dot", ",", "SolidGauge", ")", ")", "or", "issubclass", "(", "Chart", ",", "BaseMap", ")", ")", ":", "return", "chart", "=", "Chart", "(", ")", "chart", ".", "range", "=", "(", "0", ",", "100", ")", "chart", ".", "add", "(", "''", ",", "[", "1", ",", "2", ",", "10", "]", ")", "q", "=", "chart", ".", "render_pyquery", "(", ")", "axis", "=", "map", "(", "str", ",", "range", "(", "0", ",", "101", ",", "10", ")", ")", "if", "(", "Chart", "==", "Radar", ")", ":", "axis", "=", "map", "(", "str", ",", "range", "(", "100", ",", "(", "-", "1", ")", ",", "(", "-", "20", ")", ")", ")", "z", "=", "(", "'x'", "if", "(", "getattr", "(", "chart", ",", "'horizontal'", ",", "False", ")", "or", "(", "Chart", "==", "Gauge", ")", ")", "else", "'y'", ")", "assert", "(", "[", "t", ".", "text", "for", "t", "in", "q", "(", "(", "'.axis.%s .guides text'", "%", "z", ")", ")", "]", "==", "list", "(", "axis", ")", ")" ]
test that setting a chart's range renders the expected axis guide labels .
train
false
3,181
def get_ref_to_doc(refname, title=''): ref = addnodes.pending_xref(reftype='ref', reftarget=refname, refexplicit=(title != ''), refdomain='std') ref += nodes.literal(title, title, classes=['xref']) return ref
[ "def", "get_ref_to_doc", "(", "refname", ",", "title", "=", "''", ")", ":", "ref", "=", "addnodes", ".", "pending_xref", "(", "reftype", "=", "'ref'", ",", "reftarget", "=", "refname", ",", "refexplicit", "=", "(", "title", "!=", "''", ")", ",", "refdomain", "=", "'std'", ")", "ref", "+=", "nodes", ".", "literal", "(", "title", ",", "title", ",", "classes", "=", "[", "'xref'", "]", ")", "return", "ref" ]
returns a node that links to a document with the given ref name .
train
false
3,184
def bottomElementNode(derivation, target): xmlObject = target.xmlObject if (xmlObject == None): print 'Warning, bottomTarget in bottom could not get xmlObject for:' print target print derivation.elementNode return targetMatrix = matrix.getBranchMatrixSetElementNode(target) lift = derivation.altitude transformedPaths = xmlObject.getTransformedPaths() if (len(transformedPaths) > 0): lift += (derivation.getAdditionalPathLift() - euclidean.getBottomByPaths(transformedPaths)) else: lift -= boolean_geometry.getMinimumZ(xmlObject) targetMatrix.tetragrid = matrix.getIdentityTetragrid(targetMatrix.tetragrid) targetMatrix.tetragrid[2][3] += lift matrix.setElementNodeDictionaryMatrix(target, targetMatrix)
[ "def", "bottomElementNode", "(", "derivation", ",", "target", ")", ":", "xmlObject", "=", "target", ".", "xmlObject", "if", "(", "xmlObject", "==", "None", ")", ":", "print", "'Warning, bottomTarget in bottom could not get xmlObject for:'", "print", "target", "print", "derivation", ".", "elementNode", "return", "targetMatrix", "=", "matrix", ".", "getBranchMatrixSetElementNode", "(", "target", ")", "lift", "=", "derivation", ".", "altitude", "transformedPaths", "=", "xmlObject", ".", "getTransformedPaths", "(", ")", "if", "(", "len", "(", "transformedPaths", ")", ">", "0", ")", ":", "lift", "+=", "(", "derivation", ".", "getAdditionalPathLift", "(", ")", "-", "euclidean", ".", "getBottomByPaths", "(", "transformedPaths", ")", ")", "else", ":", "lift", "-=", "boolean_geometry", ".", "getMinimumZ", "(", "xmlObject", ")", "targetMatrix", ".", "tetragrid", "=", "matrix", ".", "getIdentityTetragrid", "(", "targetMatrix", ".", "tetragrid", ")", "targetMatrix", ".", "tetragrid", "[", "2", "]", "[", "3", "]", "+=", "lift", "matrix", ".", "setElementNodeDictionaryMatrix", "(", "target", ",", "targetMatrix", ")" ]
bottom target .
train
false
3,185
@not_implemented_for('directed') def is_connected(G): if (len(G) == 0): raise nx.NetworkXPointlessConcept('Connectivity is undefined ', 'for the null graph.') return (len(set(_plain_bfs(G, arbitrary_element(G)))) == len(G))
[ "@", "not_implemented_for", "(", "'directed'", ")", "def", "is_connected", "(", "G", ")", ":", "if", "(", "len", "(", "G", ")", "==", "0", ")", ":", "raise", "nx", ".", "NetworkXPointlessConcept", "(", "'Connectivity is undefined '", ",", "'for the null graph.'", ")", "return", "(", "len", "(", "set", "(", "_plain_bfs", "(", "G", ",", "arbitrary_element", "(", "G", ")", ")", ")", ")", "==", "len", "(", "G", ")", ")" ]
return true if the graph is connected .
train
false
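Usage through networkx, where this decorated helper is exposed as nx.is_connected:

import networkx as nx

G = nx.path_graph(4)        # 0-1-2-3, a single component
print(nx.is_connected(G))   # True
G.add_node(99)              # an isolated node splits the graph
print(nx.is_connected(G))   # False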
3,186
def page_title_breadcrumbs(*crumbs, **kwargs): platform_name = get_value('platform_name', settings.PLATFORM_NAME) separator = kwargs.get('separator', ' | ') crumbs = [c for c in crumbs if (c is not None)] if crumbs: return u'{}{}{}'.format(separator.join(crumbs), separator, platform_name) else: return platform_name
[ "def", "page_title_breadcrumbs", "(", "*", "crumbs", ",", "**", "kwargs", ")", ":", "platform_name", "=", "get_value", "(", "'platform_name'", ",", "settings", ".", "PLATFORM_NAME", ")", "separator", "=", "kwargs", ".", "get", "(", "'separator'", ",", "' | '", ")", "crumbs", "=", "[", "c", "for", "c", "in", "crumbs", "if", "(", "c", "is", "not", "None", ")", "]", "if", "crumbs", ":", "return", "u'{}{}{}'", ".", "format", "(", "separator", ".", "join", "(", "crumbs", ")", ",", "separator", ",", "platform_name", ")", "else", ":", "return", "platform_name" ]
this function creates a suitable page title in the form: specific | less specific | general | edx . it will output the correct platform name for the request .
train
false
3,187
def welcome_osf4m(email): from website.files.models import OsfStorageFileNode if email.user.date_last_login: if (email.user.date_last_login > (timezone.now() - settings.WELCOME_OSF4M_WAIT_TIME_GRACE)): return False upload = OsfStorageFileNode.load(email.data['fid']) if upload: email.data['downloads'] = upload.get_download_count() else: email.data['downloads'] = 0 email.save() return True
[ "def", "welcome_osf4m", "(", "email", ")", ":", "from", "website", ".", "files", ".", "models", "import", "OsfStorageFileNode", "if", "email", ".", "user", ".", "date_last_login", ":", "if", "(", "email", ".", "user", ".", "date_last_login", ">", "(", "timezone", ".", "now", "(", ")", "-", "settings", ".", "WELCOME_OSF4M_WAIT_TIME_GRACE", ")", ")", ":", "return", "False", "upload", "=", "OsfStorageFileNode", ".", "load", "(", "email", ".", "data", "[", "'fid'", "]", ")", "if", "upload", ":", "email", ".", "data", "[", "'downloads'", "]", "=", "upload", ".", "get_download_count", "(", ")", "else", ":", "email", ".", "data", "[", "'downloads'", "]", "=", "0", "email", ".", "save", "(", ")", "return", "True" ]
presend for the osf4m welcome email: skip users who logged in recently, otherwise record the upload's download count .
train
false
3,188
def _CanBreakBefore(prev_token, cur_token): pval = prev_token.value cval = cur_token.value if py3compat.PY3: if ((pval == 'yield') and (cval == 'from')): return False if ((pval in {'async', 'await'}) and (cval in {'def', 'with', 'for'})): return False if (cur_token.split_penalty >= split_penalty.UNBREAKABLE): return False if (pval == '@'): return False if (cval == ':'): return False if (cval == ','): return False if (prev_token.is_name and (cval == '(')): return False if (prev_token.is_name and (cval == '[')): return False if (prev_token.is_name and (cval == '.')): return False if (cur_token.is_comment and (prev_token.lineno == cur_token.lineno)): return False if (format_token.Subtype.UNARY_OPERATOR in prev_token.subtypes): return False return True
[ "def", "_CanBreakBefore", "(", "prev_token", ",", "cur_token", ")", ":", "pval", "=", "prev_token", ".", "value", "cval", "=", "cur_token", ".", "value", "if", "py3compat", ".", "PY3", ":", "if", "(", "(", "pval", "==", "'yield'", ")", "and", "(", "cval", "==", "'from'", ")", ")", ":", "return", "False", "if", "(", "(", "pval", "in", "{", "'async'", ",", "'await'", "}", ")", "and", "(", "cval", "in", "{", "'def'", ",", "'with'", ",", "'for'", "}", ")", ")", ":", "return", "False", "if", "(", "cur_token", ".", "split_penalty", ">=", "split_penalty", ".", "UNBREAKABLE", ")", ":", "return", "False", "if", "(", "pval", "==", "'@'", ")", ":", "return", "False", "if", "(", "cval", "==", "':'", ")", ":", "return", "False", "if", "(", "cval", "==", "','", ")", ":", "return", "False", "if", "(", "prev_token", ".", "is_name", "and", "(", "cval", "==", "'('", ")", ")", ":", "return", "False", "if", "(", "prev_token", ".", "is_name", "and", "(", "cval", "==", "'['", ")", ")", ":", "return", "False", "if", "(", "prev_token", ".", "is_name", "and", "(", "cval", "==", "'.'", ")", ")", ":", "return", "False", "if", "(", "cur_token", ".", "is_comment", "and", "(", "prev_token", ".", "lineno", "==", "cur_token", ".", "lineno", ")", ")", ":", "return", "False", "if", "(", "format_token", ".", "Subtype", ".", "UNARY_OPERATOR", "in", "prev_token", ".", "subtypes", ")", ":", "return", "False", "return", "True" ]
return true if a line break may occur before the current token .
train
false
3,189
def str2hex(value, prefix='', glue=u'', format='%02X'): if isinstance(glue, str): glue = unicode(glue) if (0 < len(prefix)): text = [prefix] else: text = [] for character in value: text.append((format % ord(character))) return glue.join(text)
[ "def", "str2hex", "(", "value", ",", "prefix", "=", "''", ",", "glue", "=", "u''", ",", "format", "=", "'%02X'", ")", ":", "if", "isinstance", "(", "glue", ",", "str", ")", ":", "glue", "=", "unicode", "(", "glue", ")", "if", "(", "0", "<", "len", "(", "prefix", ")", ")", ":", "text", "=", "[", "prefix", "]", "else", ":", "text", "=", "[", "]", "for", "character", "in", "value", ":", "text", ".", "append", "(", "(", "format", "%", "ord", "(", "character", ")", ")", ")", "return", "glue", ".", "join", "(", "text", ")" ]
convert a string to hex encoded format .
train
false
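Two hedged calls, assuming str2hex from the record above runs under Python 2 (it relies on unicode and byte-wise ord):

print(str2hex('ABC'))                          # u'414243'
print(str2hex('ABC', prefix='0x', glue=u' '))  # u'0x 41 42 43'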
3,190
def accepts(*types): def check_accepts(f): assert (len(types) == f.__code__.co_argcount) def new_f(*args, **kwds): for (a, t) in zip(args, types): assert isinstance(a, t), ('arg %r does not match %s' % (a, t)) return f(*args, **kwds) new_f.__name__ = f.__name__ return new_f return check_accepts
[ "def", "accepts", "(", "*", "types", ")", ":", "def", "check_accepts", "(", "f", ")", ":", "assert", "(", "len", "(", "types", ")", "==", "f", ".", "__code__", ".", "co_argcount", ")", "def", "new_f", "(", "*", "args", ",", "**", "kwds", ")", ":", "for", "(", "a", ",", "t", ")", "in", "zip", "(", "args", ",", "types", ")", ":", "assert", "isinstance", "(", "a", ",", "t", ")", ",", "(", "'arg %r does not match %s'", "%", "(", "a", ",", "t", ")", ")", "return", "f", "(", "*", "args", ",", "**", "kwds", ")", "new_f", ".", "__name__", "=", "f", ".", "__name__", "return", "new_f", "return", "check_accepts" ]
decorator that asserts each positional argument matches its declared type .
train
false
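Despite the mislabelled description, this is a classic argument-type-checking decorator; a short sketch using the definition above:

@accepts(int, int)
def add(a, b):
    return a + b

print(add(1, 2))  # 3
add(1, 'x')       # AssertionError: arg 'x' does not match the declared int type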
3,191
def get_cython_version(): import Cython.Compiler.Main match = re.search('^([0-9]+)\\.([0-9]+)', Cython.Compiler.Main.Version.version) try: return map(int, match.groups()) except AttributeError: raise ImportError
[ "def", "get_cython_version", "(", ")", ":", "import", "Cython", ".", "Compiler", ".", "Main", "match", "=", "re", ".", "search", "(", "'^([0-9]+)\\\\.([0-9]+)'", ",", "Cython", ".", "Compiler", ".", "Main", ".", "Version", ".", "version", ")", "try", ":", "return", "map", "(", "int", ",", "match", ".", "groups", "(", ")", ")", "except", "AttributeError", ":", "raise", "ImportError" ]
returns: version as a pair of ints . raises: importerror if cython can't be loaded or its version can't be found .
train
true
3,192
def donor(): tablename = 'org_donor' table = s3db[tablename] tablename = 'org_donor' s3.crud_strings[tablename] = Storage(label_create=ADD_DONOR, title_display=T('Donor Details'), title_list=T('Donors Report'), title_update=T('Edit Donor'), label_list_button=T('List Donors'), label_delete_button=T('Delete Donor'), msg_record_created=T('Donor added'), msg_record_modified=T('Donor updated'), msg_record_deleted=T('Donor deleted'), msg_list_empty=T('No Donors currently registered')) s3db.configure(tablename, listadd=False) output = s3_rest_controller() return output
[ "def", "donor", "(", ")", ":", "tablename", "=", "'org_donor'", "table", "=", "s3db", "[", "tablename", "]", "tablename", "=", "'org_donor'", "s3", ".", "crud_strings", "[", "tablename", "]", "=", "Storage", "(", "label_create", "=", "ADD_DONOR", ",", "title_display", "=", "T", "(", "'Donor Details'", ")", ",", "title_list", "=", "T", "(", "'Donors Report'", ")", ",", "title_update", "=", "T", "(", "'Edit Donor'", ")", ",", "label_list_button", "=", "T", "(", "'List Donors'", ")", ",", "label_delete_button", "=", "T", "(", "'Delete Donor'", ")", ",", "msg_record_created", "=", "T", "(", "'Donor added'", ")", ",", "msg_record_modified", "=", "T", "(", "'Donor updated'", ")", ",", "msg_record_deleted", "=", "T", "(", "'Donor deleted'", ")", ",", "msg_list_empty", "=", "T", "(", "'No Donors currently registered'", ")", ")", "s3db", ".", "configure", "(", "tablename", ",", "listadd", "=", "False", ")", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful crud controller .
train
false
3,193
def delete_network_acl(network_acl_id=None, network_acl_name=None, disassociate=False, region=None, key=None, keyid=None, profile=None): if disassociate: network_acl = _get_resource('network_acl', name=network_acl_name, region=region, key=key, keyid=keyid, profile=profile) if (network_acl and network_acl.associations): subnet_id = network_acl.associations[0].subnet_id try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.disassociate_network_acl(subnet_id) except BotoServerError: pass return _delete_resource(resource='network_acl', name=network_acl_name, resource_id=network_acl_id, region=region, key=key, keyid=keyid, profile=profile)
[ "def", "delete_network_acl", "(", "network_acl_id", "=", "None", ",", "network_acl_name", "=", "None", ",", "disassociate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "disassociate", ":", "network_acl", "=", "_get_resource", "(", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "network_acl", "and", "network_acl", ".", "associations", ")", ":", "subnet_id", "=", "network_acl", ".", "associations", "[", "0", "]", ".", "subnet_id", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "disassociate_network_acl", "(", "subnet_id", ")", "except", "BotoServerError", ":", "pass", "return", "_delete_resource", "(", "resource", "=", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "resource_id", "=", "network_acl_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")" ]
delete a network acl based on the network_acl_id or network_acl_name provided .
train
true
3,194
def show_env(): envs = ['PATH', 'ORACLE_HOME', 'TNS_ADMIN', 'NLS_LANG'] result = {} for env in envs: if (env in os.environ): result[env] = os.environ[env] return result
[ "def", "show_env", "(", ")", ":", "envs", "=", "[", "'PATH'", ",", "'ORACLE_HOME'", ",", "'TNS_ADMIN'", ",", "'NLS_LANG'", "]", "result", "=", "{", "}", "for", "env", "in", "envs", ":", "if", "(", "env", "in", "os", ".", "environ", ")", ":", "result", "[", "env", "]", "=", "os", ".", "environ", "[", "env", "]", "return", "result" ]
show environment used by oracle client .
train
true
3,195
def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer): stream = ShaStreamCls(stream) tell = stream.tell write = stream.write version = 2 write('DIRC') write(pack('>LL', version, len(entries))) for entry in entries: beginoffset = tell() write(entry[4]) write(entry[5]) path = entry[3] path = force_bytes(path, encoding=defenc) plen = (len(path) & CE_NAMEMASK) assert (plen == len(path)), ('Path %s too long to fit into index' % entry[3]) flags = (plen | (entry[2] & CE_NAMEMASK_INV)) write(pack('>LLLLLL20sH', entry[6], entry[7], entry[0], entry[8], entry[9], entry[10], entry[1], flags)) write(path) real_size = (((tell() - beginoffset) + 8) & (~ 7)) write(('\x00' * ((beginoffset + real_size) - tell()))) if (extension_data is not None): stream.write(extension_data) stream.write_sha()
[ "def", "write_cache", "(", "entries", ",", "stream", ",", "extension_data", "=", "None", ",", "ShaStreamCls", "=", "IndexFileSHA1Writer", ")", ":", "stream", "=", "ShaStreamCls", "(", "stream", ")", "tell", "=", "stream", ".", "tell", "write", "=", "stream", ".", "write", "version", "=", "2", "write", "(", "'DIRC'", ")", "write", "(", "pack", "(", "'>LL'", ",", "version", ",", "len", "(", "entries", ")", ")", ")", "for", "entry", "in", "entries", ":", "beginoffset", "=", "tell", "(", ")", "write", "(", "entry", "[", "4", "]", ")", "write", "(", "entry", "[", "5", "]", ")", "path", "=", "entry", "[", "3", "]", "path", "=", "force_bytes", "(", "path", ",", "encoding", "=", "defenc", ")", "plen", "=", "(", "len", "(", "path", ")", "&", "CE_NAMEMASK", ")", "assert", "(", "plen", "==", "len", "(", "path", ")", ")", ",", "(", "'Path %s too long to fit into index'", "%", "entry", "[", "3", "]", ")", "flags", "=", "(", "plen", "|", "(", "entry", "[", "2", "]", "&", "CE_NAMEMASK_INV", ")", ")", "write", "(", "pack", "(", "'>LLLLLL20sH'", ",", "entry", "[", "6", "]", ",", "entry", "[", "7", "]", ",", "entry", "[", "0", "]", ",", "entry", "[", "8", "]", ",", "entry", "[", "9", "]", ",", "entry", "[", "10", "]", ",", "entry", "[", "1", "]", ",", "flags", ")", ")", "write", "(", "path", ")", "real_size", "=", "(", "(", "(", "tell", "(", ")", "-", "beginoffset", ")", "+", "8", ")", "&", "(", "~", "7", ")", ")", "write", "(", "(", "'\\x00'", "*", "(", "(", "beginoffset", "+", "real_size", ")", "-", "tell", "(", ")", ")", ")", ")", "if", "(", "extension_data", "is", "not", "None", ")", ":", "stream", ".", "write", "(", "extension_data", ")", "stream", ".", "write_sha", "(", ")" ]
write the cache represented by entries to a stream .
train
true
3,196
def idz_snorm(m, n, matveca, matvec, its=20): (snorm, v) = _id.idz_snorm(m, n, matveca, matvec, its) return snorm
[ "def", "idz_snorm", "(", "m", ",", "n", ",", "matveca", ",", "matvec", ",", "its", "=", "20", ")", ":", "(", "snorm", ",", "v", ")", "=", "_id", ".", "idz_snorm", "(", "m", ",", "n", ",", "matveca", ",", "matvec", ",", "its", ")", "return", "snorm" ]
estimate spectral norm of a complex matrix by the randomized power method .
train
false
3,197
def convertElementNode(elementNode, geometryOutput): if (geometryOutput == None): return if (len(geometryOutput) < 1): return if (len(geometryOutput) == 1): firstLoop = geometryOutput[0] if (firstLoop.__class__ == list): geometryOutput = firstLoop firstElement = geometryOutput[0] if (firstElement.__class__ == list): if (len(firstElement) > 1): convertElementNodeRenameByPaths(elementNode, geometryOutput) else: convertElementNodeByPath(elementNode, firstElement) else: convertElementNodeByPath(elementNode, geometryOutput)
[ "def", "convertElementNode", "(", "elementNode", ",", "geometryOutput", ")", ":", "if", "(", "geometryOutput", "==", "None", ")", ":", "return", "if", "(", "len", "(", "geometryOutput", ")", "<", "1", ")", ":", "return", "if", "(", "len", "(", "geometryOutput", ")", "==", "1", ")", ":", "firstLoop", "=", "geometryOutput", "[", "0", "]", "if", "(", "firstLoop", ".", "__class__", "==", "list", ")", ":", "geometryOutput", "=", "firstLoop", "firstElement", "=", "geometryOutput", "[", "0", "]", "if", "(", "firstElement", ".", "__class__", "==", "list", ")", ":", "if", "(", "len", "(", "firstElement", ")", ">", "1", ")", ":", "convertElementNodeRenameByPaths", "(", "elementNode", ",", "geometryOutput", ")", "else", ":", "convertElementNodeByPath", "(", "elementNode", ",", "firstElement", ")", "else", ":", "convertElementNodeByPath", "(", "elementNode", ",", "geometryOutput", ")" ]
convert the xml element to a difference xml element .
train
false
3,198
def safe_string_equals(a, b): if (len(a) != len(b)): return False result = 0 for (x, y) in zip(a, b): result |= (ord(x) ^ ord(y)) return (result == 0)
[ "def", "safe_string_equals", "(", "a", ",", "b", ")", ":", "if", "(", "len", "(", "a", ")", "!=", "len", "(", "b", ")", ")", ":", "return", "False", "result", "=", "0", "for", "(", "x", ",", "y", ")", "in", "zip", "(", "a", ",", "b", ")", ":", "result", "|=", "(", "ord", "(", "x", ")", "^", "ord", "(", "y", ")", ")", "return", "(", "result", "==", "0", ")" ]
near-constant time string comparison .
train
true
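A short demonstration with the function above; the XOR accumulation visits every character pair, so timing does not reveal where the first mismatch occurs:

print(safe_string_equals('token-abc', 'token-abc'))  # True
print(safe_string_equals('token-abc', 'token-abd'))  # False
print(safe_string_equals('short', 'longer-string'))  # False via the length check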
3,199
def p_unary_expression_5(t): pass
[ "def", "p_unary_expression_5", "(", "t", ")", ":", "pass" ]
unary_expression : sizeof unary_expression .
train
false
3,200
def change_VERSION_NOTIFY(version_notify): oldSetting = sickbeard.VERSION_NOTIFY sickbeard.VERSION_NOTIFY = version_notify if (not version_notify): sickbeard.NEWEST_VERSION_STRING = None if ((oldSetting is False) and (version_notify is True)): sickbeard.versionCheckScheduler.forceRun()
[ "def", "change_VERSION_NOTIFY", "(", "version_notify", ")", ":", "oldSetting", "=", "sickbeard", ".", "VERSION_NOTIFY", "sickbeard", ".", "VERSION_NOTIFY", "=", "version_notify", "if", "(", "not", "version_notify", ")", ":", "sickbeard", ".", "NEWEST_VERSION_STRING", "=", "None", "if", "(", "(", "oldSetting", "is", "False", ")", "and", "(", "version_notify", "is", "True", ")", ")", ":", "sickbeard", ".", "versionCheckScheduler", ".", "forceRun", "(", ")" ]
enable or disable update notifications, forcing a version check when notifications are newly enabled .
train
false
3,201
def localised_filesize(number): def rnd(number, divisor): return localised_number((float(((number * 10) / divisor)) / 10)) if (number < 1024): return _('{bytes} bytes').format(bytes=localised_number(number)) elif (number < (1024 ** 2)): return _('{kibibytes} KiB').format(kibibytes=rnd(number, 1024)) elif (number < (1024 ** 3)): return _('{mebibytes} MiB').format(mebibytes=rnd(number, (1024 ** 2))) elif (number < (1024 ** 4)): return _('{gibibytes} GiB').format(gibibytes=rnd(number, (1024 ** 3))) else: return _('{tebibytes} TiB').format(tebibytes=rnd(number, (1024 ** 4)))
[ "def", "localised_filesize", "(", "number", ")", ":", "def", "rnd", "(", "number", ",", "divisor", ")", ":", "return", "localised_number", "(", "(", "float", "(", "(", "(", "number", "*", "10", ")", "/", "divisor", ")", ")", "/", "10", ")", ")", "if", "(", "number", "<", "1024", ")", ":", "return", "_", "(", "'{bytes} bytes'", ")", ".", "format", "(", "bytes", "=", "localised_number", "(", "number", ")", ")", "elif", "(", "number", "<", "(", "1024", "**", "2", ")", ")", ":", "return", "_", "(", "'{kibibytes} KiB'", ")", ".", "format", "(", "kibibytes", "=", "rnd", "(", "number", ",", "1024", ")", ")", "elif", "(", "number", "<", "(", "1024", "**", "3", ")", ")", ":", "return", "_", "(", "'{mebibytes} MiB'", ")", ".", "format", "(", "mebibytes", "=", "rnd", "(", "number", ",", "(", "1024", "**", "2", ")", ")", ")", "elif", "(", "number", "<", "(", "1024", "**", "4", ")", ")", ":", "return", "_", "(", "'{gibibytes} GiB'", ")", ".", "format", "(", "gibibytes", "=", "rnd", "(", "number", ",", "(", "1024", "**", "3", ")", ")", ")", "else", ":", "return", "_", "(", "'{tebibytes} TiB'", ")", ".", "format", "(", "tebibytes", "=", "rnd", "(", "number", ",", "(", "1024", "**", "4", ")", ")", ")" ]
returns a localised unicode representation of a number in bytes .
train
false
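A runnable sketch with stand-ins for the two helpers the record assumes (the gettext underscore and localised_number); the exact powers of two below round cleanly under either Python 2 or 3 division:

_ = lambda s: s  # stand-in for gettext

def localised_number(n):
    return ('%g' % n) if isinstance(n, float) else str(n)

print(localised_filesize(500))          # '500 bytes'
print(localised_filesize(2048))         # '2 KiB'
print(localised_filesize(3 * 1024**3))  # '3 GiB'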
3,202
def zipf_sequence(n, alpha=2.0, xmin=1): return [zipf_rv(alpha, xmin) for _ in range(n)]
[ "def", "zipf_sequence", "(", "n", ",", "alpha", "=", "2.0", ",", "xmin", "=", "1", ")", ":", "return", "[", "zipf_rv", "(", "alpha", ",", "xmin", ")", "for", "_", "in", "range", "(", "n", ")", "]" ]
return a sample sequence of length n from a zipf distribution with exponent parameter alpha and minimum value xmin .
train
false
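A hedged usage sketch; zipf_rv, the per-draw sampler this relies on, lives in networkx.utils:

from networkx.utils import zipf_rv

seq = [zipf_rv(2.0, xmin=1) for _ in range(1000)]
# heavy-tailed: mostly 1s and 2s, with occasional large values
print(max(seq), sum(1 for v in seq if v == 1))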
3,203
def corner_foerstner(image, sigma=1): (Axx, Axy, Ayy) = structure_tensor(image, sigma) detA = ((Axx * Ayy) - (Axy ** 2)) traceA = (Axx + Ayy) w = np.zeros_like(image, dtype=np.double) q = np.zeros_like(image, dtype=np.double) mask = (traceA != 0) w[mask] = (detA[mask] / traceA[mask]) q[mask] = ((4 * detA[mask]) / (traceA[mask] ** 2)) return (w, q)
[ "def", "corner_foerstner", "(", "image", ",", "sigma", "=", "1", ")", ":", "(", "Axx", ",", "Axy", ",", "Ayy", ")", "=", "structure_tensor", "(", "image", ",", "sigma", ")", "detA", "=", "(", "(", "Axx", "*", "Ayy", ")", "-", "(", "Axy", "**", "2", ")", ")", "traceA", "=", "(", "Axx", "+", "Ayy", ")", "w", "=", "np", ".", "zeros_like", "(", "image", ",", "dtype", "=", "np", ".", "double", ")", "q", "=", "np", ".", "zeros_like", "(", "image", ",", "dtype", "=", "np", ".", "double", ")", "mask", "=", "(", "traceA", "!=", "0", ")", "w", "[", "mask", "]", "=", "(", "detA", "[", "mask", "]", "/", "traceA", "[", "mask", "]", ")", "q", "[", "mask", "]", "=", "(", "(", "4", "*", "detA", "[", "mask", "]", ")", "/", "(", "traceA", "[", "mask", "]", "**", "2", ")", ")", "return", "(", "w", ",", "q", ")" ]
compute foerstner corner measure response image .
train
false
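Usage via scikit-image, where this lives as skimage.feature.corner_foerstner; the accuracy and roundness thresholds below follow its documented example:

from skimage import data
from skimage.feature import corner_foerstner, corner_peaks

image = data.checkerboard()
w, q = corner_foerstner(image)
candidates = (q > 0.3) & (w > 0.5)            # round, well-localised corners
peaks = corner_peaks(w * candidates, min_distance=1)
print(peaks.shape)  # (n_corners, 2) row/col coordinates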
3,204
def inv(a): return (~ a)
[ "def", "inv", "(", "a", ")", ":", "return", "(", "~", "a", ")" ]
compute the inverse of a matrix with lu decomposition and forward / backward substitutions .
train
false
3,205
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir): try: import setuptools if (setuptools.__version__ == '0.0.1'): print >>sys.stderr, 'You have an obsolete version of setuptools installed. Please\nremove it from your system entirely before rerunning this script.' sys.exit(2) except ImportError: egg = download_setuptools(version, download_base, to_dir) sys.path.insert(0, egg) import setuptools setuptools.bootstrap_install_from = egg import pkg_resources try: pkg_resources.require(('setuptools>=' + version)) except pkg_resources.VersionConflict: print >>sys.stderr, ("The required version of setuptools (>=%s) is not available, and\ncan't be installed while this script is running. Please install\n a more recent version first." % version) sys.exit(2)
[ "def", "use_setuptools", "(", "version", "=", "DEFAULT_VERSION", ",", "download_base", "=", "DEFAULT_URL", ",", "to_dir", "=", "os", ".", "curdir", ")", ":", "try", ":", "import", "setuptools", "if", "(", "setuptools", ".", "__version__", "==", "'0.0.1'", ")", ":", "print", ">>", "sys", ".", "stderr", ",", "'You have an obsolete version of setuptools installed. Please\\nremove it from your system entirely before rerunning this script.'", "sys", ".", "exit", "(", "2", ")", "except", "ImportError", ":", "egg", "=", "download_setuptools", "(", "version", ",", "download_base", ",", "to_dir", ")", "sys", ".", "path", ".", "insert", "(", "0", ",", "egg", ")", "import", "setuptools", "setuptools", ".", "bootstrap_install_from", "=", "egg", "import", "pkg_resources", "try", ":", "pkg_resources", ".", "require", "(", "(", "'setuptools>='", "+", "version", ")", ")", "except", "pkg_resources", ".", "VersionConflict", ":", "print", ">>", "sys", ".", "stderr", ",", "(", "\"The required version of setuptools (>=%s) is not available, and\\ncan't be installed while this script is running. Please install\\n a more recent version first.\"", "%", "version", ")", "sys", ".", "exit", "(", "2", ")" ]
ensure that a setuptools version is installed .
train
true
3,206
@command(('(da|dv)pl\\s+%s' % PL)) def down_plist(dltype, parturl): plist(parturl) dump(False) title = g.pafy_pls[parturl][0].title subdir = util.mswinfn(title.replace('/', '-')) down_many(dltype, '1-', subdir=subdir) msg = g.message plist(parturl) g.message = msg
[ "@", "command", "(", "(", "'(da|dv)pl\\\\s+%s'", "%", "PL", ")", ")", "def", "down_plist", "(", "dltype", ",", "parturl", ")", ":", "plist", "(", "parturl", ")", "dump", "(", "False", ")", "title", "=", "g", ".", "pafy_pls", "[", "parturl", "]", "[", "0", "]", ".", "title", "subdir", "=", "util", ".", "mswinfn", "(", "title", ".", "replace", "(", "'/'", ",", "'-'", ")", ")", "down_many", "(", "dltype", ",", "'1-'", ",", "subdir", "=", "subdir", ")", "msg", "=", "g", ".", "message", "plist", "(", "parturl", ")", "g", ".", "message", "=", "msg" ]
download youtube playlist .
train
false
3,207
def getEvaluatedBooleanDefault(defaultBoolean, key, xmlElement=None): if (xmlElement == None): return None if (key in xmlElement.attributeDictionary): return euclidean.getBooleanFromValue(getEvaluatedValueObliviously(key, xmlElement)) return defaultBoolean
[ "def", "getEvaluatedBooleanDefault", "(", "defaultBoolean", ",", "key", ",", "xmlElement", "=", "None", ")", ":", "if", "(", "xmlElement", "==", "None", ")", ":", "return", "None", "if", "(", "key", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "return", "euclidean", ".", "getBooleanFromValue", "(", "getEvaluatedValueObliviously", "(", "key", ",", "xmlElement", ")", ")", "return", "defaultBoolean" ]
get the evaluated boolean, or the default if the key is absent .
train
false
3,210
def _assign_entity_to_pb(entity_pb, entity): bare_entity_pb = helpers.entity_to_protobuf(entity) bare_entity_pb.key.CopyFrom(bare_entity_pb.key) entity_pb.CopyFrom(bare_entity_pb)
[ "def", "_assign_entity_to_pb", "(", "entity_pb", ",", "entity", ")", ":", "bare_entity_pb", "=", "helpers", ".", "entity_to_protobuf", "(", "entity", ")", "bare_entity_pb", ".", "key", ".", "CopyFrom", "(", "bare_entity_pb", ".", "key", ")", "entity_pb", ".", "CopyFrom", "(", "bare_entity_pb", ")" ]
copy entity into entity_pb .
train
true
3,211
def exit_handler(): syslog.syslog(syslog.LOG_WARNING, 'Closing log') syslog.closelog()
[ "def", "exit_handler", "(", ")", ":", "syslog", ".", "syslog", "(", "syslog", ".", "LOG_WARNING", ",", "'Closing log'", ")", "syslog", ".", "closelog", "(", ")" ]
an exit handler for the daemon .
train
false
3,212
def dstack(tup): return concatenate([cupy.atleast_3d(m) for m in tup], 2)
[ "def", "dstack", "(", "tup", ")", ":", "return", "concatenate", "(", "[", "cupy", ".", "atleast_3d", "(", "m", ")", "for", "m", "in", "tup", "]", ",", "2", ")" ]
concatenate variables along third axis .
train
false
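The CuPy wrapper mirrors numpy.dstack; a quick shape check, shown with numpy since the semantics match:

import numpy as np

a = np.array([1, 2, 3])
b = np.array([4, 5, 6])
print(np.dstack((a, b)).shape)  # (1, 3, 2): 1-d inputs become (1, 3, 1) first
print(np.dstack((a, b))[0, 0])  # [1 4]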
3,214
def safe_destroy_vdis(session, vdi_refs): for vdi_ref in vdi_refs: try: destroy_vdi(session, vdi_ref) except exception.StorageError: LOG.debug('Ignoring error while destroying VDI: %s', vdi_ref)
[ "def", "safe_destroy_vdis", "(", "session", ",", "vdi_refs", ")", ":", "for", "vdi_ref", "in", "vdi_refs", ":", "try", ":", "destroy_vdi", "(", "session", ",", "vdi_ref", ")", "except", "exception", ".", "StorageError", ":", "LOG", ".", "debug", "(", "'Ignoring error while destroying VDI: %s'", ",", "vdi_ref", ")" ]
destroys the requested vdis .
train
false
3,216
def no_fsl(): if (Info.version() is None): return True else: return False
[ "def", "no_fsl", "(", ")", ":", "if", "(", "Info", ".", "version", "(", ")", "is", "None", ")", ":", "return", "True", "else", ":", "return", "False" ]
checks if fsl is not installed . used with skipif to skip tests that will fail if fsl is not installed .
train
false
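Typical use as a pytest skip condition, a sketch assuming no_fsl (and the nipype fsl Info it queries) from the record above:

import pytest

@pytest.mark.skipif(no_fsl(), reason='FSL is not installed')
def test_needs_fsl():
    pass  # would exercise an FSL interface here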
3,217
def make_dict_from_messages(messages, full_dict=None): out = {} if (full_dict == None): full_dict = get_full_dict(frappe.local.lang) for m in messages: if (m[1] in full_dict): out[m[1]] = full_dict[m[1]] return out
[ "def", "make_dict_from_messages", "(", "messages", ",", "full_dict", "=", "None", ")", ":", "out", "=", "{", "}", "if", "(", "full_dict", "==", "None", ")", ":", "full_dict", "=", "get_full_dict", "(", "frappe", ".", "local", ".", "lang", ")", "for", "m", "in", "messages", ":", "if", "(", "m", "[", "1", "]", "in", "full_dict", ")", ":", "out", "[", "m", "[", "1", "]", "]", "=", "full_dict", "[", "m", "[", "1", "]", "]", "return", "out" ]
returns translated messages as a dict in the language specified in frappe.local.lang .
train
false
3,219
def extract_attributes(html_element): parser = HTMLAttributeParser() parser.feed(html_element) parser.close() return parser.attrs
[ "def", "extract_attributes", "(", "html_element", ")", ":", "parser", "=", "HTMLAttributeParser", "(", ")", "parser", ".", "feed", "(", "html_element", ")", "parser", ".", "close", "(", ")", "return", "parser", ".", "attrs" ]
given a string for an html element such as <el a="foo" b="bar" c="&98;az" d=boz empty= noval entity="&amp;" sq='"' dq="'"> decode and return a dictionary of attributes .
train
false
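A hedged call through youtube-dl's public helper module, where this parser-based implementation lives:

from youtube_dl.utils import extract_attributes

attrs = extract_attributes('<a href="/watch?v=abc" class="yt-uix" data-id=42>')
print(attrs)  # {'href': '/watch?v=abc', 'class': 'yt-uix', 'data-id': '42'}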
3,220
def owner(*paths): return __salt__['lowpkg.owner'](*paths)
[ "def", "owner", "(", "*", "paths", ")", ":", "return", "__salt__", "[", "'lowpkg.owner'", "]", "(", "*", "paths", ")" ]
get the owner name of a file or directory .
train
false