column                 type     stats
id_within_dataset      int64    min 1, max 55.5k
snippet                string   lengths 19 to 14.2k
tokens                 list     lengths 6 to 1.63k
nl                     string   lengths 6 to 352
split_within_dataset   string   1 value
is_duplicated          bool     2 classes
48,170
def no_unicode_pprint(dct):
    import pprint
    s = pprint.pformat(dct)
    print re.sub("u'", "'", s)
[ "def", "no_unicode_pprint", "(", "dct", ")", ":", "import", "pprint", "s", "=", "pprint", ".", "pformat", "(", "dct", ")", "print", "re", ".", "sub", "(", "\"u'\"", ",", "\"'\"", ",", "s", ")" ]
python 2/3 dict __repr__ may be different .
train
false
48,171
def rarfile_set_tool_path(config):
    unrar_tool = config[u'unrar_tool']
    if unrar_tool:
        if not rarfile:
            log.error(u'rar_tool specified with no rarfile module installed.')
        else:
            rarfile.UNRAR_TOOL = unrar_tool
            log.debug(u'Set RarFile.unrar_tool to: %s', unrar_tool)
[ "def", "rarfile_set_tool_path", "(", "config", ")", ":", "unrar_tool", "=", "config", "[", "u'unrar_tool'", "]", "if", "unrar_tool", ":", "if", "(", "not", "rarfile", ")", ":", "log", ".", "error", "(", "u'rar_tool specified with no rarfile module installed.'", ")", "else", ":", "rarfile", ".", "UNRAR_TOOL", "=", "unrar_tool", "log", ".", "debug", "(", "u'Set RarFile.unrar_tool to: %s'", ",", "unrar_tool", ")" ]
manually set the path of the unrar executable if it can't be resolved from the path environment variable .
train
false
48,172
def _interpret_as_minutes(string, mdict):
    if (string.count(':') == 1 and '.' not in string
            and ('hours' not in mdict or mdict['hours'] is None)
            and ('days' not in mdict or mdict['days'] is None)
            and ('weeks' not in mdict or mdict['weeks'] is None)):
        mdict['hours'] = mdict['mins']
        mdict['mins'] = mdict['secs']
        mdict.pop('secs')
    return mdict
[ "def", "_interpret_as_minutes", "(", "string", ",", "mdict", ")", ":", "if", "(", "(", "string", ".", "count", "(", "':'", ")", "==", "1", ")", "and", "(", "'.'", "not", "in", "string", ")", "and", "(", "(", "'hours'", "not", "in", "mdict", ")", "or", "(", "mdict", "[", "'hours'", "]", "is", "None", ")", ")", "and", "(", "(", "'days'", "not", "in", "mdict", ")", "or", "(", "mdict", "[", "'days'", "]", "is", "None", ")", ")", "and", "(", "(", "'weeks'", "not", "in", "mdict", ")", "or", "(", "mdict", "[", "'weeks'", "]", "is", "None", ")", ")", ")", ":", "mdict", "[", "'hours'", "]", "=", "mdict", "[", "'mins'", "]", "mdict", "[", "'mins'", "]", "=", "mdict", "[", "'secs'", "]", "mdict", ".", "pop", "(", "'secs'", ")", "pass", "return", "mdict" ]
times like "1:22" are ambiguous; do they represent minutes and seconds or hours and minutes? by default .
train
false
48,173
def _convert_to_object(jsonc_obj):
    if isinstance(jsonc_obj, Jsonc):
        plain = {}
        for key, value in jsonc_obj._dict.iteritems():
            plain[key] = _convert_to_object(value)
        return plain
    elif isinstance(jsonc_obj, list):
        plain = []
        for item in jsonc_obj:
            plain.append(_convert_to_object(item))
        return plain
    else:
        return jsonc_obj
[ "def", "_convert_to_object", "(", "jsonc_obj", ")", ":", "if", "isinstance", "(", "jsonc_obj", ",", "Jsonc", ")", ":", "plain", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "jsonc_obj", ".", "_dict", ".", "iteritems", "(", ")", ":", "plain", "[", "key", "]", "=", "_convert_to_object", "(", "value", ")", "return", "plain", "elif", "isinstance", "(", "jsonc_obj", ",", "list", ")", ":", "plain", "=", "[", "]", "for", "item", "in", "jsonc_obj", ":", "plain", ".", "append", "(", "_convert_to_object", "(", "item", ")", ")", "return", "plain", "else", ":", "return", "jsonc_obj" ]
creates a new dict or list which has the data in the jsonc object .
train
false
48,174
def _coord_to_action(board, c):
    if c == pachi_py.PASS_COORD:
        return _pass_action(board.size)
    if c == pachi_py.RESIGN_COORD:
        return _resign_action(board.size)
    i, j = board.coord_to_ij(c)
    return i * board.size + j
[ "def", "_coord_to_action", "(", "board", ",", "c", ")", ":", "if", "(", "c", "==", "pachi_py", ".", "PASS_COORD", ")", ":", "return", "_pass_action", "(", "board", ".", "size", ")", "if", "(", "c", "==", "pachi_py", ".", "RESIGN_COORD", ")", ":", "return", "_resign_action", "(", "board", ".", "size", ")", "(", "i", ",", "j", ")", "=", "board", ".", "coord_to_ij", "(", "c", ")", "return", "(", "(", "i", "*", "board", ".", "size", ")", "+", "j", ")" ]
converts pachi coordinates to actions .
train
false
48,175
def build_SUMACLUST():
    status('Building SUMACLUST...')
    cwd = getcwd()
    scripts = join(cwd, 'scripts')
    try:
        tempdir = mkdtemp()
        if download_file('ftp://ftp.microbio.me/pub/QIIME-v1.9.0-dependencies/suma_package_V_1.0.00.tar.gz',
                         tempdir, 'suma_package_V_1.0.00.tar.gz'):
            status('Could not download SUMACLUST, so cannot install it.\n')
            return
        chdir(tempdir)
        if not system_call('tar xzf suma_package_V_1.0.00.tar.gz', 'extract SUMACLUST archive'):
            return
        chdir('suma_package_V_1.0.00/sumaclust')
        if not system_call('make', 'build SUMACLUST'):
            return
        copy('sumaclust', scripts)
        status('SUMACLUST built.\n')
    finally:
        rmtree(tempdir)
        chdir(cwd)
[ "def", "build_SUMACLUST", "(", ")", ":", "status", "(", "'Building SUMACLUST...'", ")", "cwd", "=", "getcwd", "(", ")", "scripts", "=", "join", "(", "cwd", ",", "'scripts'", ")", "try", ":", "tempdir", "=", "mkdtemp", "(", ")", "if", "download_file", "(", "'ftp://ftp.microbio.me/pub/QIIME-v1.9.0-dependencies/suma_package_V_1.0.00.tar.gz'", ",", "tempdir", ",", "'suma_package_V_1.0.00.tar.gz'", ")", ":", "status", "(", "'Could not download SUMACLUST, so cannot install it.\\n'", ")", "return", "chdir", "(", "tempdir", ")", "if", "(", "not", "system_call", "(", "'tar xzf suma_package_V_1.0.00.tar.gz'", ",", "'extract SUMACLUST archive'", ")", ")", ":", "return", "chdir", "(", "'suma_package_V_1.0.00/sumaclust'", ")", "if", "(", "not", "system_call", "(", "'make'", ",", "'build SUMACLUST'", ")", ")", ":", "return", "copy", "(", "'sumaclust'", ",", "scripts", ")", "status", "(", "'SUMACLUST built.\\n'", ")", "finally", ":", "rmtree", "(", "tempdir", ")", "chdir", "(", "cwd", ")" ]
download and build sumaclust then copy it to the scripts directory .
train
false
48,176
def _check_loglevel(level='info', quiet=False):
    def _bad_level(level):
        log.error("Invalid output_loglevel '{0}'. Valid levels are: {1}. Falling back to 'info'.".format(level, ', '.join(sorted(LOG_LEVELS, reverse=True))))
        return LOG_LEVELS['info']

    if salt.utils.is_true(quiet) or str(level).lower() == 'quiet':
        return None
    try:
        level = level.lower()
        if level not in LOG_LEVELS:
            return _bad_level(level)
    except AttributeError:
        return _bad_level(level)
    return LOG_LEVELS[level]
[ "def", "_check_loglevel", "(", "level", "=", "'info'", ",", "quiet", "=", "False", ")", ":", "def", "_bad_level", "(", "level", ")", ":", "log", ".", "error", "(", "\"Invalid output_loglevel '{0}'. Valid levels are: {1}. Falling back to 'info'.\"", ".", "format", "(", "level", ",", "', '", ".", "join", "(", "sorted", "(", "LOG_LEVELS", ",", "reverse", "=", "True", ")", ")", ")", ")", "return", "LOG_LEVELS", "[", "'info'", "]", "if", "(", "salt", ".", "utils", ".", "is_true", "(", "quiet", ")", "or", "(", "str", "(", "level", ")", ".", "lower", "(", ")", "==", "'quiet'", ")", ")", ":", "return", "None", "try", ":", "level", "=", "level", ".", "lower", "(", ")", "if", "(", "level", "not", "in", "LOG_LEVELS", ")", ":", "return", "_bad_level", "(", "level", ")", "except", "AttributeError", ":", "return", "_bad_level", "(", "level", ")", "return", "LOG_LEVELS", "[", "level", "]" ]
retrieve the level code for use in logging .
train
false
48,177
def test_bdf_data():
    raw_py = _test_raw_reader(read_raw_edf, input_fname=bdf_path, montage=montage_path,
                              eog=eog, misc=misc, exclude=['M2', 'IEOG'])
    assert_true('RawEDF' in repr(raw_py))
    picks = pick_types(raw_py.info, meg=False, eeg=True, exclude='bads')
    data_py, _ = raw_py[picks]
    raw_eeglab = io.loadmat(bdf_eeglab_path)
    raw_eeglab = raw_eeglab['data'] * 1e-06
    data_eeglab = raw_eeglab[picks]
    assert_array_almost_equal(data_py, data_eeglab, 8)
    assert_true(raw_py.info['chs'][0]['loc'].any())
    assert_true(raw_py.info['chs'][25]['loc'].any())
    assert_true(raw_py.info['chs'][63]['loc'].any())
[ "def", "test_bdf_data", "(", ")", ":", "raw_py", "=", "_test_raw_reader", "(", "read_raw_edf", ",", "input_fname", "=", "bdf_path", ",", "montage", "=", "montage_path", ",", "eog", "=", "eog", ",", "misc", "=", "misc", ",", "exclude", "=", "[", "'M2'", ",", "'IEOG'", "]", ")", "assert_true", "(", "(", "'RawEDF'", "in", "repr", "(", "raw_py", ")", ")", ")", "picks", "=", "pick_types", "(", "raw_py", ".", "info", ",", "meg", "=", "False", ",", "eeg", "=", "True", ",", "exclude", "=", "'bads'", ")", "(", "data_py", ",", "_", ")", "=", "raw_py", "[", "picks", "]", "raw_eeglab", "=", "io", ".", "loadmat", "(", "bdf_eeglab_path", ")", "raw_eeglab", "=", "(", "raw_eeglab", "[", "'data'", "]", "*", "1e-06", ")", "data_eeglab", "=", "raw_eeglab", "[", "picks", "]", "assert_array_almost_equal", "(", "data_py", ",", "data_eeglab", ",", "8", ")", "assert_true", "(", "raw_py", ".", "info", "[", "'chs'", "]", "[", "0", "]", "[", "'loc'", "]", ".", "any", "(", ")", ")", "assert_true", "(", "raw_py", ".", "info", "[", "'chs'", "]", "[", "25", "]", "[", "'loc'", "]", ".", "any", "(", ")", ")", "assert_true", "(", "raw_py", ".", "info", "[", "'chs'", "]", "[", "63", "]", "[", "'loc'", "]", ".", "any", "(", ")", ")" ]
test reading raw bdf files .
train
false
48,178
def get_tolerance_for(item_code, item_tolerance={}, global_tolerance=None):
    if item_tolerance.get(item_code):
        return (item_tolerance[item_code], item_tolerance, global_tolerance)
    tolerance = flt(frappe.db.get_value(u'Item', item_code, u'tolerance') or 0)
    if not tolerance:
        if global_tolerance is None:
            global_tolerance = flt(frappe.db.get_value(u'Stock Settings', None, u'tolerance'))
        tolerance = global_tolerance
    item_tolerance[item_code] = tolerance
    return (tolerance, item_tolerance, global_tolerance)
[ "def", "get_tolerance_for", "(", "item_code", ",", "item_tolerance", "=", "{", "}", ",", "global_tolerance", "=", "None", ")", ":", "if", "item_tolerance", ".", "get", "(", "item_code", ")", ":", "return", "(", "item_tolerance", "[", "item_code", "]", ",", "item_tolerance", ",", "global_tolerance", ")", "tolerance", "=", "flt", "(", "(", "frappe", ".", "db", ".", "get_value", "(", "u'Item'", ",", "item_code", ",", "u'tolerance'", ")", "or", "0", ")", ")", "if", "(", "not", "tolerance", ")", ":", "if", "(", "global_tolerance", "==", "None", ")", ":", "global_tolerance", "=", "flt", "(", "frappe", ".", "db", ".", "get_value", "(", "u'Stock Settings'", ",", "None", ",", "u'tolerance'", ")", ")", "tolerance", "=", "global_tolerance", "item_tolerance", "[", "item_code", "]", "=", "tolerance", "return", "(", "tolerance", ",", "item_tolerance", ",", "global_tolerance", ")" ]
returns the tolerance for the item .
train
false
48,179
def _get_design(user, design_id):
    try:
        return Document.objects.can_read_or_exception(user, Workflow, doc_id=design_id).content_object
    except Workflow.DoesNotExist:
        raise PopupException(_('Workflow not found'))
[ "def", "_get_design", "(", "user", ",", "design_id", ")", ":", "try", ":", "return", "Document", ".", "objects", ".", "can_read_or_exception", "(", "user", ",", "Workflow", ",", "doc_id", "=", "design_id", ")", ".", "content_object", "except", "Workflow", ".", "DoesNotExist", ":", "raise", "PopupException", "(", "_", "(", "'Workflow not found'", ")", ")" ]
raise popupexception if design doesn't exist .
train
false
48,180
def rgb_to_hex(rgb):
    return '%02x%02x%02x' % rgb
[ "def", "rgb_to_hex", "(", "rgb", ")", ":", "return", "(", "'%02x%02x%02x'", "%", "rgb", ")" ]
rgb to hex .
train
false
48,181
def validate_common_config(settings):
    if not getattr(settings, 'LMS_ROOT_URL', None):
        raise ValueError("'LMS_ROOT_URL' is not defined.")
[ "def", "validate_common_config", "(", "settings", ")", ":", "if", "(", "not", "getattr", "(", "settings", ",", "'LMS_ROOT_URL'", ",", "None", ")", ")", ":", "raise", "ValueError", "(", "\"'LMS_ROOT_URL' is not defined.\"", ")" ]
validates configurations common for all apps .
train
false
48,182
def serial_for_url(url, *args, **kwargs):
    do_open = 'do_not_open' not in kwargs or not kwargs['do_not_open']
    if 'do_not_open' in kwargs:
        del kwargs['do_not_open']
    klass = Serial
    try:
        url_nocase = url.lower()
    except AttributeError:
        pass
    else:
        if '://' in url_nocase:
            protocol = url_nocase.split('://', 1)[0]
            for package_name in protocol_handler_packages:
                module_name = '%s.protocol_%s' % (package_name, protocol)
                try:
                    handler_module = __import__(module_name)
                except ImportError:
                    pass
                else:
                    klass = sys.modules[module_name].Serial
                    break
            else:
                raise ValueError('invalid URL, protocol %r not known' % (protocol,))
        else:
            klass = Serial
    instance = klass(None, *args, **kwargs)
    instance.port = url
    if do_open:
        instance.open()
    return instance
[ "def", "serial_for_url", "(", "url", ",", "*", "args", ",", "**", "kwargs", ")", ":", "do_open", "=", "(", "(", "'do_not_open'", "not", "in", "kwargs", ")", "or", "(", "not", "kwargs", "[", "'do_not_open'", "]", ")", ")", "if", "(", "'do_not_open'", "in", "kwargs", ")", ":", "del", "kwargs", "[", "'do_not_open'", "]", "klass", "=", "Serial", "try", ":", "url_nocase", "=", "url", ".", "lower", "(", ")", "except", "AttributeError", ":", "pass", "else", ":", "if", "(", "'://'", "in", "url_nocase", ")", ":", "protocol", "=", "url_nocase", ".", "split", "(", "'://'", ",", "1", ")", "[", "0", "]", "for", "package_name", "in", "protocol_handler_packages", ":", "module_name", "=", "(", "'%s.protocol_%s'", "%", "(", "package_name", ",", "protocol", ")", ")", "try", ":", "handler_module", "=", "__import__", "(", "module_name", ")", "except", "ImportError", ":", "pass", "else", ":", "klass", "=", "sys", ".", "modules", "[", "module_name", "]", ".", "Serial", "break", "else", ":", "raise", "ValueError", "(", "(", "'invalid URL, protocol %r not known'", "%", "(", "protocol", ",", ")", ")", ")", "else", ":", "klass", "=", "Serial", "instance", "=", "klass", "(", "None", ",", "*", "args", ",", "**", "kwargs", ")", "instance", ".", "port", "=", "url", "if", "do_open", ":", "instance", ".", "open", "(", ")", "return", "instance" ]
get an instance of the serial class .
train
false
48,183
def MakeCdfFromList(seq, label=None):
    return Cdf(seq, label=label)
[ "def", "MakeCdfFromList", "(", "seq", ",", "label", "=", "None", ")", ":", "return", "Cdf", "(", "seq", ",", "label", "=", "label", ")" ]
creates a cdf from an unsorted sequence .
train
false
48,184
def conv_cond_concat(x, y):
    return T.concatenate([x, y * T.ones((x.shape[0], y.shape[1], x.shape[2], x.shape[3]))], axis=1)
[ "def", "conv_cond_concat", "(", "x", ",", "y", ")", ":", "return", "T", ".", "concatenate", "(", "[", "x", ",", "(", "y", "*", "T", ".", "ones", "(", "(", "x", ".", "shape", "[", "0", "]", ",", "y", ".", "shape", "[", "1", "]", ",", "x", ".", "shape", "[", "2", "]", ",", "x", ".", "shape", "[", "3", "]", ")", ")", ")", "]", ",", "axis", "=", "1", ")" ]
concatenate conditioning vector on feature map axis .
train
false
48,185
def walk(top, func, arg):
    warnings.warnpy3k('In 3.x, os.path.walk is removed in favor of os.walk.')
    try:
        names = os.listdir(top)
    except os.error:
        return
    func(arg, top, names)
    for name in names:
        name = join(top, name)
        if isdir(name):
            walk(name, func, arg)
[ "def", "walk", "(", "top", ",", "func", ",", "arg", ")", ":", "warnings", ".", "warnpy3k", "(", "'In 3.x, os.path.walk is removed in favor of os.walk.'", ")", "try", ":", "names", "=", "os", ".", "listdir", "(", "top", ")", "except", "os", ".", "error", ":", "return", "func", "(", "arg", ",", "top", ",", "names", ")", "for", "name", "in", "names", ":", "name", "=", "join", "(", "top", ",", "name", ")", "if", "isdir", "(", "name", ")", ":", "walk", "(", "name", ",", "func", ",", "arg", ")" ]
a version of os .
train
false
48,186
def install_packages(module, xbps_path, state, packages):
    toInstall = []
    for i, package in enumerate(packages):
        # If the package is installed and state == present or state == latest
        # and is up-to-date then skip
        installed, updated = query_package(module, xbps_path, package)
        if installed and (state == 'present' or (state == 'latest' and updated)):
            continue
        toInstall.append(package)
    if len(toInstall) == 0:
        module.exit_json(changed=False, msg='Nothing to Install')
    cmd = '%s -y %s' % (xbps_path['install'], ' '.join(toInstall))
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    if rc != 0 and not (state == 'latest' and rc == 17):
        module.fail_json(msg='failed to install %s' % package)
    module.exit_json(changed=True, msg='installed %s package(s)' % len(toInstall), packages=toInstall)
    module.exit_json(changed=False, msg='package(s) already installed', packages=[])
[ "def", "install_packages", "(", "module", ",", "xbps_path", ",", "state", ",", "packages", ")", ":", "toInstall", "=", "[", "]", "for", "(", "i", ",", "package", ")", "in", "enumerate", "(", "packages", ")", ":", "(", "installed", ",", "updated", ")", "=", "query_package", "(", "module", ",", "xbps_path", ",", "package", ")", "if", "(", "installed", "and", "(", "(", "state", "==", "'present'", ")", "or", "(", "(", "state", "==", "'latest'", ")", "and", "updated", ")", ")", ")", ":", "continue", "toInstall", ".", "append", "(", "package", ")", "if", "(", "len", "(", "toInstall", ")", "==", "0", ")", ":", "module", ".", "exit_json", "(", "changed", "=", "False", ",", "msg", "=", "'Nothing to Install'", ")", "cmd", "=", "(", "'%s -y %s'", "%", "(", "xbps_path", "[", "'install'", "]", ",", "' '", ".", "join", "(", "toInstall", ")", ")", ")", "(", "rc", ",", "stdout", ",", "stderr", ")", "=", "module", ".", "run_command", "(", "cmd", ",", "check_rc", "=", "False", ")", "if", "(", "(", "rc", "!=", "0", ")", "and", "(", "not", "(", "(", "state", "==", "'latest'", ")", "and", "(", "rc", "==", "17", ")", ")", ")", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'failed to install %s'", "%", "package", ")", ")", "module", ".", "exit_json", "(", "changed", "=", "True", ",", "msg", "=", "(", "'installed %s package(s)'", "%", "len", "(", "toInstall", ")", ")", ",", "packages", "=", "toInstall", ")", "module", ".", "exit_json", "(", "changed", "=", "False", ",", "msg", "=", "'package(s) already installed'", ",", "packages", "=", "[", "]", ")" ]
returns true if package install succeeds .
train
false
48,187
@mock_streams('stdout')
def test_puts_without_prefix():
    s = 'my output'
    puts(s, show_prefix=False)
    eq_(sys.stdout.getvalue(), '%s' % (s + '\n'))
[ "@", "mock_streams", "(", "'stdout'", ")", "def", "test_puts_without_prefix", "(", ")", ":", "s", "=", "'my output'", "puts", "(", "s", ",", "show_prefix", "=", "False", ")", "eq_", "(", "sys", ".", "stdout", ".", "getvalue", "(", ")", ",", "(", "'%s'", "%", "(", "s", "+", "'\\n'", ")", ")", ")" ]
puts() shouldn't prefix output with env .
train
false
48,188
def json_format(filename, indent=DEFAULT_INDENT_SIZE, **kwargs):
    console = kwargs.get('console', logging.getLogger('console'))
    encoding = kwargs.get('encoding', None)
    dry_run = kwargs.get('dry_run', False)
    if indent is None:
        sort_keys = False
    else:
        sort_keys = True
    message = '%s ...' % filename
    contents = open(filename, 'r').read()
    data = json.loads(contents, encoding=encoding)
    contents2 = json.dumps(data, indent=indent, sort_keys=sort_keys)
    contents2 = contents2.strip()
    contents2 = '%s\n' % contents2
    if contents == contents2:
        console.info('%s SKIP (already pretty)', message)
        return 2
    elif not dry_run:
        outfile = open(filename, 'w')
        outfile.write(contents2)
        outfile.close()
        console.warn('%s OK', message)
        return 1
[ "def", "json_format", "(", "filename", ",", "indent", "=", "DEFAULT_INDENT_SIZE", ",", "**", "kwargs", ")", ":", "console", "=", "kwargs", ".", "get", "(", "'console'", ",", "logging", ".", "getLogger", "(", "'console'", ")", ")", "encoding", "=", "kwargs", ".", "get", "(", "'encoding'", ",", "None", ")", "dry_run", "=", "kwargs", ".", "get", "(", "'dry_run'", ",", "False", ")", "if", "(", "indent", "is", "None", ")", ":", "sort_keys", "=", "False", "else", ":", "sort_keys", "=", "True", "message", "=", "(", "'%s ...'", "%", "filename", ")", "contents", "=", "open", "(", "filename", ",", "'r'", ")", ".", "read", "(", ")", "data", "=", "json", ".", "loads", "(", "contents", ",", "encoding", "=", "encoding", ")", "contents2", "=", "json", ".", "dumps", "(", "data", ",", "indent", "=", "indent", ",", "sort_keys", "=", "sort_keys", ")", "contents2", "=", "contents2", ".", "strip", "(", ")", "contents2", "=", "(", "'%s\\n'", "%", "contents2", ")", "if", "(", "contents", "==", "contents2", ")", ":", "console", ".", "info", "(", "'%s SKIP (already pretty)'", ",", "message", ")", "return", "2", "elif", "(", "not", "dry_run", ")", ":", "outfile", "=", "open", "(", "filename", ",", "'w'", ")", "outfile", ".", "write", "(", "contents2", ")", "outfile", ".", "close", "(", ")", "console", ".", "warn", "(", "'%s OK'", ",", "message", ")", "return", "1" ]
format/beautify a json file .
train
false
48,189
def _desc_save(caller, buf):
    caller.db.evmenu_target.db.desc = buf
    caller.msg('Saved.')
    return True
[ "def", "_desc_save", "(", "caller", ",", "buf", ")", ":", "caller", ".", "db", ".", "evmenu_target", ".", "db", ".", "desc", "=", "buf", "caller", ".", "msg", "(", "'Saved.'", ")", "return", "True" ]
save line buffer to the desc prop .
train
false
48,192
def get_incompatible_reqs(dist, installed_dists):
    installed_dists_by_name = {}
    for installed_dist in installed_dists:
        installed_dists_by_name[installed_dist.project_name] = installed_dist
    for requirement in dist.requires():
        present_dist = installed_dists_by_name.get(requirement.project_name)
        if present_dist and present_dist not in requirement:
            yield (requirement, present_dist)
[ "def", "get_incompatible_reqs", "(", "dist", ",", "installed_dists", ")", ":", "installed_dists_by_name", "=", "{", "}", "for", "installed_dist", "in", "installed_dists", ":", "installed_dists_by_name", "[", "installed_dist", ".", "project_name", "]", "=", "installed_dist", "for", "requirement", "in", "dist", ".", "requires", "(", ")", ":", "present_dist", "=", "installed_dists_by_name", ".", "get", "(", "requirement", ".", "project_name", ")", "if", "(", "present_dist", "and", "(", "present_dist", "not", "in", "requirement", ")", ")", ":", "(", "yield", "(", "requirement", ",", "present_dist", ")", ")" ]
return all of the requirements of dist that are present in installed_dists .
train
false
48,193
def cartesian_to_spherical(x, y, z):
    if not hasattr(x, u'unit'):
        x = x * u.dimensionless_unscaled
    if not hasattr(y, u'unit'):
        y = y * u.dimensionless_unscaled
    if not hasattr(z, u'unit'):
        z = z * u.dimensionless_unscaled
    cart = CartesianRepresentation(x, y, z)
    sph = cart.represent_as(SphericalRepresentation)
    return (sph.distance, sph.lat, sph.lon)
[ "def", "cartesian_to_spherical", "(", "x", ",", "y", ",", "z", ")", ":", "if", "(", "not", "hasattr", "(", "x", ",", "u'unit'", ")", ")", ":", "x", "=", "(", "x", "*", "u", ".", "dimensionless_unscaled", ")", "if", "(", "not", "hasattr", "(", "y", ",", "u'unit'", ")", ")", ":", "y", "=", "(", "y", "*", "u", ".", "dimensionless_unscaled", ")", "if", "(", "not", "hasattr", "(", "z", ",", "u'unit'", ")", ")", ":", "z", "=", "(", "z", "*", "u", ".", "dimensionless_unscaled", ")", "cart", "=", "CartesianRepresentation", "(", "x", ",", "y", ",", "z", ")", "sph", "=", "cart", ".", "represent_as", "(", "SphericalRepresentation", ")", "return", "(", "sph", ".", "distance", ",", "sph", ".", "lat", ",", "sph", ".", "lon", ")" ]
converts 3d rectangular cartesian coordinates to spherical polar coordinates .
train
false
48,194
def get_alert_config(deployment_id, metric_name=None, api_key=None, profile='telemetry'):
    auth = _auth(profile=profile)
    alert = False
    key = 'telemetry.{0}.alerts'.format(deployment_id)
    if key not in __context__:
        try:
            get_url = _get_telemetry_base(profile) + '/alerts?deployment={0}'.format(deployment_id)
            response = requests.get(get_url, headers=auth)
        except requests.exceptions.RequestException as e:
            log.error(str(e))
            return False
        http_result = {}
        if response.status_code == 200:
            for alert in response.json():
                http_result[alert.get('condition', {}).get('metric')] = alert
            __context__[key] = http_result
    if not __context__.get(key):
        return []
    alerts = __context__[key].values()
    if metric_name:
        return __context__[key].get(metric_name)
    return [alert['_id'] for alert in alerts if '_id' in alert]
[ "def", "get_alert_config", "(", "deployment_id", ",", "metric_name", "=", "None", ",", "api_key", "=", "None", ",", "profile", "=", "'telemetry'", ")", ":", "auth", "=", "_auth", "(", "profile", "=", "profile", ")", "alert", "=", "False", "key", "=", "'telemetry.{0}.alerts'", ".", "format", "(", "deployment_id", ")", "if", "(", "key", "not", "in", "__context__", ")", ":", "try", ":", "get_url", "=", "(", "_get_telemetry_base", "(", "profile", ")", "+", "'/alerts?deployment={0}'", ".", "format", "(", "deployment_id", ")", ")", "response", "=", "requests", ".", "get", "(", "get_url", ",", "headers", "=", "auth", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "log", ".", "error", "(", "str", "(", "e", ")", ")", "return", "False", "http_result", "=", "{", "}", "if", "(", "response", ".", "status_code", "==", "200", ")", ":", "for", "alert", "in", "response", ".", "json", "(", ")", ":", "http_result", "[", "alert", ".", "get", "(", "'condition'", ",", "{", "}", ")", ".", "get", "(", "'metric'", ")", "]", "=", "alert", "__context__", "[", "key", "]", "=", "http_result", "if", "(", "not", "__context__", ".", "get", "(", "key", ")", ")", ":", "return", "[", "]", "alerts", "=", "__context__", "[", "key", "]", ".", "values", "(", ")", "if", "metric_name", ":", "return", "__context__", "[", "key", "]", ".", "get", "(", "metric_name", ")", "return", "[", "alert", "[", "'_id'", "]", "for", "alert", "in", "alerts", "if", "(", "'_id'", "in", "alert", ")", "]" ]
get all alert definitions associated with a given deployment or if metric_name is specified .
train
true
48,195
@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info >= (3,)")
def test_install_package_that_emits_unicode(script, data):
    to_install = data.packages.join('BrokenEmitsUTF8')
    result = script.pip('install', to_install, expect_error=True, expect_temp=True, quiet=True)
    assert 'FakeError: this package designed to fail on install' in result.stdout
    assert 'UnicodeDecodeError' not in result.stdout
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "\"sys.platform == 'win32' and sys.version_info >= (3,)\"", ")", "def", "test_install_package_that_emits_unicode", "(", "script", ",", "data", ")", ":", "to_install", "=", "data", ".", "packages", ".", "join", "(", "'BrokenEmitsUTF8'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "to_install", ",", "expect_error", "=", "True", ",", "expect_temp", "=", "True", ",", "quiet", "=", "True", ")", "assert", "(", "'FakeError: this package designed to fail on install'", "in", "result", ".", "stdout", ")", "assert", "(", "'UnicodeDecodeError'", "not", "in", "result", ".", "stdout", ")" ]
install a package with a setup .
train
false
48,197
def create_schema(name, database, owner=None):
    if owner:
        _run_as_pg('psql %(database)s -c "CREATE SCHEMA %(name)s AUTHORIZATION %(owner)s"' % locals())
    else:
        _run_as_pg('psql %(database)s -c "CREATE SCHEMA %(name)s"' % locals())
[ "def", "create_schema", "(", "name", ",", "database", ",", "owner", "=", "None", ")", ":", "if", "owner", ":", "_run_as_pg", "(", "(", "'psql %(database)s -c \"CREATE SCHEMA %(name)s AUTHORIZATION %(owner)s\"'", "%", "locals", "(", ")", ")", ")", "else", ":", "_run_as_pg", "(", "(", "'psql %(database)s -c \"CREATE SCHEMA %(name)s\"'", "%", "locals", "(", ")", ")", ")" ]
create a schema within a database .
train
false
48,198
def ROC(ds, count, timeperiod=-(2 ** 31)):
    return call_talib_with_ds(ds, count, talib.ROC, timeperiod)
[ "def", "ROC", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "ROC", ",", "timeperiod", ")" ]
rate of change : ((price / prevprice) - 1) * 100 .
train
false
48,200
def _check_rvm(ret, user=None):
    if not __salt__['rvm.is_installed'](user):
        ret['result'] = False
        ret['comment'] = 'RVM is not installed.'
    return ret
[ "def", "_check_rvm", "(", "ret", ",", "user", "=", "None", ")", ":", "if", "(", "not", "__salt__", "[", "'rvm.is_installed'", "]", "(", "user", ")", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'RVM is not installed.'", "return", "ret" ]
check to see if rvm is installed .
train
false
48,202
def retcode_pillar(pillar_name):
    groups = __salt__['pillar.get'](pillar_name)
    check = {}
    data = {}
    for group in groups:
        commands = groups[group]
        for command in commands:
            if isinstance(command, dict):
                plugin = next(six.iterkeys(command))
                args = command[plugin]
            else:
                plugin = command
                args = ''
            check.update(retcode(plugin, args, group))
            current_value = 0
            new_value = int(check[group]['status'])
            if group in data:
                current_value = int(data[group]['status'])
            if new_value > current_value or group not in data:
                if group not in data:
                    data[group] = {}
                data[group]['status'] = new_value
    return data
[ "def", "retcode_pillar", "(", "pillar_name", ")", ":", "groups", "=", "__salt__", "[", "'pillar.get'", "]", "(", "pillar_name", ")", "check", "=", "{", "}", "data", "=", "{", "}", "for", "group", "in", "groups", ":", "commands", "=", "groups", "[", "group", "]", "for", "command", "in", "commands", ":", "if", "isinstance", "(", "command", ",", "dict", ")", ":", "plugin", "=", "next", "(", "six", ".", "iterkeys", "(", "command", ")", ")", "args", "=", "command", "[", "plugin", "]", "else", ":", "plugin", "=", "command", "args", "=", "''", "check", ".", "update", "(", "retcode", "(", "plugin", ",", "args", ",", "group", ")", ")", "current_value", "=", "0", "new_value", "=", "int", "(", "check", "[", "group", "]", "[", "'status'", "]", ")", "if", "(", "group", "in", "data", ")", ":", "current_value", "=", "int", "(", "data", "[", "group", "]", "[", "'status'", "]", ")", "if", "(", "(", "new_value", ">", "current_value", ")", "or", "(", "group", "not", "in", "data", ")", ")", ":", "if", "(", "group", "not", "in", "data", ")", ":", "data", "[", "group", "]", "=", "{", "}", "data", "[", "group", "]", "[", "'status'", "]", "=", "new_value", "return", "data" ]
run one or more nagios plugins from pillar data and get the result of cmd .
train
true
48,203
def calendar_id2real_id(calendar_id=None, with_date=False):
    if calendar_id and isinstance(calendar_id, basestring):
        res = filter(None, calendar_id.split('-'))
        if len(res) == 2:
            real_id = res[0]
            if with_date:
                real_date = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT,
                                          time.strptime(res[1], VIRTUALID_DATETIME_FORMAT))
                start = datetime.strptime(real_date, DEFAULT_SERVER_DATETIME_FORMAT)
                end = start + timedelta(hours=with_date)
                return (int(real_id), real_date, end.strftime(DEFAULT_SERVER_DATETIME_FORMAT))
            return int(real_id)
    return (calendar_id and int(calendar_id)) or calendar_id
[ "def", "calendar_id2real_id", "(", "calendar_id", "=", "None", ",", "with_date", "=", "False", ")", ":", "if", "(", "calendar_id", "and", "isinstance", "(", "calendar_id", ",", "basestring", ")", ")", ":", "res", "=", "filter", "(", "None", ",", "calendar_id", ".", "split", "(", "'-'", ")", ")", "if", "(", "len", "(", "res", ")", "==", "2", ")", ":", "real_id", "=", "res", "[", "0", "]", "if", "with_date", ":", "real_date", "=", "time", ".", "strftime", "(", "DEFAULT_SERVER_DATETIME_FORMAT", ",", "time", ".", "strptime", "(", "res", "[", "1", "]", ",", "VIRTUALID_DATETIME_FORMAT", ")", ")", "start", "=", "datetime", ".", "strptime", "(", "real_date", ",", "DEFAULT_SERVER_DATETIME_FORMAT", ")", "end", "=", "(", "start", "+", "timedelta", "(", "hours", "=", "with_date", ")", ")", "return", "(", "int", "(", "real_id", ")", ",", "real_date", ",", "end", ".", "strftime", "(", "DEFAULT_SERVER_DATETIME_FORMAT", ")", ")", "return", "int", "(", "real_id", ")", "return", "(", "(", "calendar_id", "and", "int", "(", "calendar_id", ")", ")", "or", "calendar_id", ")" ]
convert a "virtual/recurring event id" into a real event id .
train
false
48,204
def capitalize(s):
    return s.capitalize()
[ "def", "capitalize", "(", "s", ")", ":", "return", "s", ".", "capitalize", "(", ")" ]
capitalize(s) -> string return a copy of the string s with only its first character capitalized .
train
false
48,205
def aggregate_create(context, values, metadata=None):
    return IMPL.aggregate_create(context, values, metadata)
[ "def", "aggregate_create", "(", "context", ",", "values", ",", "metadata", "=", "None", ")", ":", "return", "IMPL", ".", "aggregate_create", "(", "context", ",", "values", ",", "metadata", ")" ]
create a new aggregate with metadata .
train
false
48,206
def test_long_title(Chart, datas):
    chart = Chart(title="A chart is a graphical representation of data, in which 'the data is represented by symbols, such as bars in a bar chart, lines in a line chart, or slices in a pie chart'. A chart can represent tabular numeric data, functions or some kinds of qualitative structure and provides different info.")
    chart = make_data(chart, datas)
    q = chart.render_pyquery()
    assert len(q('.titles text')) == 5
[ "def", "test_long_title", "(", "Chart", ",", "datas", ")", ":", "chart", "=", "Chart", "(", "title", "=", "\"A chart is a graphical representation of data, in which 'the data is represented by symbols, such as bars in a bar chart, lines in a line chart, or slices in a pie chart'. A chart can represent tabular numeric data, functions or some kinds of qualitative structure and provides different info.\"", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "q", "=", "chart", ".", "render_pyquery", "(", ")", "assert", "(", "len", "(", "q", "(", "'.titles text'", ")", ")", "==", "5", ")" ]
test chart rendering with a long title .
train
false
48,207
def language_callback(lexer, match):
    l = None
    m = language_re.match(lexer.text[match.end():match.end() + 100])
    if m is not None:
        l = lexer._get_lexer(m.group(1))
    else:
        m = list(language_re.finditer(lexer.text[max(0, match.start() - 100):match.start()]))
        if m:
            l = lexer._get_lexer(m[-1].group(1))
    if l:
        yield (match.start(1), String, match.group(1))
        for x in l.get_tokens_unprocessed(match.group(2)):
            yield x
        yield (match.start(3), String, match.group(3))
    else:
        yield (match.start(), String, match.group())
[ "def", "language_callback", "(", "lexer", ",", "match", ")", ":", "l", "=", "None", "m", "=", "language_re", ".", "match", "(", "lexer", ".", "text", "[", "match", ".", "end", "(", ")", ":", "(", "match", ".", "end", "(", ")", "+", "100", ")", "]", ")", "if", "(", "m", "is", "not", "None", ")", ":", "l", "=", "lexer", ".", "_get_lexer", "(", "m", ".", "group", "(", "1", ")", ")", "else", ":", "m", "=", "list", "(", "language_re", ".", "finditer", "(", "lexer", ".", "text", "[", "max", "(", "0", ",", "(", "match", ".", "start", "(", ")", "-", "100", ")", ")", ":", "match", ".", "start", "(", ")", "]", ")", ")", "if", "m", ":", "l", "=", "lexer", ".", "_get_lexer", "(", "m", "[", "(", "-", "1", ")", "]", ".", "group", "(", "1", ")", ")", "if", "l", ":", "(", "yield", "(", "match", ".", "start", "(", "1", ")", ",", "String", ",", "match", ".", "group", "(", "1", ")", ")", ")", "for", "x", "in", "l", ".", "get_tokens_unprocessed", "(", "match", ".", "group", "(", "2", ")", ")", ":", "(", "yield", "x", ")", "(", "yield", "(", "match", ".", "start", "(", "3", ")", ",", "String", ",", "match", ".", "group", "(", "3", ")", ")", ")", "else", ":", "(", "yield", "(", "match", ".", "start", "(", ")", ",", "String", ",", "match", ".", "group", "(", ")", ")", ")" ]
parse the content of a $-string using a lexer . the lexer is chosen by looking for a nearby language .
train
false
48,210
def do_int(value, default=0, base=10):
    try:
        if isinstance(value, string_types):
            return int(value, base)
        return int(value)
    except (TypeError, ValueError):
        try:
            return int(float(value))
        except (TypeError, ValueError):
            return default
[ "def", "do_int", "(", "value", ",", "default", "=", "0", ",", "base", "=", "10", ")", ":", "try", ":", "if", "isinstance", "(", "value", ",", "string_types", ")", ":", "return", "int", "(", "value", ",", "base", ")", "return", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "try", ":", "return", "int", "(", "float", "(", "value", ")", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "default" ]
convert the value into an integer .
train
true
48,211
def toFile(filename, data):
    f = open(filename, 'w')
    cPickle.dump(data, f)
    f.close()
[ "def", "toFile", "(", "filename", ",", "data", ")", ":", "f", "=", "open", "(", "filename", ",", "'w'", ")", "cPickle", ".", "dump", "(", "data", ",", "f", ")", "f", ".", "close", "(", ")" ]
save data as a pickle file .
train
false
48,212
def resource_name_base(name):
    return name[name.rfind(PRN_SEPARATOR) + 1:]
[ "def", "resource_name_base", "(", "name", ")", ":", "return", "name", "[", "(", "name", ".", "rfind", "(", "PRN_SEPARATOR", ")", "+", "1", ")", ":", "]" ]
pkg/typename -> typename .
train
false
48,213
def RunDBA(callback):
    logging.warning('WARNING: this tool can modify low-level DynamoDB tables and attributes and should be used with caution. For example, modifying a photo or adding a label directly will not update secondary indexes nor create user updates.')

    def _OnInit(verified_schema):
        if options.options.op == 'list':
            def _OnList(result):
                logging.info(result)
                callback()
            db_client.DBClient.Instance().ListTables(callback=_OnList)
        else:
            if options.options.tables == 'ALL':
                tables = vf_schema.SCHEMA.GetTables()
            else:
                tables = [vf_schema.SCHEMA.GetTable(n) for n in options.options.tables]
            assert tables, 'no tables were specified'
            with util.Barrier(callback) as b:
                for table in tables:
                    RunOpOnTable(db_client.DBClient.Instance(), table, options.options.op, b.Callback())

    db_client.InitDB(vf_schema.SCHEMA, callback=_OnInit, verify_or_create=options.options.verify_or_create)
[ "def", "RunDBA", "(", "callback", ")", ":", "logging", ".", "warning", "(", "'WARNING: this tool can modify low-level DynamoDB tables and attributes and should be used with caution. For example, modifying a photo or adding a label directly will not update secondary indexes nor create user updates.'", ")", "def", "_OnInit", "(", "verified_schema", ")", ":", "if", "(", "options", ".", "options", ".", "op", "==", "'list'", ")", ":", "def", "_OnList", "(", "result", ")", ":", "logging", ".", "info", "(", "result", ")", "callback", "(", ")", "db_client", ".", "DBClient", ".", "Instance", "(", ")", ".", "ListTables", "(", "callback", "=", "_OnList", ")", "else", ":", "if", "(", "options", ".", "options", ".", "tables", "==", "'ALL'", ")", ":", "tables", "=", "vf_schema", ".", "SCHEMA", ".", "GetTables", "(", ")", "else", ":", "tables", "=", "[", "vf_schema", ".", "SCHEMA", ".", "GetTable", "(", "n", ")", "for", "n", "in", "options", ".", "options", ".", "tables", "]", "assert", "tables", ",", "'no tables were specified'", "with", "util", ".", "Barrier", "(", "callback", ")", "as", "b", ":", "for", "table", "in", "tables", ":", "RunOpOnTable", "(", "db_client", ".", "DBClient", ".", "Instance", "(", ")", ",", "table", ",", "options", ".", "options", ".", "op", ",", "b", ".", "Callback", "(", ")", ")", "db_client", ".", "InitDB", "(", "vf_schema", ".", "SCHEMA", ",", "callback", "=", "_OnInit", ",", "verify_or_create", "=", "options", ".", "options", ".", "verify_or_create", ")" ]
runs op on each table listed in --tables .
train
false
48,214
def is_building():
    if len(sys.argv) < 2:
        return True
    info_commands = ['--help-commands', '--name', '--version', '-V', '--fullname',
                     '--author', '--author-email', '--maintainer', '--maintainer-email',
                     '--contact', '--contact-email', '--url', '--license', '--description',
                     '--long-description', '--platforms', '--classifiers', '--keywords',
                     '--provides', '--requires', '--obsoletes']
    info_commands.extend(['egg_info', 'install_egg_info', 'rotate'])
    for command in info_commands:
        if command in sys.argv[1:]:
            return False
    return True
[ "def", "is_building", "(", ")", ":", "if", "(", "len", "(", "sys", ".", "argv", ")", "<", "2", ")", ":", "return", "True", "info_commands", "=", "[", "'--help-commands'", ",", "'--name'", ",", "'--version'", ",", "'-V'", ",", "'--fullname'", ",", "'--author'", ",", "'--author-email'", ",", "'--maintainer'", ",", "'--maintainer-email'", ",", "'--contact'", ",", "'--contact-email'", ",", "'--url'", ",", "'--license'", ",", "'--description'", ",", "'--long-description'", ",", "'--platforms'", ",", "'--classifiers'", ",", "'--keywords'", ",", "'--provides'", ",", "'--requires'", ",", "'--obsoletes'", "]", "info_commands", ".", "extend", "(", "[", "'egg_info'", ",", "'install_egg_info'", ",", "'rotate'", "]", ")", "for", "command", "in", "info_commands", ":", "if", "(", "command", "in", "sys", ".", "argv", "[", "1", ":", "]", ")", ":", "return", "False", "return", "True" ]
parse the setup .
train
false
48,216
def filer_image_from_upload(request, path, upload_data, sha1=None):
    return _filer_file_from_upload(model=Image, request=request, path=path,
                                   upload_data=upload_data, sha1=sha1)
[ "def", "filer_image_from_upload", "(", "request", ",", "path", ",", "upload_data", ",", "sha1", "=", "None", ")", ":", "return", "_filer_file_from_upload", "(", "model", "=", "Image", ",", "request", "=", "request", ",", "path", "=", "path", ",", "upload_data", "=", "upload_data", ",", "sha1", "=", "sha1", ")" ]
create a filer image from an upload .
train
false
48,217
def GetMedicationHTML(feed):
    if not feed.entry:
        return '<b>No entries in feed</b><br>'
    html = []
    for entry in feed.entry:
        try:
            ccr = entry.FindExtensions('ContinuityOfCareRecord')[0]
            body = ccr.FindChildren('Body')[0]
            meds = body.FindChildren('Medications')[0].FindChildren('Medication')
            for med in meds:
                name = med.FindChildren('Product')[0].FindChildren('ProductName')[0]
                html.append('<li>%s</li>' % name.FindChildren('Text')[0].text)
        except:
            html.append('<b>No medication data in this profile</b><br>')
    return '<ul>%s</ul>' % ''.join(html)
[ "def", "GetMedicationHTML", "(", "feed", ")", ":", "if", "(", "not", "feed", ".", "entry", ")", ":", "return", "'<b>No entries in feed</b><br>'", "html", "=", "[", "]", "for", "entry", "in", "feed", ".", "entry", ":", "try", ":", "ccr", "=", "entry", ".", "FindExtensions", "(", "'ContinuityOfCareRecord'", ")", "[", "0", "]", "body", "=", "ccr", ".", "FindChildren", "(", "'Body'", ")", "[", "0", "]", "meds", "=", "body", ".", "FindChildren", "(", "'Medications'", ")", "[", "0", "]", ".", "FindChildren", "(", "'Medication'", ")", "for", "med", "in", "meds", ":", "name", "=", "med", ".", "FindChildren", "(", "'Product'", ")", "[", "0", "]", ".", "FindChildren", "(", "'ProductName'", ")", "[", "0", "]", "html", ".", "append", "(", "(", "'<li>%s</li>'", "%", "name", ".", "FindChildren", "(", "'Text'", ")", "[", "0", "]", ".", "text", ")", ")", "except", ":", "html", ".", "append", "(", "'<b>No medication data in this profile</b><br>'", ")", "return", "(", "'<ul>%s</ul>'", "%", "''", ".", "join", "(", "html", ")", ")" ]
prints out the user's medication to the console .
train
false
48,219
def HTTP(port=80, **kwargs):
    return rule(port, **kwargs)
[ "def", "HTTP", "(", "port", "=", "80", ",", "**", "kwargs", ")", ":", "return", "rule", "(", "port", ",", "**", "kwargs", ")" ]
helper to build a firewall rule for http connections . extra args will be passed to :py:func:~fabtools .
train
false
48,221
def mcycles_to_msecs(mcycles):
    return int(mcycles_to_seconds(mcycles) * 1000)
[ "def", "mcycles_to_msecs", "(", "mcycles", ")", ":", "return", "int", "(", "(", "mcycles_to_seconds", "(", "mcycles", ")", "*", "1000", ")", ")" ]
helper function to convert megacycles to milliseconds .
train
false
48,222
def are_relatively_prime(a, b):
    d = gcd(a, b)
    return d == 1
[ "def", "are_relatively_prime", "(", "a", ",", "b", ")", ":", "d", "=", "gcd", "(", "a", ",", "b", ")", "return", "(", "d", "==", "1", ")" ]
returns true if a and b are relatively prime .
train
false
48,223
def get_party_status(doc):
    status = default_status[doc.doctype]
    for doctype in status_depends_on[doc.doctype]:
        filters = get_filters_for(doctype)
        filters[doc.doctype.lower()] = doc.name
        if filters:
            open_count = frappe.get_all(doctype, fields=u'name', filters=filters, limit_page_length=1)
            if len(open_count) > 0:
                status = u'Open'
                break
    return status
[ "def", "get_party_status", "(", "doc", ")", ":", "status", "=", "default_status", "[", "doc", ".", "doctype", "]", "for", "doctype", "in", "status_depends_on", "[", "doc", ".", "doctype", "]", ":", "filters", "=", "get_filters_for", "(", "doctype", ")", "filters", "[", "doc", ".", "doctype", ".", "lower", "(", ")", "]", "=", "doc", ".", "name", "if", "filters", ":", "open_count", "=", "frappe", ".", "get_all", "(", "doctype", ",", "fields", "=", "u'name'", ",", "filters", "=", "filters", ",", "limit_page_length", "=", "1", ")", "if", "(", "len", "(", "open_count", ")", ">", "0", ")", ":", "status", "=", "u'Open'", "break", "return", "status" ]
return party status based on open documents .
train
false
48,224
def _make_nets(variables, config, net_assignments):
    name_to_index = dict((v.name.split(':')[0], i) for i, v in enumerate(variables))
    if net_assignments is None:
        if len(config) != 1:
            raise ValueError('Default net_assignments can only be used if there is a single net config.')
        with tf.variable_scope('vars_optimizer'):
            key, kwargs = config.items()[0]
            net = networks.factory(**kwargs)
        nets = {key: net}
        keys = [key]
        subsets = [range(len(variables))]
    else:
        nets = {}
        keys = []
        subsets = []
        with tf.variable_scope('vars_optimizer'):
            for key, names in net_assignments:
                if key in nets:
                    raise ValueError('Repeated netid in net_assigments.')
                nets[key] = networks.factory(**config[key])
                subset = [name_to_index[name] for name in names]
                keys.append(key)
                subsets.append(subset)
                print('Net: {}, Subset: {}'.format(key, subset))
    return (nets, keys, subsets)
[ "def", "_make_nets", "(", "variables", ",", "config", ",", "net_assignments", ")", ":", "name_to_index", "=", "dict", "(", "(", "(", "v", ".", "name", ".", "split", "(", "':'", ")", "[", "0", "]", ",", "i", ")", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "variables", ")", ")", ")", "if", "(", "net_assignments", "is", "None", ")", ":", "if", "(", "len", "(", "config", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "'Default net_assignments can only be used if there is a single net config.'", ")", "with", "tf", ".", "variable_scope", "(", "'vars_optimizer'", ")", ":", "(", "key", ",", "kwargs", ")", "=", "config", ".", "items", "(", ")", "[", "0", "]", "net", "=", "networks", ".", "factory", "(", "**", "kwargs", ")", "nets", "=", "{", "key", ":", "net", "}", "keys", "=", "[", "key", "]", "subsets", "=", "[", "range", "(", "len", "(", "variables", ")", ")", "]", "else", ":", "nets", "=", "{", "}", "keys", "=", "[", "]", "subsets", "=", "[", "]", "with", "tf", ".", "variable_scope", "(", "'vars_optimizer'", ")", ":", "for", "(", "key", ",", "names", ")", "in", "net_assignments", ":", "if", "(", "key", "in", "nets", ")", ":", "raise", "ValueError", "(", "'Repeated netid in net_assigments.'", ")", "nets", "[", "key", "]", "=", "networks", ".", "factory", "(", "**", "config", "[", "key", "]", ")", "subset", "=", "[", "name_to_index", "[", "name", "]", "for", "name", "in", "names", "]", "keys", ".", "append", "(", "key", ")", "subsets", ".", "append", "(", "subset", ")", "print", "(", "'Net: {}, Subset: {}'", ".", "format", "(", "key", ",", "subset", ")", ")", "return", "(", "nets", ",", "keys", ",", "subsets", ")" ]
creates the optimizer networks .
train
false
48,225
def list_state_modules(*args):
    st_ = salt.state.State(__opts__)
    modules = set()
    if not args:
        for func in st_.states:
            log.debug('func {0}'.format(func))
            modules.add(func.split('.')[0])
        return sorted(modules)
    for module in args:
        if '*' in module:
            for func in fnmatch.filter(st_.states, module):
                modules.add(func.split('.')[0])
        else:
            for func in st_.states:
                mod_test = func.split('.')[0]
                if mod_test == module:
                    modules.add(mod_test)
    return sorted(modules)
[ "def", "list_state_modules", "(", "*", "args", ")", ":", "st_", "=", "salt", ".", "state", ".", "State", "(", "__opts__", ")", "modules", "=", "set", "(", ")", "if", "(", "not", "args", ")", ":", "for", "func", "in", "st_", ".", "states", ":", "log", ".", "debug", "(", "'func {0}'", ".", "format", "(", "func", ")", ")", "modules", ".", "add", "(", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "return", "sorted", "(", "modules", ")", "for", "module", "in", "args", ":", "if", "(", "'*'", "in", "module", ")", ":", "for", "func", "in", "fnmatch", ".", "filter", "(", "st_", ".", "states", ",", "module", ")", ":", "modules", ".", "add", "(", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "else", ":", "for", "func", "in", "st_", ".", "states", ":", "mod_test", "=", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", "if", "(", "mod_test", "==", "module", ")", ":", "modules", ".", "add", "(", "mod_test", ")", "return", "sorted", "(", "modules", ")" ]
list the modules loaded on the minion .
train
true
48,226
def get_provide_objects(category):
    return six.itervalues(_load_provide_objects(category))
[ "def", "get_provide_objects", "(", "category", ")", ":", "return", "six", ".", "itervalues", "(", "_load_provide_objects", "(", "category", ")", ")" ]
get an iterable of provide objects for the given category .
train
false
48,228
def get_auth_handler(host, config, provider, requested_capability=None):
    ready_handlers = []
    auth_handlers = boto.plugin.get_plugin(AuthHandler, requested_capability)
    for handler in auth_handlers:
        try:
            ready_handlers.append(handler(host, config, provider))
        except boto.auth_handler.NotReadyToAuthenticate:
            pass
    if not ready_handlers:
        checked_handlers = auth_handlers
        names = [handler.__name__ for handler in checked_handlers]
        raise boto.exception.NoAuthHandlerFound('No handler was ready to authenticate. %d handlers were checked. %s Check your credentials' % (len(names), str(names)))
    return ready_handlers[-1]
[ "def", "get_auth_handler", "(", "host", ",", "config", ",", "provider", ",", "requested_capability", "=", "None", ")", ":", "ready_handlers", "=", "[", "]", "auth_handlers", "=", "boto", ".", "plugin", ".", "get_plugin", "(", "AuthHandler", ",", "requested_capability", ")", "for", "handler", "in", "auth_handlers", ":", "try", ":", "ready_handlers", ".", "append", "(", "handler", "(", "host", ",", "config", ",", "provider", ")", ")", "except", "boto", ".", "auth_handler", ".", "NotReadyToAuthenticate", ":", "pass", "if", "(", "not", "ready_handlers", ")", ":", "checked_handlers", "=", "auth_handlers", "names", "=", "[", "handler", ".", "__name__", "for", "handler", "in", "checked_handlers", "]", "raise", "boto", ".", "exception", ".", "NoAuthHandlerFound", "(", "(", "'No handler was ready to authenticate. %d handlers were checked. %s Check your credentials'", "%", "(", "len", "(", "names", ")", ",", "str", "(", "names", ")", ")", ")", ")", "return", "ready_handlers", "[", "(", "-", "1", ")", "]" ]
finds an authhandler that is ready to authenticate .
train
false
48,231
def CAN_ASSIGN(article, user):
    return _is_staff_for_article(article, user)
[ "def", "CAN_ASSIGN", "(", "article", ",", "user", ")", ":", "return", "_is_staff_for_article", "(", "article", ",", "user", ")" ]
is user allowed to change owner or group of article? .
train
false
48,233
@snippet
def subscription_lifecycle(client, to_delete):
    TOPIC_NAME = 'subscription_lifecycle-%d' % (_millis(),)
    SUB_NAME = 'subscription_lifecycle-defaults-%d' % (_millis(),)
    topic = client.topic(TOPIC_NAME)
    topic.create()
    to_delete.append(topic)
    subscription = topic.subscription(SUB_NAME)
    subscription.create()
    assert subscription.exists()
    subscription.reload()
    subscription.delete()
[ "@", "snippet", "def", "subscription_lifecycle", "(", "client", ",", "to_delete", ")", ":", "TOPIC_NAME", "=", "(", "'subscription_lifecycle-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "SUB_NAME", "=", "(", "'subscription_lifecycle-defaults-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "topic", "=", "client", ".", "topic", "(", "TOPIC_NAME", ")", "topic", ".", "create", "(", ")", "to_delete", ".", "append", "(", "topic", ")", "subscription", "=", "topic", ".", "subscription", "(", "SUB_NAME", ")", "subscription", ".", "create", "(", ")", "assert", "subscription", ".", "exists", "(", ")", "subscription", ".", "reload", "(", ")", "subscription", ".", "delete", "(", ")" ]
test lifecycle of a subscription .
train
false
48,234
def load_config_from_stream(stream, reserve_keys=None):
    config_globals = {}
    config = bulkloader_parser.load_config(stream, config_globals)
    importer_classes = []
    exporter_classes = []
    for transformer in config.transformers:
        importer, exporter = create_transformer_classes(transformer, config_globals, reserve_keys)
        if importer:
            importer_classes.append(importer)
        if exporter:
            exporter_classes.append(exporter)
    return (importer_classes, exporter_classes)
[ "def", "load_config_from_stream", "(", "stream", ",", "reserve_keys", "=", "None", ")", ":", "config_globals", "=", "{", "}", "config", "=", "bulkloader_parser", ".", "load_config", "(", "stream", ",", "config_globals", ")", "importer_classes", "=", "[", "]", "exporter_classes", "=", "[", "]", "for", "transformer", "in", "config", ".", "transformers", ":", "(", "importer", ",", "exporter", ")", "=", "create_transformer_classes", "(", "transformer", ",", "config_globals", ",", "reserve_keys", ")", "if", "importer", ":", "importer_classes", ".", "append", "(", "importer", ")", "if", "exporter", ":", "exporter_classes", ".", "append", "(", "exporter", ")", "return", "(", "importer_classes", ",", "exporter_classes", ")" ]
parse a bulkloader .
train
false
48,236
@pytest.mark.django_db
def test_make_naive_explicit_tz(settings):
    settings.USE_TZ = True
    datetime_object = timezone.make_aware(datetime(2016, 1, 2, 21, 52, 25),
                                          timezone=pytz.timezone('Europe/Helsinki'))
    assert timezone.is_aware(datetime_object)
    naive_datetime = make_naive(datetime_object, tz=pytz.timezone('Asia/Bangkok'))
    assert timezone.is_naive(naive_datetime)
    assert naive_datetime.hour == (datetime_object.hour + 5) % 24
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_make_naive_explicit_tz", "(", "settings", ")", ":", "settings", ".", "USE_TZ", "=", "True", "datetime_object", "=", "timezone", ".", "make_aware", "(", "datetime", "(", "2016", ",", "1", ",", "2", ",", "21", ",", "52", ",", "25", ")", ",", "timezone", "=", "pytz", ".", "timezone", "(", "'Europe/Helsinki'", ")", ")", "assert", "timezone", ".", "is_aware", "(", "datetime_object", ")", "naive_datetime", "=", "make_naive", "(", "datetime_object", ",", "tz", "=", "pytz", ".", "timezone", "(", "'Asia/Bangkok'", ")", ")", "assert", "timezone", ".", "is_naive", "(", "naive_datetime", ")", "assert", "(", "naive_datetime", ".", "hour", "==", "(", "(", "datetime_object", ".", "hour", "+", "5", ")", "%", "24", ")", ")" ]
tests datetimes are made naive of the given timezone .
train
false
48,237
def split_css_classes(css_classes):
    classes_list = text_value(css_classes).split(u' ')
    return [c for c in classes_list if c]
[ "def", "split_css_classes", "(", "css_classes", ")", ":", "classes_list", "=", "text_value", "(", "css_classes", ")", ".", "split", "(", "u' '", ")", "return", "[", "c", "for", "c", "in", "classes_list", "if", "c", "]" ]
turn string into a list of css classes .
train
true
48,238
def test_no_files_specified(tmpdir, mocked_aws_cf_simple):
    with tmpdir.as_cwd():
        config_path = tmpdir.join('config.yaml')
        config_path.write(mocked_aws_cf_simple)
        assert main(['create']) == 0
        assert main(['wait']) == 0
        assert main(['describe']) == 0
        assert main(['pytest']) == 0
        assert main(['delete']) == 0
[ "def", "test_no_files_specified", "(", "tmpdir", ",", "mocked_aws_cf_simple", ")", ":", "with", "tmpdir", ".", "as_cwd", "(", ")", ":", "config_path", "=", "tmpdir", ".", "join", "(", "'config.yaml'", ")", "config_path", ".", "write", "(", "mocked_aws_cf_simple", ")", "assert", "(", "main", "(", "[", "'create'", "]", ")", "==", "0", ")", "assert", "(", "main", "(", "[", "'wait'", "]", ")", "==", "0", ")", "assert", "(", "main", "(", "[", "'describe'", "]", ")", "==", "0", ")", "assert", "(", "main", "(", "[", "'pytest'", "]", ")", "==", "0", ")", "assert", "(", "main", "(", "[", "'delete'", "]", ")", "==", "0", ")" ]
ensure typical usage works without specifying config and info file paths .
train
false
48,239
def Synchronized(f):
    @functools.wraps(f)
    def NewFunction(self, *args, **kw):
        with self.lock:
            return f(self, *args, **kw)
    return NewFunction
[ "def", "Synchronized", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "NewFunction", "(", "self", ",", "*", "args", ",", "**", "kw", ")", ":", "with", "self", ".", "lock", ":", "return", "f", "(", "self", ",", "*", "args", ",", "**", "kw", ")", "return", "NewFunction" ]
decorator to acquire a mutex around an apiproxystub method .
train
true
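A hedged usage sketch for the Synchronized record above: the decorator assumes the instance exposes a lock attribute, as in this hypothetical counter.

import functools
import threading

def Synchronized(f):
    @functools.wraps(f)
    def NewFunction(self, *args, **kw):
        with self.lock:  # serialize access through the instance's mutex
            return f(self, *args, **kw)
    return NewFunction

class Counter(object):
    def __init__(self):
        self.lock = threading.Lock()  # the attribute the decorator requires
        self.value = 0

    @Synchronized
    def increment(self):
        self.value += 1

c = Counter()
c.increment()
assert c.value == 1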
48,240
@jit(nopython=nopython, cache=True) def _indiff_mixed_action(payoff_matrix, own_supp, opp_supp, A, b, out): m = payoff_matrix.shape[0] k = len(own_supp) A[:(-1), :(-1)] = payoff_matrix[own_supp, :][:, opp_supp] if is_singular(A): return False sol = np.linalg.solve(A, b) if (sol[:(-1)] <= 0).any(): return False out[:] = sol[:(-1)] val = sol[(-1)] if (k == m): return True own_supp_flags = np.zeros(m, np.bool_) own_supp_flags[own_supp] = True for i in range(m): if (not own_supp_flags[i]): payoff = 0 for j in range(k): payoff += (payoff_matrix[(i, opp_supp[j])] * out[j]) if (payoff > val): return False return True
[ "@", "jit", "(", "nopython", "=", "nopython", ",", "cache", "=", "True", ")", "def", "_indiff_mixed_action", "(", "payoff_matrix", ",", "own_supp", ",", "opp_supp", ",", "A", ",", "b", ",", "out", ")", ":", "m", "=", "payoff_matrix", ".", "shape", "[", "0", "]", "k", "=", "len", "(", "own_supp", ")", "A", "[", ":", "(", "-", "1", ")", ",", ":", "(", "-", "1", ")", "]", "=", "payoff_matrix", "[", "own_supp", ",", ":", "]", "[", ":", ",", "opp_supp", "]", "if", "is_singular", "(", "A", ")", ":", "return", "False", "sol", "=", "np", ".", "linalg", ".", "solve", "(", "A", ",", "b", ")", "if", "(", "sol", "[", ":", "(", "-", "1", ")", "]", "<=", "0", ")", ".", "any", "(", ")", ":", "return", "False", "out", "[", ":", "]", "=", "sol", "[", ":", "(", "-", "1", ")", "]", "val", "=", "sol", "[", "(", "-", "1", ")", "]", "if", "(", "k", "==", "m", ")", ":", "return", "True", "own_supp_flags", "=", "np", ".", "zeros", "(", "m", ",", "np", ".", "bool_", ")", "own_supp_flags", "[", "own_supp", "]", "=", "True", "for", "i", "in", "range", "(", "m", ")", ":", "if", "(", "not", "own_supp_flags", "[", "i", "]", ")", ":", "payoff", "=", "0", "for", "j", "in", "range", "(", "k", ")", ":", "payoff", "+=", "(", "payoff_matrix", "[", "(", "i", ",", "opp_supp", "[", "j", "]", ")", "]", "*", "out", "[", "j", "]", ")", "if", "(", "payoff", ">", "val", ")", ":", "return", "False", "return", "True" ]
given a players payoff matrix payoff_matrix , compute the opponent mixed action with support opp_supp that makes the player indifferent among the actions in own_supp , if any ; returns true and stores the action in out when the actions in own_supp are best responses to it .
train
false
48,242
def _parse_forward(mapping): if (len(mapping.split(':')) > 3): (srcport, destport, protocol, destaddr) = mapping.split(':') else: (srcport, destport, protocol) = mapping.split(':') destaddr = '' return ForwardingMapping(srcport, destport, protocol, destaddr)
[ "def", "_parse_forward", "(", "mapping", ")", ":", "if", "(", "len", "(", "mapping", ".", "split", "(", "':'", ")", ")", ">", "3", ")", ":", "(", "srcport", ",", "destport", ",", "protocol", ",", "destaddr", ")", "=", "mapping", ".", "split", "(", "':'", ")", "else", ":", "(", "srcport", ",", "destport", ",", "protocol", ")", "=", "mapping", ".", "split", "(", "':'", ")", "destaddr", "=", "''", "return", "ForwardingMapping", "(", "srcport", ",", "destport", ",", "protocol", ",", "destaddr", ")" ]
parses a port forwarding statement in the form used by this state: from_port:to_port:protocol[:destination] and returns a forwardingmapping object .
train
true
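To illustrate the three- and four-field forms accepted by _parse_forward, here is a sketch with a namedtuple stand-in for ForwardingMapping, which is not shown in the record.

import collections

# assumed stand-in for the ForwardingMapping class used by the snippet
ForwardingMapping = collections.namedtuple(
    'ForwardingMapping', ['srcport', 'destport', 'protocol', 'destaddr'])

def _parse_forward(mapping):
    if len(mapping.split(':')) > 3:
        srcport, destport, protocol, destaddr = mapping.split(':')
    else:
        srcport, destport, protocol = mapping.split(':')
        destaddr = ''  # destination defaults to empty when omitted
    return ForwardingMapping(srcport, destport, protocol, destaddr)

assert _parse_forward('8080:80:tcp').destaddr == ''
assert _parse_forward('8080:80:tcp:10.0.0.5').destaddr == '10.0.0.5'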
48,243
def survey_getAllSectionsForTemplate(template_id): sectable = current.s3db.survey_section query = (sectable.template_id == template_id) rows = current.db(query).select(sectable.id, sectable.name, sectable.template_id, sectable.posn, orderby=sectable.posn) sections = [] for sec in rows: sections.append({'section_id': sec.id, 'name': sec.name, 'template_id': sec.template_id, 'posn': sec.posn}) return sections
[ "def", "survey_getAllSectionsForTemplate", "(", "template_id", ")", ":", "sectable", "=", "current", ".", "s3db", ".", "survey_section", "query", "=", "(", "sectable", ".", "template_id", "==", "template_id", ")", "rows", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "sectable", ".", "id", ",", "sectable", ".", "name", ",", "sectable", ".", "template_id", ",", "sectable", ".", "posn", ",", "orderby", "=", "sectable", ".", "posn", ")", "sections", "=", "[", "]", "for", "sec", "in", "rows", ":", "sections", ".", "append", "(", "{", "'section_id'", ":", "sec", ".", "id", ",", "'name'", ":", "sec", ".", "name", ",", "'template_id'", ":", "sec", ".", "template_id", ",", "'posn'", ":", "sec", ".", "posn", "}", ")", "return", "sections" ]
function to return the list of sections for the given template . the sections are returned in the order of their position in the template .
train
false
48,244
def floatSecondsFromTimedelta(td): sec = (((td.days * _FLOAT_SECONDS_IN_A_DAY) + (td.seconds * 1.0)) + (td.microseconds / 1000000.0)) return sec
[ "def", "floatSecondsFromTimedelta", "(", "td", ")", ":", "sec", "=", "(", "(", "(", "td", ".", "days", "*", "_FLOAT_SECONDS_IN_A_DAY", ")", "+", "(", "td", ".", "seconds", "*", "1.0", ")", ")", "+", "(", "td", ".", "microseconds", "/", "1000000.0", ")", ")", "return", "sec" ]
convert a datetime.timedelta to seconds expressed as a float .
train
false
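A worked example for the record above; _FLOAT_SECONDS_IN_A_DAY is not shown in the snippet, so 86400.0 is assumed.

from datetime import timedelta

_FLOAT_SECONDS_IN_A_DAY = 86400.0  # assumed value of the module constant

def floatSecondsFromTimedelta(td):
    return ((td.days * _FLOAT_SECONDS_IN_A_DAY) + (td.seconds * 1.0)
            + (td.microseconds / 1000000.0))

# 1 day + 30 s + 500 us comes out as 86430.0005 s
sec = floatSecondsFromTimedelta(timedelta(days=1, seconds=30, microseconds=500))
assert abs(sec - 86430.0005) < 1e-9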
48,245
def prepare_labels(labels): d = {} count = 0 setlabels = set(labels) for w in setlabels: d[w] = count count += 1 idxlabels = np.array([d[w] for w in labels]) return idxlabels
[ "def", "prepare_labels", "(", "labels", ")", ":", "d", "=", "{", "}", "count", "=", "0", "setlabels", "=", "set", "(", "labels", ")", "for", "w", "in", "setlabels", ":", "d", "[", "w", "]", "=", "count", "count", "+=", "1", "idxlabels", "=", "np", ".", "array", "(", "[", "d", "[", "w", "]", "for", "w", "in", "labels", "]", ")", "return", "idxlabels" ]
process labels to numerical values .
train
false
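Because prepare_labels builds its mapping by iterating over a set, the numeric ids are not deterministic across runs; only the grouping is guaranteed. A condensed sketch:

import numpy as np

def prepare_labels(labels):
    d = {w: i for i, w in enumerate(set(labels))}  # arbitrary but consistent ids
    return np.array([d[w] for w in labels])

idx = prepare_labels(['cat', 'dog', 'cat', 'bird'])
# equal labels get equal ids, distinct labels get distinct ids
assert idx[0] == idx[2]
assert len({int(idx[0]), int(idx[1]), int(idx[3])}) == 3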
48,248
def get_python_executable(): executable = sys.executable.replace('pythonw.exe', 'python.exe') if executable.endswith('spyder.exe'): executable = 'python.exe' return executable
[ "def", "get_python_executable", "(", ")", ":", "executable", "=", "sys", ".", "executable", ".", "replace", "(", "'pythonw.exe'", ",", "'python.exe'", ")", "if", "executable", ".", "endswith", "(", "'spyder.exe'", ")", ":", "executable", "=", "'python.exe'", "return", "executable" ]
return path to python executable .
train
false
48,249
def categorize_users(user, source_event, source_node, event, node): remove = utils.users_to_remove(source_event, source_node, node) source_node_subs = compile_subscriptions(source_node, utils.find_subscription_type(source_event)) new_subs = compile_subscriptions(node, utils.find_subscription_type(source_event), event) move = subscriptions_users_union(source_node_subs, new_subs) warn = subscriptions_users_difference(source_node_subs, new_subs) (warn, remove) = subscriptions_node_permissions(node, warn, remove) warn = subscriptions_users_remove_duplicates(warn, new_subs, remove_same=False) move = subscriptions_users_remove_duplicates(move, new_subs, remove_same=False) move = subscriptions_users_remove_duplicates(move, warn, remove_same=True) move = subscriptions_users_remove_duplicates(move, remove, remove_same=True) for notifications in constants.NOTIFICATION_TYPES: user_id = user._id if (user_id in warn[notifications]): warn[notifications].remove(user_id) if (user_id in move[notifications]): move[notifications].remove(user_id) if (user_id in remove[notifications]): remove[notifications].remove(user_id) return (move, warn, remove)
[ "def", "categorize_users", "(", "user", ",", "source_event", ",", "source_node", ",", "event", ",", "node", ")", ":", "remove", "=", "utils", ".", "users_to_remove", "(", "source_event", ",", "source_node", ",", "node", ")", "source_node_subs", "=", "compile_subscriptions", "(", "source_node", ",", "utils", ".", "find_subscription_type", "(", "source_event", ")", ")", "new_subs", "=", "compile_subscriptions", "(", "node", ",", "utils", ".", "find_subscription_type", "(", "source_event", ")", ",", "event", ")", "move", "=", "subscriptions_users_union", "(", "source_node_subs", ",", "new_subs", ")", "warn", "=", "subscriptions_users_difference", "(", "source_node_subs", ",", "new_subs", ")", "(", "warn", ",", "remove", ")", "=", "subscriptions_node_permissions", "(", "node", ",", "warn", ",", "remove", ")", "warn", "=", "subscriptions_users_remove_duplicates", "(", "warn", ",", "new_subs", ",", "remove_same", "=", "False", ")", "move", "=", "subscriptions_users_remove_duplicates", "(", "move", ",", "new_subs", ",", "remove_same", "=", "False", ")", "move", "=", "subscriptions_users_remove_duplicates", "(", "move", ",", "warn", ",", "remove_same", "=", "True", ")", "move", "=", "subscriptions_users_remove_duplicates", "(", "move", ",", "remove", ",", "remove_same", "=", "True", ")", "for", "notifications", "in", "constants", ".", "NOTIFICATION_TYPES", ":", "user_id", "=", "user", ".", "_id", "if", "(", "user_id", "in", "warn", "[", "notifications", "]", ")", ":", "warn", "[", "notifications", "]", ".", "remove", "(", "user_id", ")", "if", "(", "user_id", "in", "move", "[", "notifications", "]", ")", ":", "move", "[", "notifications", "]", ".", "remove", "(", "user_id", ")", "if", "(", "user_id", "in", "remove", "[", "notifications", "]", ")", ":", "remove", "[", "notifications", "]", ".", "remove", "(", "user_id", ")", "return", "(", "move", ",", "warn", ",", "remove", ")" ]
categorize users from a file subscription into three categories .
train
false
48,250
def test_get_layer_monitor_channels(): mlp = MLP(layers=[FlattenerLayer(CompositeLayer('composite', [Linear(10, 'h0', 0.1), Linear(10, 'h1', 0.1)], {0: [1], 1: [0]})), Softmax(5, 'softmax', 0.1)], input_space=CompositeSpace([VectorSpace(15), VectorSpace(20)]), input_source=('features0', 'features1')) dataset = VectorSpacesDataset((np.random.rand(20, 20).astype(theano.config.floatX), np.random.rand(20, 15).astype(theano.config.floatX), np.random.rand(20, 5).astype(theano.config.floatX)), (CompositeSpace([VectorSpace(20), VectorSpace(15), VectorSpace(5)]), ('features1', 'features0', 'targets'))) state_below = mlp.get_input_space().make_theano_batch() targets = mlp.get_target_space().make_theano_batch() mlp.get_layer_monitoring_channels(state_below=state_below, state=None, targets=targets)
[ "def", "test_get_layer_monitor_channels", "(", ")", ":", "mlp", "=", "MLP", "(", "layers", "=", "[", "FlattenerLayer", "(", "CompositeLayer", "(", "'composite'", ",", "[", "Linear", "(", "10", ",", "'h0'", ",", "0.1", ")", ",", "Linear", "(", "10", ",", "'h1'", ",", "0.1", ")", "]", ",", "{", "0", ":", "[", "1", "]", ",", "1", ":", "[", "0", "]", "}", ")", ")", ",", "Softmax", "(", "5", ",", "'softmax'", ",", "0.1", ")", "]", ",", "input_space", "=", "CompositeSpace", "(", "[", "VectorSpace", "(", "15", ")", ",", "VectorSpace", "(", "20", ")", "]", ")", ",", "input_source", "=", "(", "'features0'", ",", "'features1'", ")", ")", "dataset", "=", "VectorSpacesDataset", "(", "(", "np", ".", "random", ".", "rand", "(", "20", ",", "20", ")", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")", ",", "np", ".", "random", ".", "rand", "(", "20", ",", "15", ")", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")", ",", "np", ".", "random", ".", "rand", "(", "20", ",", "5", ")", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")", ")", ",", "(", "CompositeSpace", "(", "[", "VectorSpace", "(", "20", ")", ",", "VectorSpace", "(", "15", ")", ",", "VectorSpace", "(", "5", ")", "]", ")", ",", "(", "'features1'", ",", "'features0'", ",", "'targets'", ")", ")", ")", "state_below", "=", "mlp", ".", "get_input_space", "(", ")", ".", "make_theano_batch", "(", ")", "targets", "=", "mlp", ".", "get_target_space", "(", ")", ".", "make_theano_batch", "(", ")", "mlp", ".", "get_layer_monitoring_channels", "(", "state_below", "=", "state_below", ",", "state", "=", "None", ",", "targets", "=", "targets", ")" ]
create an mlp with multiple layer types and get layer monitoring channels for the mlp .
train
false
48,251
def followee_count(context, data_dict): model = context['model'] followee_users = _followee_count(context, data_dict, model.UserFollowingUser) context['skip_validation'] = True followee_datasets = _followee_count(context, data_dict, model.UserFollowingDataset) followee_groups = _followee_count(context, data_dict, model.UserFollowingGroup) return sum((followee_users, followee_datasets, followee_groups))
[ "def", "followee_count", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "followee_users", "=", "_followee_count", "(", "context", ",", "data_dict", ",", "model", ".", "UserFollowingUser", ")", "context", "[", "'skip_validation'", "]", "=", "True", "followee_datasets", "=", "_followee_count", "(", "context", ",", "data_dict", ",", "model", ".", "UserFollowingDataset", ")", "followee_groups", "=", "_followee_count", "(", "context", ",", "data_dict", ",", "model", ".", "UserFollowingGroup", ")", "return", "sum", "(", "(", "followee_users", ",", "followee_datasets", ",", "followee_groups", ")", ")" ]
return the number of objects that are followed by the given user .
train
false
48,252
def clear_existing_modulestores(): global _MIXED_MODULESTORE _MIXED_MODULESTORE = None
[ "def", "clear_existing_modulestores", "(", ")", ":", "global", "_MIXED_MODULESTORE", "_MIXED_MODULESTORE", "=", "None" ]
clear the existing modulestore instances .
train
false
48,255
def _get_logger(self): try: logger = self.logger except AttributeError: return _logger else: if (logger is None): logger = _logger return logger
[ "def", "_get_logger", "(", "self", ")", ":", "try", ":", "logger", "=", "self", ".", "logger", "except", "AttributeError", ":", "return", "_logger", "else", ":", "if", "(", "logger", "is", "None", ")", ":", "logger", "=", "_logger", "return", "logger" ]
find the specific or default logger .
train
false
48,256
def Call(func_name, args=None, prefix=None): node = Node(syms.power, [func_name, ArgList(args)]) if (prefix is not None): node.prefix = prefix return node
[ "def", "Call", "(", "func_name", ",", "args", "=", "None", ",", "prefix", "=", "None", ")", ":", "node", "=", "Node", "(", "syms", ".", "power", ",", "[", "func_name", ",", "ArgList", "(", "args", ")", "]", ")", "if", "(", "prefix", "is", "not", "None", ")", ":", "node", ".", "prefix", "=", "prefix", "return", "node" ]
a function call .
train
true
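Assuming lib2to3 is still importable (it is deprecated in recent Python releases), the Call helper above builds a call node like so:

from lib2to3.fixer_util import Call, Name

node = Call(Name('foo'), [])  # build the tree for the source "foo()"
assert str(node) == 'foo()'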
48,257
def namedAny(name): if (not name): raise InvalidName('Empty module name') names = name.split('.') if ('' in names): raise InvalidName(("name must be a string giving a '.'-separated list of Python identifiers, not %r" % (name,))) topLevelPackage = None moduleNames = names[:] while (not topLevelPackage): if moduleNames: trialname = '.'.join(moduleNames) try: topLevelPackage = _importAndCheckStack(trialname) except _NoModuleFound: moduleNames.pop() elif (len(names) == 1): raise ModuleNotFound(('No module named %r' % (name,))) else: raise ObjectNotFound(('%r does not name an object' % (name,))) obj = topLevelPackage for n in names[1:]: obj = getattr(obj, n) return obj
[ "def", "namedAny", "(", "name", ")", ":", "if", "(", "not", "name", ")", ":", "raise", "InvalidName", "(", "'Empty module name'", ")", "names", "=", "name", ".", "split", "(", "'.'", ")", "if", "(", "''", "in", "names", ")", ":", "raise", "InvalidName", "(", "(", "\"name must be a string giving a '.'-separated list of Python identifiers, not %r\"", "%", "(", "name", ",", ")", ")", ")", "topLevelPackage", "=", "None", "moduleNames", "=", "names", "[", ":", "]", "while", "(", "not", "topLevelPackage", ")", ":", "if", "moduleNames", ":", "trialname", "=", "'.'", ".", "join", "(", "moduleNames", ")", "try", ":", "topLevelPackage", "=", "_importAndCheckStack", "(", "trialname", ")", "except", "_NoModuleFound", ":", "moduleNames", ".", "pop", "(", ")", "elif", "(", "len", "(", "names", ")", "==", "1", ")", ":", "raise", "ModuleNotFound", "(", "(", "'No module named %r'", "%", "(", "name", ",", ")", ")", ")", "else", ":", "raise", "ObjectNotFound", "(", "(", "'%r does not name an object'", "%", "(", "name", ",", ")", ")", ")", "obj", "=", "topLevelPackage", "for", "n", "in", "names", "[", "1", ":", "]", ":", "obj", "=", "getattr", "(", "obj", ",", "n", ")", "return", "obj" ]
retrieve a python object by its fully qualified name from the global python module namespace .
train
true
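A hypothetical simplified analogue of namedAny using importlib shows the same longest-importable-prefix strategy; the snippet's dedicated error classes are omitted here.

from importlib import import_module

def named_any_simplified(name):
    parts = name.split('.')
    for i in range(len(parts), 0, -1):
        try:
            obj = import_module('.'.join(parts[:i]))  # longest importable prefix
        except ImportError:
            continue
        for attr in parts[i:]:
            obj = getattr(obj, attr)  # walk the remaining attribute path
        return obj
    raise ImportError(name)

import os.path
assert named_any_simplified('os.path.join') is os.path.join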
48,258
def test_only_one_value_log(Chart): chart = Chart(logarithmic=True) chart.add('S', [1]) if (not chart._dual): chart.x_labels = 'single' q = chart.render_pyquery() assert (len(q('.legend')) == 1)
[ "def", "test_only_one_value_log", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", "logarithmic", "=", "True", ")", "chart", ".", "add", "(", "'S'", ",", "[", "1", "]", ")", "if", "(", "not", "chart", ".", "_dual", ")", ":", "chart", ".", "x_labels", "=", "'single'", "q", "=", "chart", ".", "render_pyquery", "(", ")", "assert", "(", "len", "(", "q", "(", "'.legend'", ")", ")", "==", "1", ")" ]
test logarithmic chart rendering with only one value .
train
false
48,259
def Eijk(*args, **kwargs): return LeviCivita(*args, **kwargs)
[ "def", "Eijk", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "LeviCivita", "(", "*", "args", ",", "**", "kwargs", ")" ]
represent the levi-civita symbol .
train
false
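Assuming sympy is installed, the Eijk alias evaluates exactly like LeviCivita:

from sympy import LeviCivita

def Eijk(*args, **kwargs):
    return LeviCivita(*args, **kwargs)

assert Eijk(1, 2, 3) == 1   # even permutation
assert Eijk(2, 1, 3) == -1  # odd permutation
assert Eijk(1, 1, 3) == 0   # repeated index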
48,260
def get_pack_by_ref(pack_ref): pack_db = Pack.get_by_ref(pack_ref) return pack_db
[ "def", "get_pack_by_ref", "(", "pack_ref", ")", ":", "pack_db", "=", "Pack", ".", "get_by_ref", "(", "pack_ref", ")", "return", "pack_db" ]
retrieve packdb by the provided reference .
train
false
48,262
def volume_overlay2(ax, closes, volumes, colorup='k', colordown='r', width=4, alpha=1.0): return volume_overlay(ax, closes[:(-1)], closes[1:], volumes[1:], colorup, colordown, width, alpha)
[ "def", "volume_overlay2", "(", "ax", ",", "closes", ",", "volumes", ",", "colorup", "=", "'k'", ",", "colordown", "=", "'r'", ",", "width", "=", "4", ",", "alpha", "=", "1.0", ")", ":", "return", "volume_overlay", "(", "ax", ",", "closes", "[", ":", "(", "-", "1", ")", "]", ",", "closes", "[", "1", ":", "]", ",", "volumes", "[", "1", ":", "]", ",", "colorup", ",", "colordown", ",", "width", ",", "alpha", ")" ]
add a volume overlay to the current axes .
train
false
48,265
def _sympifyit(arg, retval=None): def deco(func): return __sympifyit(func, arg, retval) return deco
[ "def", "_sympifyit", "(", "arg", ",", "retval", "=", "None", ")", ":", "def", "deco", "(", "func", ")", ":", "return", "__sympifyit", "(", "func", ",", "arg", ",", "retval", ")", "return", "deco" ]
decorator to smartly _sympify function arguments ; applied as @_sympifyit to a method such as add , the named argument arg is sympified before the method body runs and retval is returned if sympification fails .
train
false
48,266
@requires_duration def fadeout(clip, duration, final_color=None): if (final_color is None): final_color = (0 if clip.ismask else [0, 0, 0]) final_color = np.array(final_color) def fl(gf, t): if ((clip.duration - t) >= duration): return gf(t) else: fading = ((1.0 * (clip.duration - t)) / duration) return ((fading * gf(t)) + ((1 - fading) * final_color)) return clip.fl(fl)
[ "@", "requires_duration", "def", "fadeout", "(", "clip", ",", "duration", ",", "final_color", "=", "None", ")", ":", "if", "(", "final_color", "is", "None", ")", ":", "final_color", "=", "(", "0", "if", "clip", ".", "ismask", "else", "[", "0", ",", "0", ",", "0", "]", ")", "final_color", "=", "np", ".", "array", "(", "final_color", ")", "def", "fl", "(", "gf", ",", "t", ")", ":", "if", "(", "(", "clip", ".", "duration", "-", "t", ")", ">=", "duration", ")", ":", "return", "gf", "(", "t", ")", "else", ":", "fading", "=", "(", "(", "1.0", "*", "(", "clip", ".", "duration", "-", "t", ")", ")", "/", "duration", ")", "return", "(", "(", "fading", "*", "gf", "(", "t", ")", ")", "+", "(", "(", "1", "-", "fading", ")", "*", "final_color", ")", ")", "return", "clip", ".", "fl", "(", "fl", ")" ]
makes the clip progressively fade to some color .
train
false
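The fadeout blend weight is linear in the remaining time; a quick numeric check of the formula, with hypothetical durations:

clip_duration, fade_duration = 10.0, 2.0
t = 9.0  # one second before the end of the clip
fading = (1.0 * (clip_duration - t)) / fade_duration
# halfway through the fade the frame is a 50/50 blend with final_color
assert fading == 0.5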
48,267
@task def build_index_sitemap(results): sitemap_parts = [SITEMAP_START] for result in results: if (result is not None): (locale, names, timestamp) = result for name in names: sitemap_url = absolutify(('/sitemaps/%s/%s' % (locale, name))) sitemap_parts.append((SITEMAP_ELEMENT % (sitemap_url, timestamp))) sitemap_parts.append(SITEMAP_END) index_path = os.path.join(settings.MEDIA_ROOT, 'sitemap.xml') sitemap_tree = etree.fromstringlist(sitemap_parts) with open(index_path, 'w') as index_file: sitemap_tree.getroottree().write(index_file, encoding='utf-8', pretty_print=True)
[ "@", "task", "def", "build_index_sitemap", "(", "results", ")", ":", "sitemap_parts", "=", "[", "SITEMAP_START", "]", "for", "result", "in", "results", ":", "if", "(", "result", "is", "not", "None", ")", ":", "(", "locale", ",", "names", ",", "timestamp", ")", "=", "result", "for", "name", "in", "names", ":", "sitemap_url", "=", "absolutify", "(", "(", "'/sitemaps/%s/%s'", "%", "(", "locale", ",", "name", ")", ")", ")", "sitemap_parts", ".", "append", "(", "(", "SITEMAP_ELEMENT", "%", "(", "sitemap_url", ",", "timestamp", ")", ")", ")", "sitemap_parts", ".", "append", "(", "SITEMAP_END", ")", "index_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "'sitemap.xml'", ")", "sitemap_tree", "=", "etree", ".", "fromstringlist", "(", "sitemap_parts", ")", "with", "open", "(", "index_path", ",", "'w'", ")", "as", "index_file", ":", "sitemap_tree", ".", "getroottree", "(", ")", ".", "write", "(", "index_file", ",", "encoding", "=", "'utf-8'", ",", "pretty_print", "=", "True", ")" ]
a chord callback task that writes a sitemap index file for the given results of :func:~kuma .
train
false
48,268
def test_checkerboard(): data.checkerboard()
[ "def", "test_checkerboard", "(", ")", ":", "data", ".", "checkerboard", "(", ")" ]
test that "checkerboard" image can be loaded .
train
false
48,269
def fourier_series(f, limits=None): f = sympify(f) limits = _process_limits(f, limits) x = limits[0] if (x not in f.free_symbols): return f n = Dummy('n') neg_f = f.subs(x, (- x)) if (f == neg_f): (a0, an) = fourier_cos_seq(f, limits, n) bn = SeqFormula(0, (1, oo)) elif (f == (- neg_f)): a0 = S.Zero an = SeqFormula(0, (1, oo)) bn = fourier_sin_seq(f, limits, n) else: (a0, an) = fourier_cos_seq(f, limits, n) bn = fourier_sin_seq(f, limits, n) return FourierSeries(f, limits, (a0, an, bn))
[ "def", "fourier_series", "(", "f", ",", "limits", "=", "None", ")", ":", "f", "=", "sympify", "(", "f", ")", "limits", "=", "_process_limits", "(", "f", ",", "limits", ")", "x", "=", "limits", "[", "0", "]", "if", "(", "x", "not", "in", "f", ".", "free_symbols", ")", ":", "return", "f", "n", "=", "Dummy", "(", "'n'", ")", "neg_f", "=", "f", ".", "subs", "(", "x", ",", "(", "-", "x", ")", ")", "if", "(", "f", "==", "neg_f", ")", ":", "(", "a0", ",", "an", ")", "=", "fourier_cos_seq", "(", "f", ",", "limits", ",", "n", ")", "bn", "=", "SeqFormula", "(", "0", ",", "(", "1", ",", "oo", ")", ")", "elif", "(", "f", "==", "(", "-", "neg_f", ")", ")", ":", "a0", "=", "S", ".", "Zero", "an", "=", "SeqFormula", "(", "0", ",", "(", "1", ",", "oo", ")", ")", "bn", "=", "fourier_sin_seq", "(", "f", ",", "limits", ",", "n", ")", "else", ":", "(", "a0", ",", "an", ")", "=", "fourier_cos_seq", "(", "f", ",", "limits", ",", "n", ")", "bn", "=", "fourier_sin_seq", "(", "f", ",", "limits", ",", "n", ")", "return", "FourierSeries", "(", "f", ",", "limits", ",", "(", "a0", ",", "an", ",", "bn", ")", ")" ]
computes fourier sine/cosine series expansion .
train
false
48,270
def notfound(): ctx.status = '404 Not Found' header('Content-Type', 'text/html') return output('not found')
[ "def", "notfound", "(", ")", ":", "ctx", ".", "status", "=", "'404 Not Found'", "header", "(", "'Content-Type'", ",", "'text/html'", ")", "return", "output", "(", "'not found'", ")" ]
returns a 404 not found error .
train
false
48,273
def baseline_type(): assess_tables() return s3_rest_controller()
[ "def", "baseline_type", "(", ")", ":", "assess_tables", "(", ")", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
48,275
@register.simple_tag(name='microsite_css_overrides_file') def microsite_css_overrides_file(): file_path = configuration_helpers.get_value('css_overrides_file', None) if (file_path is not None): return "<link href='{}' rel='stylesheet' type='text/css'>".format(static(file_path)) else: return ''
[ "@", "register", ".", "simple_tag", "(", "name", "=", "'microsite_css_overrides_file'", ")", "def", "microsite_css_overrides_file", "(", ")", ":", "file_path", "=", "configuration_helpers", ".", "get_value", "(", "'css_overrides_file'", ",", "None", ")", "if", "(", "file_path", "is", "not", "None", ")", ":", "return", "\"<link href='{}' rel='stylesheet' type='text/css'>\"", ".", "format", "(", "static", "(", "file_path", ")", ")", "else", ":", "return", "''" ]
django template tag that outputs the css import for a microsite : {% microsite_css_overrides_file %} .
train
false
48,277
def json_dict_unicode_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'): if isinstance(d, text_type): return to_bytes(d, encoding=encoding, errors=errors) elif isinstance(d, dict): return dict(map(json_dict_unicode_to_bytes, iteritems(d), repeat(encoding), repeat(errors))) elif isinstance(d, list): return list(map(json_dict_unicode_to_bytes, d, repeat(encoding), repeat(errors))) elif isinstance(d, tuple): return tuple(map(json_dict_unicode_to_bytes, d, repeat(encoding), repeat(errors))) else: return d
[ "def", "json_dict_unicode_to_bytes", "(", "d", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'surrogate_or_strict'", ")", ":", "if", "isinstance", "(", "d", ",", "text_type", ")", ":", "return", "to_bytes", "(", "d", ",", "encoding", "=", "encoding", ",", "errors", "=", "errors", ")", "elif", "isinstance", "(", "d", ",", "dict", ")", ":", "return", "dict", "(", "map", "(", "json_dict_unicode_to_bytes", ",", "iteritems", "(", "d", ")", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "elif", "isinstance", "(", "d", ",", "list", ")", ":", "return", "list", "(", "map", "(", "json_dict_unicode_to_bytes", ",", "d", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "elif", "isinstance", "(", "d", ",", "tuple", ")", ":", "return", "tuple", "(", "map", "(", "json_dict_unicode_to_bytes", ",", "d", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "else", ":", "return", "d" ]
recursively convert dict keys and values to byte str . specialized for json return because this only handles lists , tuples and dict container types .
train
false
48,278
def internalcode(f): internal_code.add(f.__code__) return f
[ "def", "internalcode", "(", "f", ")", ":", "internal_code", ".", "add", "(", "f", ".", "__code__", ")", "return", "f" ]
marks the function as internally used .
train
false
48,279
def post_listing_to_slack(sc, listing): desc = '{0} | {1} | {2} | {3} | <{4}>'.format(listing['area'], listing['price'], listing['bart_dist'], listing['name'], listing['url']) sc.api_call('chat.postMessage', channel=settings.SLACK_CHANNEL, text=desc, username='pybot', icon_emoji=':robot_face:')
[ "def", "post_listing_to_slack", "(", "sc", ",", "listing", ")", ":", "desc", "=", "'{0} | {1} | {2} | {3} | <{4}>'", ".", "format", "(", "listing", "[", "'area'", "]", ",", "listing", "[", "'price'", "]", ",", "listing", "[", "'bart_dist'", "]", ",", "listing", "[", "'name'", "]", ",", "listing", "[", "'url'", "]", ")", "sc", ".", "api_call", "(", "'chat.postMessage'", ",", "channel", "=", "settings", ".", "SLACK_CHANNEL", ",", "text", "=", "desc", ",", "username", "=", "'pybot'", ",", "icon_emoji", "=", "':robot_face:'", ")" ]
posts the listing to slack .
train
false
48,280
def parse_items(fp): with open_file(fp, 'U') as f: items = f.read().strip('\n').split('\n') if (items == ['']): items = [] return items
[ "def", "parse_items", "(", "fp", ")", ":", "with", "open_file", "(", "fp", ",", "'U'", ")", "as", "f", ":", "items", "=", "f", ".", "read", "(", ")", ".", "strip", "(", "'\\n'", ")", ".", "split", "(", "'\\n'", ")", "if", "(", "items", "==", "[", "''", "]", ")", ":", "items", "=", "[", "]", "return", "items" ]
parse items from a file where each item is in a different line . parameters : fp ( str/bytes/unicode string or file-like ) - filepath or file-like object to parse .
train
false
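A sketch of parse_items under the assumption that open_file accepts file-like objects (the helper is not shown in the record); Python 2's universal-newline 'U' mode is replaced by plain text mode here.

import io

def open_file(fp, mode='r'):
    # assumed stand-in: pass file-like objects through, open paths
    return open(fp, mode) if isinstance(fp, str) else fp

def parse_items(fp):
    with open_file(fp) as f:
        items = f.read().strip('\n').split('\n')
    return [] if items == [''] else items

assert parse_items(io.StringIO('a\nb\n')) == ['a', 'b']
assert parse_items(io.StringIO('')) == []  # empty input yields an empty list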
48,282
def merge_setting(request_setting, session_setting, dict_class=OrderedDict): if (session_setting is None): return request_setting if (request_setting is None): return session_setting if (not (isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping))): return request_setting merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) for (k, v) in request_setting.items(): if (v is None): del merged_setting[k] merged_setting = dict(((k, v) for (k, v) in merged_setting.items() if (v is not None))) return merged_setting
[ "def", "merge_setting", "(", "request_setting", ",", "session_setting", ",", "dict_class", "=", "OrderedDict", ")", ":", "if", "(", "session_setting", "is", "None", ")", ":", "return", "request_setting", "if", "(", "request_setting", "is", "None", ")", ":", "return", "session_setting", "if", "(", "not", "(", "isinstance", "(", "session_setting", ",", "Mapping", ")", "and", "isinstance", "(", "request_setting", ",", "Mapping", ")", ")", ")", ":", "return", "request_setting", "merged_setting", "=", "dict_class", "(", "to_key_val_list", "(", "session_setting", ")", ")", "merged_setting", ".", "update", "(", "to_key_val_list", "(", "request_setting", ")", ")", "for", "(", "k", ",", "v", ")", "in", "request_setting", ".", "items", "(", ")", ":", "if", "(", "v", "is", "None", ")", ":", "del", "merged_setting", "[", "k", "]", "merged_setting", "=", "dict", "(", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "merged_setting", ".", "items", "(", ")", "if", "(", "v", "is", "not", "None", ")", ")", ")", "return", "merged_setting" ]
determines appropriate setting for a given request .
train
true
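A condensed sketch of merge_setting (the original's delete loop and final comprehension collapse into one step); to_key_val_list is assumed to pass mappings through as item lists.

from collections import OrderedDict
from collections.abc import Mapping

def to_key_val_list(value):
    # assumed stand-in for the helper used by the snippet
    return list(value.items()) if isinstance(value, Mapping) else value

def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting
    if not (isinstance(session_setting, Mapping)
            and isinstance(request_setting, Mapping)):
        return request_setting
    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))
    # a request-level None deletes the session-level key
    return {k: v for k, v in merged.items() if v is not None}

merged = merge_setting({'X-Trace': None, 'Accept': 'text/html'},
                       {'Accept': 'application/json', 'X-Trace': '1'})
assert merged == {'Accept': 'text/html'}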
48,284
def format_allowed_section(allowed): if (allowed.count(':') == 0): protocol = allowed ports = [] elif (allowed.count(':') == 1): (protocol, ports) = allowed.split(':') else: return [] if ports.count(','): ports = ports.split(',') else: ports = [ports] return_val = {'IPProtocol': protocol} if ports: return_val['ports'] = ports return return_val
[ "def", "format_allowed_section", "(", "allowed", ")", ":", "if", "(", "allowed", ".", "count", "(", "':'", ")", "==", "0", ")", ":", "protocol", "=", "allowed", "ports", "=", "[", "]", "elif", "(", "allowed", ".", "count", "(", "':'", ")", "==", "1", ")", ":", "(", "protocol", ",", "ports", ")", "=", "allowed", ".", "split", "(", "':'", ")", "else", ":", "return", "[", "]", "if", "ports", ".", "count", "(", "','", ")", ":", "ports", "=", "ports", ".", "split", "(", "','", ")", "else", ":", "ports", "=", "[", "ports", "]", "return_val", "=", "{", "'IPProtocol'", ":", "protocol", "}", "if", "ports", ":", "return_val", "[", "'ports'", "]", "=", "ports", "return", "return_val" ]
format each section of the allowed list .
train
false
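Two representative inputs for format_allowed_section, using the record's logic verbatim (the bare-protocol branch wraps its empty list oddly, a quirk left untouched here):

def format_allowed_section(allowed):
    if allowed.count(':') == 0:
        protocol = allowed
        ports = []
    elif allowed.count(':') == 1:
        protocol, ports = allowed.split(':')
    else:
        return []
    if ports.count(','):
        ports = ports.split(',')
    else:
        ports = [ports]
    return_val = {'IPProtocol': protocol}
    if ports:
        return_val['ports'] = ports
    return return_val

assert format_allowed_section('tcp:80') == {'IPProtocol': 'tcp', 'ports': ['80']}
assert format_allowed_section('tcp:80,443') == {'IPProtocol': 'tcp',
                                                'ports': ['80', '443']}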
48,285
def egg_link_path(dist): return (os.path.join(site_packages, dist.project_name) + '.egg-link')
[ "def", "egg_link_path", "(", "dist", ")", ":", "return", "(", "os", ".", "path", ".", "join", "(", "site_packages", ",", "dist", ".", "project_name", ")", "+", "'.egg-link'", ")" ]
return the path where we would expect to find a .egg-link file for this distribution .
train
false
48,286
def test_accumulate_normals(): n_pts = int(160000.0) n_tris = int(320000.0) tris = (rng.rand(n_tris, 1) * (n_pts - 2)).astype(int) tris = np.c_[(tris, (tris + 1), (tris + 2))] tri_nn = rng.rand(n_tris, 3) this = dict(tris=tris, np=n_pts, ntri=n_tris, tri_nn=tri_nn) this['nn'] = np.zeros((this['np'], 3)) for p in range(this['ntri']): verts = this['tris'][p] this['nn'][verts, :] += this['tri_nn'][p, :] nn = _accumulate_normals(this['tris'], this['tri_nn'], this['np']) assert_allclose(nn, this['nn'], rtol=1e-07, atol=1e-07)
[ "def", "test_accumulate_normals", "(", ")", ":", "n_pts", "=", "int", "(", "160000.0", ")", "n_tris", "=", "int", "(", "320000.0", ")", "tris", "=", "(", "rng", ".", "rand", "(", "n_tris", ",", "1", ")", "*", "(", "n_pts", "-", "2", ")", ")", ".", "astype", "(", "int", ")", "tris", "=", "np", ".", "c_", "[", "(", "tris", ",", "(", "tris", "+", "1", ")", ",", "(", "tris", "+", "2", ")", ")", "]", "tri_nn", "=", "rng", ".", "rand", "(", "n_tris", ",", "3", ")", "this", "=", "dict", "(", "tris", "=", "tris", ",", "np", "=", "n_pts", ",", "ntri", "=", "n_tris", ",", "tri_nn", "=", "tri_nn", ")", "this", "[", "'nn'", "]", "=", "np", ".", "zeros", "(", "(", "this", "[", "'np'", "]", ",", "3", ")", ")", "for", "p", "in", "range", "(", "this", "[", "'ntri'", "]", ")", ":", "verts", "=", "this", "[", "'tris'", "]", "[", "p", "]", "this", "[", "'nn'", "]", "[", "verts", ",", ":", "]", "+=", "this", "[", "'tri_nn'", "]", "[", "p", ",", ":", "]", "nn", "=", "_accumulate_normals", "(", "this", "[", "'tris'", "]", ",", "this", "[", "'tri_nn'", "]", ",", "this", "[", "'np'", "]", ")", "assert_allclose", "(", "nn", ",", "this", "[", "'nn'", "]", ",", "rtol", "=", "1e-07", ",", "atol", "=", "1e-07", ")" ]
test efficient normal accumulation for surfaces .
train
false
48,289
def list_dict(l): d = {} for i in l: d[i] = None return d
[ "def", "list_dict", "(", "l", ")", ":", "d", "=", "{", "}", "for", "i", "in", "l", ":", "d", "[", "i", "]", "=", "None", "return", "d" ]
return a dictionary with all items of l being the keys of the dictionary .
train
false
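Duplicates collapse into a single key in list_dict; dict.fromkeys is the idiomatic one-liner equivalent:

def list_dict(l):
    d = {}
    for i in l:
        d[i] = None
    return d

assert list_dict(['a', 'b', 'a']) == {'a': None, 'b': None}
assert list_dict(['a', 'b', 'a']) == dict.fromkeys(['a', 'b', 'a'])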
48,290
def elem_quote(member, nonquote=True, stringify=False, encoding=None): if (not isinstance(member, basestring)): if stringify: member = str(member) else: raise TypeError(('Can only quote strings. "%s"' % str(member))) if (encoding and isinstance(member, str)): member = unicode(member, encoding) if ('\n' in member): raise QuoteError(('Multiline values can\'t be quoted.\n"%s"' % str(member))) if (nonquote and (badchars.match(member) is not None)): return member elif (member.find('"') == (-1)): return ('"%s"' % member) elif (member.find("'") == (-1)): return ("'%s'" % member) else: raise QuoteError(('Value can\'t be quoted : "%s"' % member))
[ "def", "elem_quote", "(", "member", ",", "nonquote", "=", "True", ",", "stringify", "=", "False", ",", "encoding", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "member", ",", "basestring", ")", ")", ":", "if", "stringify", ":", "member", "=", "str", "(", "member", ")", "else", ":", "raise", "TypeError", "(", "(", "'Can only quote strings. \"%s\"'", "%", "str", "(", "member", ")", ")", ")", "if", "(", "encoding", "and", "isinstance", "(", "member", ",", "str", ")", ")", ":", "member", "=", "unicode", "(", "member", ",", "encoding", ")", "if", "(", "'\\n'", "in", "member", ")", ":", "raise", "QuoteError", "(", "(", "'Multiline values can\\'t be quoted.\\n\"%s\"'", "%", "str", "(", "member", ")", ")", ")", "if", "(", "nonquote", "and", "(", "badchars", ".", "match", "(", "member", ")", "is", "not", "None", ")", ")", ":", "return", "member", "elif", "(", "member", ".", "find", "(", "'\"'", ")", "==", "(", "-", "1", ")", ")", ":", "return", "(", "'\"%s\"'", "%", "member", ")", "elif", "(", "member", ".", "find", "(", "\"'\"", ")", "==", "(", "-", "1", ")", ")", ":", "return", "(", "\"'%s'\"", "%", "member", ")", "else", ":", "raise", "QuoteError", "(", "(", "'Value can\\'t be quoted : \"%s\"'", "%", "member", ")", ")" ]
simple method to add the most appropriate quote to an element - either single quotes or double quotes .
train
false
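A simplified Python 3 sketch of elem_quote's quoting rule only (the nonquote/badchars and encoding branches depend on helpers not shown and are dropped):

def quote_element(member):
    if '\n' in member:
        raise ValueError('multiline values cannot be quoted')
    if '"' not in member:
        return '"%s"' % member  # prefer double quotes
    if "'" not in member:
        return "'%s'" % member  # fall back to single quotes
    raise ValueError('value cannot be quoted')

assert quote_element('hello') == '"hello"'
assert quote_element('say "hi"') == '\'say "hi"\''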
48,291
def optimise(): with cd('/home/web2py/'): run('python web2py.py -S eden -M -R applications/eden/static/scripts/tools/indexes.py', pty=True) run('python web2py.py -S eden -M -R applications/eden/static/scripts/tools/compile.py', pty=True)
[ "def", "optimise", "(", ")", ":", "with", "cd", "(", "'/home/web2py/'", ")", ":", "run", "(", "'python web2py.py -S eden -M -R applications/eden/static/scripts/tools/indexes.py'", ",", "pty", "=", "True", ")", "run", "(", "'python web2py.py -S eden -M -R applications/eden/static/scripts/tools/compile.py'", ",", "pty", "=", "True", ")" ]
apply optimisation .
train
false
48,292
def get_humidity(): return _sensehat.get_humidity()
[ "def", "get_humidity", "(", ")", ":", "return", "_sensehat", ".", "get_humidity", "(", ")" ]
get the percentage of relative humidity from the humidity sensor .
train
false
48,293
def returns_typeclass_list(method): def func(self, *args, **kwargs): self.__doc__ = method.__doc__ raw_queryset = kwargs.pop('raw_queryset', False) result = method(self, *args, **kwargs) if raw_queryset: return result else: return list(result) return update_wrapper(func, method)
[ "def", "returns_typeclass_list", "(", "method", ")", ":", "def", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "self", ".", "__doc__", "=", "method", ".", "__doc__", "raw_queryset", "=", "kwargs", ".", "pop", "(", "'raw_queryset'", ",", "False", ")", "result", "=", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "if", "raw_queryset", ":", "return", "result", "else", ":", "return", "list", "(", "result", ")", "return", "update_wrapper", "(", "func", ",", "method", ")" ]
decorator: always returns a list .
train
false
48,295
def is_user_eligible_for_credit(username, course_key): return CreditEligibility.is_user_eligible_for_credit(course_key, username)
[ "def", "is_user_eligible_for_credit", "(", "username", ",", "course_key", ")", ":", "return", "CreditEligibility", ".", "is_user_eligible_for_credit", "(", "course_key", ",", "username", ")" ]
returns a boolean indicating if the user is eligible for credit for the given course . args : username - the identifier for user ; course_key - the identifier for course . returns : true if user is eligible for the course else false .
train
false
48,296
def roll(x, shift, axis=None): if (axis is None): if (x.ndim > 1): y = x.flatten() return roll(y, shift, axis=0).reshape(x.shape) else: axis = 0 if (axis < 0): axis += x.ndim shift = (shift % x.shape[axis]) allslice = slice(None) front_slice = slice((- shift), None) front_list = ((([allslice] * axis) + [front_slice]) + ([allslice] * ((x.ndim - axis) - 1))) end_slice = slice(0, (- shift)) end_list = ((([allslice] * axis) + [end_slice]) + ([allslice] * ((x.ndim - axis) - 1))) return join(axis, x.__getitem__(tuple(front_list)), x.__getitem__(tuple(end_list)))
[ "def", "roll", "(", "x", ",", "shift", ",", "axis", "=", "None", ")", ":", "if", "(", "axis", "is", "None", ")", ":", "if", "(", "x", ".", "ndim", ">", "1", ")", ":", "y", "=", "x", ".", "flatten", "(", ")", "return", "roll", "(", "y", ",", "shift", ",", "axis", "=", "0", ")", ".", "reshape", "(", "x", ".", "shape", ")", "else", ":", "axis", "=", "0", "if", "(", "axis", "<", "0", ")", ":", "axis", "+=", "x", ".", "ndim", "shift", "=", "(", "shift", "%", "x", ".", "shape", "[", "axis", "]", ")", "allslice", "=", "slice", "(", "None", ")", "front_slice", "=", "slice", "(", "(", "-", "shift", ")", ",", "None", ")", "front_list", "=", "(", "(", "(", "[", "allslice", "]", "*", "axis", ")", "+", "[", "front_slice", "]", ")", "+", "(", "[", "allslice", "]", "*", "(", "(", "x", ".", "ndim", "-", "axis", ")", "-", "1", ")", ")", ")", "end_slice", "=", "slice", "(", "0", ",", "(", "-", "shift", ")", ")", "end_list", "=", "(", "(", "(", "[", "allslice", "]", "*", "axis", ")", "+", "[", "end_slice", "]", ")", "+", "(", "[", "allslice", "]", "*", "(", "(", "x", ".", "ndim", "-", "axis", ")", "-", "1", ")", ")", ")", "return", "join", "(", "axis", ",", "x", ".", "__getitem__", "(", "tuple", "(", "front_list", ")", ")", ",", "x", ".", "__getitem__", "(", "tuple", "(", "end_list", ")", ")", ")" ]
convenience function to roll tensortypes along the given axis .
train
false
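The slicing in roll matches numpy's semantics for positive shifts, which gives a quick way to sanity-check the result:

import numpy as np

x = np.arange(6).reshape(2, 3)
# shifting by 1 along axis=1 wraps each row's last element to the front
expected = np.array([[2, 0, 1], [5, 3, 4]])
assert (np.roll(x, 1, axis=1) == expected).all()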
48,297
def vargenerate(ar, u, initvalues=None): (nlags, nvars, nvarsex) = ar.shape nlagsm1 = (nlags - 1) nobs = u.shape[0] if (nvars != nvarsex): print('exogenous variables not implemented not tested') if (u.shape[1] != nvars): raise ValueError('u needs to have nvars columns') if (initvalues is None): sar = np.zeros(((nobs + nlagsm1), nvars)) start = nlagsm1 else: start = max(nlagsm1, initvalues.shape[0]) sar = np.zeros(((nobs + start), nvars)) sar[(start - initvalues.shape[0]):start] = initvalues sar[start:] = u for i in range(start, (start + nobs)): for p in range(1, nlags): sar[i] += np.dot(sar[(i - p), :], (- ar[p])) return sar
[ "def", "vargenerate", "(", "ar", ",", "u", ",", "initvalues", "=", "None", ")", ":", "(", "nlags", ",", "nvars", ",", "nvarsex", ")", "=", "ar", ".", "shape", "nlagsm1", "=", "(", "nlags", "-", "1", ")", "nobs", "=", "u", ".", "shape", "[", "0", "]", "if", "(", "nvars", "!=", "nvarsex", ")", ":", "print", "(", "'exogenous variables not implemented not tested'", ")", "if", "(", "u", ".", "shape", "[", "1", "]", "!=", "nvars", ")", ":", "raise", "ValueError", "(", "'u needs to have nvars columns'", ")", "if", "(", "initvalues", "is", "None", ")", ":", "sar", "=", "np", ".", "zeros", "(", "(", "(", "nobs", "+", "nlagsm1", ")", ",", "nvars", ")", ")", "start", "=", "nlagsm1", "else", ":", "start", "=", "max", "(", "nlagsm1", ",", "initvalues", ".", "shape", "[", "0", "]", ")", "sar", "=", "np", ".", "zeros", "(", "(", "(", "nobs", "+", "start", ")", ",", "nvars", ")", ")", "sar", "[", "(", "start", "-", "initvalues", ".", "shape", "[", "0", "]", ")", ":", "start", "]", "=", "initvalues", "sar", "[", "start", ":", "]", "=", "u", "for", "i", "in", "range", "(", "start", ",", "(", "start", "+", "nobs", ")", ")", ":", "for", "p", "in", "range", "(", "1", ",", "nlags", ")", ":", "sar", "[", "i", "]", "+=", "np", ".", "dot", "(", "sar", "[", "(", "i", "-", "p", ")", ",", ":", "]", ",", "(", "-", "ar", "[", "p", "]", ")", ")", "return", "sar" ]
generate a var process with errors u , similar to gauss ; uses loop . parameters : ar - array , matrix lagpolynomial ; u - array , exogenous variable .
train
false
48,299
def canBeNumeric(inStr): try: float(inStr) return True except Exception: return False
[ "def", "canBeNumeric", "(", "inStr", ")", ":", "try", ":", "float", "(", "inStr", ")", "return", "True", "except", "Exception", ":", "return", "False" ]
determines whether the input can be converted to a float (using a try: float) .
train
false
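float() is permissive, so scientific notation and even 'nan' pass the canBeNumeric check:

def canBeNumeric(inStr):
    try:
        float(inStr)
        return True
    except Exception:
        return False

assert canBeNumeric('3.14') and canBeNumeric('1e-3') and canBeNumeric('nan')
assert not canBeNumeric('abc')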
48,300
def _get_base_url_and_installer_for_distro(distribution, build_server, branch): package = distribution if is_centos_or_rhel(distribution): installer = install_commands_yum elif is_ubuntu(distribution): installer = install_commands_ubuntu else: raise UnsupportedDistribution() if branch: result_path = posixpath.join('results/omnibus/', branch, package) base_url = urljoin(build_server, result_path) else: base_url = None return (base_url, installer)
[ "def", "_get_base_url_and_installer_for_distro", "(", "distribution", ",", "build_server", ",", "branch", ")", ":", "package", "=", "distribution", "if", "is_centos_or_rhel", "(", "distribution", ")", ":", "installer", "=", "install_commands_yum", "elif", "is_ubuntu", "(", "distribution", ")", ":", "installer", "=", "install_commands_ubuntu", "else", ":", "raise", "UnsupportedDistribution", "(", ")", "if", "branch", ":", "result_path", "=", "posixpath", ".", "join", "(", "'results/omnibus/'", ",", "branch", ",", "package", ")", "base_url", "=", "urljoin", "(", "build_server", ",", "result_path", ")", "else", ":", "base_url", "=", "None", "return", "(", "base_url", ",", "installer", ")" ]
get built artifact base url and installer for a given distribution .
train
false
48,301
def bind_sqlalchemy(provider, session, user=None, client=None, token=None, grant=None, current_user=None): if user: user_binding = UserBinding(user, session) provider.usergetter(user_binding.get) if client: client_binding = ClientBinding(client, session) provider.clientgetter(client_binding.get) if token: token_binding = TokenBinding(token, session, current_user) provider.tokengetter(token_binding.get) provider.tokensetter(token_binding.set) if grant: if (not current_user): raise ValueError('`current_user` is requiredfor Grant Binding') grant_binding = GrantBinding(grant, session, current_user) provider.grantgetter(grant_binding.get) provider.grantsetter(grant_binding.set)
[ "def", "bind_sqlalchemy", "(", "provider", ",", "session", ",", "user", "=", "None", ",", "client", "=", "None", ",", "token", "=", "None", ",", "grant", "=", "None", ",", "current_user", "=", "None", ")", ":", "if", "user", ":", "user_binding", "=", "UserBinding", "(", "user", ",", "session", ")", "provider", ".", "usergetter", "(", "user_binding", ".", "get", ")", "if", "client", ":", "client_binding", "=", "ClientBinding", "(", "client", ",", "session", ")", "provider", ".", "clientgetter", "(", "client_binding", ".", "get", ")", "if", "token", ":", "token_binding", "=", "TokenBinding", "(", "token", ",", "session", ",", "current_user", ")", "provider", ".", "tokengetter", "(", "token_binding", ".", "get", ")", "provider", ".", "tokensetter", "(", "token_binding", ".", "set", ")", "if", "grant", ":", "if", "(", "not", "current_user", ")", ":", "raise", "ValueError", "(", "'`current_user` is requiredfor Grant Binding'", ")", "grant_binding", "=", "GrantBinding", "(", "grant", ",", "session", ",", "current_user", ")", "provider", ".", "grantgetter", "(", "grant_binding", ".", "get", ")", "provider", ".", "grantsetter", "(", "grant_binding", ".", "set", ")" ]
configures the given :class:oauth2provider instance with the required getters and setters for persistence with sqlalchemy .
train
true
48,302
@pytest.fixture() def proc(qtbot, caplog): p = guiprocess.GUIProcess('testprocess') (yield p) if (p._proc.state() == QProcess.Running): with caplog.at_level(logging.ERROR): with qtbot.waitSignal(p.finished, timeout=10000, raising=False) as blocker: p._proc.terminate() if (not blocker.signal_triggered): p._proc.kill()
[ "@", "pytest", ".", "fixture", "(", ")", "def", "proc", "(", "qtbot", ",", "caplog", ")", ":", "p", "=", "guiprocess", ".", "GUIProcess", "(", "'testprocess'", ")", "(", "yield", "p", ")", "if", "(", "p", ".", "_proc", ".", "state", "(", ")", "==", "QProcess", ".", "Running", ")", ":", "with", "caplog", ".", "at_level", "(", "logging", ".", "ERROR", ")", ":", "with", "qtbot", ".", "waitSignal", "(", "p", ".", "finished", ",", "timeout", "=", "10000", ",", "raising", "=", "False", ")", "as", "blocker", ":", "p", ".", "_proc", ".", "terminate", "(", ")", "if", "(", "not", "blocker", ".", "signal_triggered", ")", ":", "p", ".", "_proc", ".", "kill", "(", ")" ]
a fixture providing a guiprocess and cleaning it up after the test .
train
false