id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
6,491
def decode_list(data): rv = [] for item in data: if (isinstance(item, six.text_type) and six.PY2): item = item.encode('utf-8') elif isinstance(item, list): item = decode_list(item) elif isinstance(item, dict): item = decode_dict(item) rv.append(item) return rv
[ "def", "decode_list", "(", "data", ")", ":", "rv", "=", "[", "]", "for", "item", "in", "data", ":", "if", "(", "isinstance", "(", "item", ",", "six", ".", "text_type", ")", "and", "six", ".", "PY2", ")", ":", "item", "=", "item", ".", "encode", "(", "'utf-8'", ")", "elif", "isinstance", "(", "item", ",", "list", ")", ":", "item", "=", "decode_list", "(", "item", ")", "elif", "isinstance", "(", "item", ",", "dict", ")", ":", "item", "=", "decode_dict", "(", "item", ")", "rv", ".", "append", "(", "item", ")", "return", "rv" ]
json decodes as unicode .
train
false
6,492
def check_force_season_folders(pattern=None, multi=None, anime_type=None): if (pattern is None): pattern = sickbeard.NAMING_PATTERN if (anime_type is None): anime_type = sickbeard.NAMING_ANIME valid = (not validate_name(pattern, None, anime_type, file_only=True)) if (multi is not None): valid = (valid or (not validate_name(pattern, multi, anime_type, file_only=True))) return valid
[ "def", "check_force_season_folders", "(", "pattern", "=", "None", ",", "multi", "=", "None", ",", "anime_type", "=", "None", ")", ":", "if", "(", "pattern", "is", "None", ")", ":", "pattern", "=", "sickbeard", ".", "NAMING_PATTERN", "if", "(", "anime_type", "is", "None", ")", ":", "anime_type", "=", "sickbeard", ".", "NAMING_ANIME", "valid", "=", "(", "not", "validate_name", "(", "pattern", ",", "None", ",", "anime_type", ",", "file_only", "=", "True", ")", ")", "if", "(", "multi", "is", "not", "None", ")", ":", "valid", "=", "(", "valid", "or", "(", "not", "validate_name", "(", "pattern", ",", "multi", ",", "anime_type", ",", "file_only", "=", "True", ")", ")", ")", "return", "valid" ]
checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders to be enabled or not .
train
false
6,493
def find_blade_root_dir(working_dir): blade_root_dir = working_dir if blade_root_dir.endswith('/'): blade_root_dir = blade_root_dir[:(-1)] while (blade_root_dir and (blade_root_dir != '/')): if os.path.isfile(os.path.join(blade_root_dir, 'BLADE_ROOT')): break blade_root_dir = os.path.dirname(blade_root_dir) if ((not blade_root_dir) or (blade_root_dir == '/')): console.error_exit("Can't find the file 'BLADE_ROOT' in this or any upper directory.\nBlade need this file as a placeholder to locate the root source directory (aka the directory where you #include start from).\nYou should create it manually at the first time.") return blade_root_dir
[ "def", "find_blade_root_dir", "(", "working_dir", ")", ":", "blade_root_dir", "=", "working_dir", "if", "blade_root_dir", ".", "endswith", "(", "'/'", ")", ":", "blade_root_dir", "=", "blade_root_dir", "[", ":", "(", "-", "1", ")", "]", "while", "(", "blade_root_dir", "and", "(", "blade_root_dir", "!=", "'/'", ")", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "blade_root_dir", ",", "'BLADE_ROOT'", ")", ")", ":", "break", "blade_root_dir", "=", "os", ".", "path", ".", "dirname", "(", "blade_root_dir", ")", "if", "(", "(", "not", "blade_root_dir", ")", "or", "(", "blade_root_dir", "==", "'/'", ")", ")", ":", "console", ".", "error_exit", "(", "\"Can't find the file 'BLADE_ROOT' in this or any upper directory.\\nBlade need this file as a placeholder to locate the root source directory (aka the directory where you #include start from).\\nYou should create it manually at the first time.\"", ")", "return", "blade_root_dir" ]
find_blade_root_dir to find the dir holds the blade_root file .
train
false
6,494
def normalize_version_number(version): return _discover.normalize_version_number(version)
[ "def", "normalize_version_number", "(", "version", ")", ":", "return", "_discover", ".", "normalize_version_number", "(", "version", ")" ]
turn a version representation into a tuple .
train
false
6,496
def bifid_square(key): A = ''.join(uniq(''.join(key))) n = (len(A) ** 0.5) if (n != int(n)): raise ValueError(('Length of alphabet (%s) is not a square number.' % len(A))) n = int(n) f = (lambda i, j: Symbol(A[((n * i) + j)])) rv = Matrix(n, n, f) return rv
[ "def", "bifid_square", "(", "key", ")", ":", "A", "=", "''", ".", "join", "(", "uniq", "(", "''", ".", "join", "(", "key", ")", ")", ")", "n", "=", "(", "len", "(", "A", ")", "**", "0.5", ")", "if", "(", "n", "!=", "int", "(", "n", ")", ")", ":", "raise", "ValueError", "(", "(", "'Length of alphabet (%s) is not a square number.'", "%", "len", "(", "A", ")", ")", ")", "n", "=", "int", "(", "n", ")", "f", "=", "(", "lambda", "i", ",", "j", ":", "Symbol", "(", "A", "[", "(", "(", "n", "*", "i", ")", "+", "j", ")", "]", ")", ")", "rv", "=", "Matrix", "(", "n", ",", "n", ",", "f", ")", "return", "rv" ]
return characters of key arranged in a square .
train
false
6,497
def _cross_val(data, est, cv, n_jobs): try: from sklearn.model_selection import cross_val_score except ImportError: from sklearn.cross_validation import cross_val_score return np.mean(cross_val_score(est, data, cv=cv, n_jobs=n_jobs, scoring=_gaussian_loglik_scorer))
[ "def", "_cross_val", "(", "data", ",", "est", ",", "cv", ",", "n_jobs", ")", ":", "try", ":", "from", "sklearn", ".", "model_selection", "import", "cross_val_score", "except", "ImportError", ":", "from", "sklearn", ".", "cross_validation", "import", "cross_val_score", "return", "np", ".", "mean", "(", "cross_val_score", "(", "est", ",", "data", ",", "cv", "=", "cv", ",", "n_jobs", "=", "n_jobs", ",", "scoring", "=", "_gaussian_loglik_scorer", ")", ")" ]
helper to compute cross validation .
train
false
6,498
def es_to_datetime(value): if ((not value) or isinstance(value, (date, datetime))): return if (len(value) == 26): try: return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f') except (TypeError, ValueError): pass elif (len(value) == 19): try: return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S') except (TypeError, ValueError): pass elif (len(value) == 10): try: return datetime.strptime(value, '%Y-%m-%d') except (TypeError, ValueError): pass return value
[ "def", "es_to_datetime", "(", "value", ")", ":", "if", "(", "(", "not", "value", ")", "or", "isinstance", "(", "value", ",", "(", "date", ",", "datetime", ")", ")", ")", ":", "return", "if", "(", "len", "(", "value", ")", "==", "26", ")", ":", "try", ":", "return", "datetime", ".", "strptime", "(", "value", ",", "'%Y-%m-%dT%H:%M:%S.%f'", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "pass", "elif", "(", "len", "(", "value", ")", "==", "19", ")", ":", "try", ":", "return", "datetime", ".", "strptime", "(", "value", ",", "'%Y-%m-%dT%H:%M:%S'", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "pass", "elif", "(", "len", "(", "value", ")", "==", "10", ")", ":", "try", ":", "return", "datetime", ".", "strptime", "(", "value", ",", "'%Y-%m-%d'", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "pass", "return", "value" ]
returns a datetime given an elasticsearch date/datetime field .
train
false
6,499
def format_nmds_coords(samples, points, stress): result = [] col_headers = [('NMDS' + str(aa)) for aa in range(1, (points.shape[1] + 1))] result.append(('samples DCTB ' + ' DCTB '.join(col_headers))) for (name, row) in zip(samples, points): result.append(' DCTB '.join(([name] + map(str, row)))) result.append('') result.append((((('stress DCTB ' + str(stress)) + ' DCTB ') + ('0 DCTB ' * (len(col_headers) - 2))) + '0')) result.append(('% variation explained DCTB ' + ('0 DCTB ' * len(col_headers)))) return '\n'.join(result)
[ "def", "format_nmds_coords", "(", "samples", ",", "points", ",", "stress", ")", ":", "result", "=", "[", "]", "col_headers", "=", "[", "(", "'NMDS'", "+", "str", "(", "aa", ")", ")", "for", "aa", "in", "range", "(", "1", ",", "(", "points", ".", "shape", "[", "1", "]", "+", "1", ")", ")", "]", "result", ".", "append", "(", "(", "'samples DCTB '", "+", "' DCTB '", ".", "join", "(", "col_headers", ")", ")", ")", "for", "(", "name", ",", "row", ")", "in", "zip", "(", "samples", ",", "points", ")", ":", "result", ".", "append", "(", "' DCTB '", ".", "join", "(", "(", "[", "name", "]", "+", "map", "(", "str", ",", "row", ")", ")", ")", ")", "result", ".", "append", "(", "''", ")", "result", ".", "append", "(", "(", "(", "(", "(", "'stress DCTB '", "+", "str", "(", "stress", ")", ")", "+", "' DCTB '", ")", "+", "(", "'0 DCTB '", "*", "(", "len", "(", "col_headers", ")", "-", "2", ")", ")", ")", "+", "'0'", ")", ")", "result", ".", "append", "(", "(", "'% variation explained DCTB '", "+", "(", "'0 DCTB '", "*", "len", "(", "col_headers", ")", ")", ")", ")", "return", "'\\n'", ".", "join", "(", "result", ")" ]
samples is list .
train
false
6,500
def get_cookie_header(jar, request): r = MockRequest(request) jar.add_cookie_header(r) return r.get_new_headers().get('Cookie')
[ "def", "get_cookie_header", "(", "jar", ",", "request", ")", ":", "r", "=", "MockRequest", "(", "request", ")", "jar", ".", "add_cookie_header", "(", "r", ")", "return", "r", ".", "get_new_headers", "(", ")", ".", "get", "(", "'Cookie'", ")" ]
produce an appropriate cookie header string to be sent with request .
train
true
6,501
def wrap_info(info): if (info is None): return None else: traceback = info[2] assert isinstance(traceback, types.TracebackType) traceback_id = id(traceback) tracebacktable[traceback_id] = traceback modified_info = (info[0], info[1], traceback_id) return modified_info
[ "def", "wrap_info", "(", "info", ")", ":", "if", "(", "info", "is", "None", ")", ":", "return", "None", "else", ":", "traceback", "=", "info", "[", "2", "]", "assert", "isinstance", "(", "traceback", ",", "types", ".", "TracebackType", ")", "traceback_id", "=", "id", "(", "traceback", ")", "tracebacktable", "[", "traceback_id", "]", "=", "traceback", "modified_info", "=", "(", "info", "[", "0", "]", ",", "info", "[", "1", "]", ",", "traceback_id", ")", "return", "modified_info" ]
replace info[2] .
train
false
6,502
def run_from_args(command, log_command_filter=identity): return Effect(Run.from_args(command, log_command_filter=log_command_filter))
[ "def", "run_from_args", "(", "command", ",", "log_command_filter", "=", "identity", ")", ":", "return", "Effect", "(", "Run", ".", "from_args", "(", "command", ",", "log_command_filter", "=", "log_command_filter", ")", ")" ]
run a command on a remote host .
train
false
6,503
def ISO8601Format(dt): isoStr = dt.strftime('%Y-%m-%dT%H:%M:%S') if dt.microsecond: isoStr += ('.%06d' % dt.microsecond).rstrip('0') if dt.tzinfo: tz = dt.strftime('%z') else: if (time.daylight and time.localtime().tm_isdst): utcOffset_minutes = ((- time.altzone) / 60) else: utcOffset_minutes = ((- time.timezone) / 60) tz = ('%+.2d%.2d' % ((utcOffset_minutes / 60), (abs(utcOffset_minutes) % 60))) if (tz == '+0000'): return (isoStr + 'Z') elif tz: return (((isoStr + tz[:3]) + ':') + tz[3:]) else: return (isoStr + '-00:00')
[ "def", "ISO8601Format", "(", "dt", ")", ":", "isoStr", "=", "dt", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S'", ")", "if", "dt", ".", "microsecond", ":", "isoStr", "+=", "(", "'.%06d'", "%", "dt", ".", "microsecond", ")", ".", "rstrip", "(", "'0'", ")", "if", "dt", ".", "tzinfo", ":", "tz", "=", "dt", ".", "strftime", "(", "'%z'", ")", "else", ":", "if", "(", "time", ".", "daylight", "and", "time", ".", "localtime", "(", ")", ".", "tm_isdst", ")", ":", "utcOffset_minutes", "=", "(", "(", "-", "time", ".", "altzone", ")", "/", "60", ")", "else", ":", "utcOffset_minutes", "=", "(", "(", "-", "time", ".", "timezone", ")", "/", "60", ")", "tz", "=", "(", "'%+.2d%.2d'", "%", "(", "(", "utcOffset_minutes", "/", "60", ")", ",", "(", "abs", "(", "utcOffset_minutes", ")", "%", "60", ")", ")", ")", "if", "(", "tz", "==", "'+0000'", ")", ":", "return", "(", "isoStr", "+", "'Z'", ")", "elif", "tz", ":", "return", "(", "(", "(", "isoStr", "+", "tz", "[", ":", "3", "]", ")", "+", "':'", ")", "+", "tz", "[", "3", ":", "]", ")", "else", ":", "return", "(", "isoStr", "+", "'-00:00'", ")" ]
python datetime isoformat() has the following problems: - leave trailing 0 at the end of microseconds - tz print +00:00 instead of z - missing timezone offset for datetime without tzinfo .
train
true
6,504
def addGroupsToKey(server, activation_key, groups): try: (client, key) = _get_session(server) except Exception as exc: err_msg = 'Exception raised when connecting to spacewalk server ({0}): {1}'.format(server, exc) log.error(err_msg) return {'Error': err_msg} all_groups = client.systemgroup.listAllGroups(key) groupIds = [] for group in all_groups: if (group['name'] in groups): groupIds.append(group['id']) if (client.activationkey.addServerGroups(key, activation_key, groupIds) == 1): return {activation_key: groups} else: return {activation_key: 'Failed to add groups to activation key'}
[ "def", "addGroupsToKey", "(", "server", ",", "activation_key", ",", "groups", ")", ":", "try", ":", "(", "client", ",", "key", ")", "=", "_get_session", "(", "server", ")", "except", "Exception", "as", "exc", ":", "err_msg", "=", "'Exception raised when connecting to spacewalk server ({0}): {1}'", ".", "format", "(", "server", ",", "exc", ")", "log", ".", "error", "(", "err_msg", ")", "return", "{", "'Error'", ":", "err_msg", "}", "all_groups", "=", "client", ".", "systemgroup", ".", "listAllGroups", "(", "key", ")", "groupIds", "=", "[", "]", "for", "group", "in", "all_groups", ":", "if", "(", "group", "[", "'name'", "]", "in", "groups", ")", ":", "groupIds", ".", "append", "(", "group", "[", "'id'", "]", ")", "if", "(", "client", ".", "activationkey", ".", "addServerGroups", "(", "key", ",", "activation_key", ",", "groupIds", ")", "==", "1", ")", ":", "return", "{", "activation_key", ":", "groups", "}", "else", ":", "return", "{", "activation_key", ":", "'Failed to add groups to activation key'", "}" ]
add server groups to a activation key cli example: .
train
true
6,505
def order_property_names(query): (filters, orders) = datastore_index.Normalize(query.filter_list(), query.order_list(), []) orders = _GuessOrders(filters, orders) return set((order.property() for order in orders if (order.property() != '__key__')))
[ "def", "order_property_names", "(", "query", ")", ":", "(", "filters", ",", "orders", ")", "=", "datastore_index", ".", "Normalize", "(", "query", ".", "filter_list", "(", ")", ",", "query", ".", "order_list", "(", ")", ",", "[", "]", ")", "orders", "=", "_GuessOrders", "(", "filters", ",", "orders", ")", "return", "set", "(", "(", "order", ".", "property", "(", ")", "for", "order", "in", "orders", "if", "(", "order", ".", "property", "(", ")", "!=", "'__key__'", ")", ")", ")" ]
generates a list of relevant order properties from the query .
train
false
6,506
def ts_add(ts, td): return dt_to_ts((ts_to_dt(ts) + td))
[ "def", "ts_add", "(", "ts", ",", "td", ")", ":", "return", "dt_to_ts", "(", "(", "ts_to_dt", "(", "ts", ")", "+", "td", ")", ")" ]
allows a timedelta add operation on a string timestamp .
train
false
6,508
def _open_library(config): dbpath = config['library'].as_filename() try: lib = library.Library(dbpath, config['directory'].as_filename(), get_path_formats(), get_replacements()) lib.get_item(0) except (sqlite3.OperationalError, sqlite3.DatabaseError): log.debug(traceback.format_exc()) raise UserError(u'database file {0} could not be opened'.format(util.displayable_path(dbpath))) log.debug(u'library database: {0}\nlibrary directory: {1}'.format(util.displayable_path(lib.path), util.displayable_path(lib.directory))) return lib
[ "def", "_open_library", "(", "config", ")", ":", "dbpath", "=", "config", "[", "'library'", "]", ".", "as_filename", "(", ")", "try", ":", "lib", "=", "library", ".", "Library", "(", "dbpath", ",", "config", "[", "'directory'", "]", ".", "as_filename", "(", ")", ",", "get_path_formats", "(", ")", ",", "get_replacements", "(", ")", ")", "lib", ".", "get_item", "(", "0", ")", "except", "(", "sqlite3", ".", "OperationalError", ",", "sqlite3", ".", "DatabaseError", ")", ":", "log", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "raise", "UserError", "(", "u'database file {0} could not be opened'", ".", "format", "(", "util", ".", "displayable_path", "(", "dbpath", ")", ")", ")", "log", ".", "debug", "(", "u'library database: {0}\\nlibrary directory: {1}'", ".", "format", "(", "util", ".", "displayable_path", "(", "lib", ".", "path", ")", ",", "util", ".", "displayable_path", "(", "lib", ".", "directory", ")", ")", ")", "return", "lib" ]
create a new library instance from the configuration .
train
false
6,509
def to_sax(walker, handler): handler.startDocument() for (prefix, namespace) in prefix_mapping.items(): handler.startPrefixMapping(prefix, namespace) for token in walker: type = token[u'type'] if (type == u'Doctype'): continue elif (type in (u'StartTag', u'EmptyTag')): attrs = AttributesNSImpl(token[u'data'], unadjustForeignAttributes) handler.startElementNS((token[u'namespace'], token[u'name']), token[u'name'], attrs) if (type == u'EmptyTag'): handler.endElementNS((token[u'namespace'], token[u'name']), token[u'name']) elif (type == u'EndTag'): handler.endElementNS((token[u'namespace'], token[u'name']), token[u'name']) elif (type in (u'Characters', u'SpaceCharacters')): handler.characters(token[u'data']) elif (type == u'Comment'): pass else: assert False, u'Unknown token type' for (prefix, namespace) in prefix_mapping.items(): handler.endPrefixMapping(prefix) handler.endDocument()
[ "def", "to_sax", "(", "walker", ",", "handler", ")", ":", "handler", ".", "startDocument", "(", ")", "for", "(", "prefix", ",", "namespace", ")", "in", "prefix_mapping", ".", "items", "(", ")", ":", "handler", ".", "startPrefixMapping", "(", "prefix", ",", "namespace", ")", "for", "token", "in", "walker", ":", "type", "=", "token", "[", "u'type'", "]", "if", "(", "type", "==", "u'Doctype'", ")", ":", "continue", "elif", "(", "type", "in", "(", "u'StartTag'", ",", "u'EmptyTag'", ")", ")", ":", "attrs", "=", "AttributesNSImpl", "(", "token", "[", "u'data'", "]", ",", "unadjustForeignAttributes", ")", "handler", ".", "startElementNS", "(", "(", "token", "[", "u'namespace'", "]", ",", "token", "[", "u'name'", "]", ")", ",", "token", "[", "u'name'", "]", ",", "attrs", ")", "if", "(", "type", "==", "u'EmptyTag'", ")", ":", "handler", ".", "endElementNS", "(", "(", "token", "[", "u'namespace'", "]", ",", "token", "[", "u'name'", "]", ")", ",", "token", "[", "u'name'", "]", ")", "elif", "(", "type", "==", "u'EndTag'", ")", ":", "handler", ".", "endElementNS", "(", "(", "token", "[", "u'namespace'", "]", ",", "token", "[", "u'name'", "]", ")", ",", "token", "[", "u'name'", "]", ")", "elif", "(", "type", "in", "(", "u'Characters'", ",", "u'SpaceCharacters'", ")", ")", ":", "handler", ".", "characters", "(", "token", "[", "u'data'", "]", ")", "elif", "(", "type", "==", "u'Comment'", ")", ":", "pass", "else", ":", "assert", "False", ",", "u'Unknown token type'", "for", "(", "prefix", ",", "namespace", ")", "in", "prefix_mapping", ".", "items", "(", ")", ":", "handler", ".", "endPrefixMapping", "(", "prefix", ")", "handler", ".", "endDocument", "(", ")" ]
call sax-like content handler based on treewalker walker .
train
false
6,510
def set_proxy_bypass(domains, network_service='Ethernet'): servers_str = ' '.join(domains) cmd = 'networksetup -setproxybypassdomains {0} {1}'.format(network_service, servers_str) out = __salt__['cmd.run'](cmd) return ('error' not in out)
[ "def", "set_proxy_bypass", "(", "domains", ",", "network_service", "=", "'Ethernet'", ")", ":", "servers_str", "=", "' '", ".", "join", "(", "domains", ")", "cmd", "=", "'networksetup -setproxybypassdomains {0} {1}'", ".", "format", "(", "network_service", ",", "servers_str", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", "return", "(", "'error'", "not", "in", "out", ")" ]
sets the domains that can bypass the proxy domains an array of domains allowed to bypass the proxy network_service the network service to apply the changes to .
train
true
6,513
def _ColonToEquals(node): return ConvertNodes(node, QueryParser.HAS, QueryParser.EQ, '=')
[ "def", "_ColonToEquals", "(", "node", ")", ":", "return", "ConvertNodes", "(", "node", ",", "QueryParser", ".", "HAS", ",", "QueryParser", ".", "EQ", ",", "'='", ")" ]
transform all has nodes into eq nodes .
train
false
6,514
def get_cache(): return requests.Session().cache
[ "def", "get_cache", "(", ")", ":", "return", "requests", ".", "Session", "(", ")", ".", "cache" ]
return the request cache named name .
train
false
6,516
@with_open_mode('r') @with_sizes('medium') def read_bytewise(f): f.seek(0) while f.read(1): pass
[ "@", "with_open_mode", "(", "'r'", ")", "@", "with_sizes", "(", "'medium'", ")", "def", "read_bytewise", "(", "f", ")", ":", "f", ".", "seek", "(", "0", ")", "while", "f", ".", "read", "(", "1", ")", ":", "pass" ]
read one unit at a time .
train
false
6,517
def _nupicHyperSearchHasErrors(hyperSearchJob): return False
[ "def", "_nupicHyperSearchHasErrors", "(", "hyperSearchJob", ")", ":", "return", "False" ]
check whether any experiments failed in our latest hypersearch parameters: hypersearchjob: _hypersearchjob instance; if none .
train
false
6,518
def skip_openpyxl_gt21(cls): @classmethod def setUpClass(cls): _skip_if_no_openpyxl() import openpyxl ver = openpyxl.__version__ if (not ((LooseVersion(ver) >= LooseVersion('2.0.0')) and (LooseVersion(ver) < LooseVersion('2.2.0')))): raise nose.SkipTest(('openpyxl %s >= 2.2' % str(ver))) cls.setUpClass = setUpClass return cls
[ "def", "skip_openpyxl_gt21", "(", "cls", ")", ":", "@", "classmethod", "def", "setUpClass", "(", "cls", ")", ":", "_skip_if_no_openpyxl", "(", ")", "import", "openpyxl", "ver", "=", "openpyxl", ".", "__version__", "if", "(", "not", "(", "(", "LooseVersion", "(", "ver", ")", ">=", "LooseVersion", "(", "'2.0.0'", ")", ")", "and", "(", "LooseVersion", "(", "ver", ")", "<", "LooseVersion", "(", "'2.2.0'", ")", ")", ")", ")", ":", "raise", "nose", ".", "SkipTest", "(", "(", "'openpyxl %s >= 2.2'", "%", "str", "(", "ver", ")", ")", ")", "cls", ".", "setUpClass", "=", "setUpClass", "return", "cls" ]
skip a testcase instance if openpyxl >= 2 .
train
false
6,519
def read_reject_parameters(fname): try: with open(fname, 'r') as f: lines = f.readlines() except: raise ValueError(('Error while reading %s' % fname)) reject_names = ['gradReject', 'magReject', 'eegReject', 'eogReject', 'ecgReject'] reject_pynames = ['grad', 'mag', 'eeg', 'eog', 'ecg'] reject = dict() for line in lines: words = line.split() if (words[0] in reject_names): reject[reject_pynames[reject_names.index(words[0])]] = float(words[1]) return reject
[ "def", "read_reject_parameters", "(", "fname", ")", ":", "try", ":", "with", "open", "(", "fname", ",", "'r'", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "except", ":", "raise", "ValueError", "(", "(", "'Error while reading %s'", "%", "fname", ")", ")", "reject_names", "=", "[", "'gradReject'", ",", "'magReject'", ",", "'eegReject'", ",", "'eogReject'", ",", "'ecgReject'", "]", "reject_pynames", "=", "[", "'grad'", ",", "'mag'", ",", "'eeg'", ",", "'eog'", ",", "'ecg'", "]", "reject", "=", "dict", "(", ")", "for", "line", "in", "lines", ":", "words", "=", "line", ".", "split", "(", ")", "if", "(", "words", "[", "0", "]", "in", "reject_names", ")", ":", "reject", "[", "reject_pynames", "[", "reject_names", ".", "index", "(", "words", "[", "0", "]", ")", "]", "]", "=", "float", "(", "words", "[", "1", "]", ")", "return", "reject" ]
read rejection parameters from .
train
false
6,520
def getTransformedVector3s(matrixTetragrid, vector3s): transformedVector3s = [] for vector3 in vector3s: transformedVector3s.append(getVector3TransformedByMatrix(matrixTetragrid, vector3)) return transformedVector3s
[ "def", "getTransformedVector3s", "(", "matrixTetragrid", ",", "vector3s", ")", ":", "transformedVector3s", "=", "[", "]", "for", "vector3", "in", "vector3s", ":", "transformedVector3s", ".", "append", "(", "getVector3TransformedByMatrix", "(", "matrixTetragrid", ",", "vector3", ")", ")", "return", "transformedVector3s" ]
get the vector3s multiplied by a matrix .
train
false
6,521
def expm_cond(A, check_finite=True): if check_finite: A = np.asarray_chkfinite(A) else: A = np.asarray(A) if ((len(A.shape) != 2) or (A.shape[0] != A.shape[1])): raise ValueError('expected a square matrix') X = scipy.linalg.expm(A) K = expm_frechet_kronform(A, check_finite=False) A_norm = scipy.linalg.norm(A, 'fro') X_norm = scipy.linalg.norm(X, 'fro') K_norm = scipy.linalg.norm(K, 2) kappa = ((K_norm * A_norm) / X_norm) return kappa
[ "def", "expm_cond", "(", "A", ",", "check_finite", "=", "True", ")", ":", "if", "check_finite", ":", "A", "=", "np", ".", "asarray_chkfinite", "(", "A", ")", "else", ":", "A", "=", "np", ".", "asarray", "(", "A", ")", "if", "(", "(", "len", "(", "A", ".", "shape", ")", "!=", "2", ")", "or", "(", "A", ".", "shape", "[", "0", "]", "!=", "A", ".", "shape", "[", "1", "]", ")", ")", ":", "raise", "ValueError", "(", "'expected a square matrix'", ")", "X", "=", "scipy", ".", "linalg", ".", "expm", "(", "A", ")", "K", "=", "expm_frechet_kronform", "(", "A", ",", "check_finite", "=", "False", ")", "A_norm", "=", "scipy", ".", "linalg", ".", "norm", "(", "A", ",", "'fro'", ")", "X_norm", "=", "scipy", ".", "linalg", ".", "norm", "(", "X", ",", "'fro'", ")", "K_norm", "=", "scipy", ".", "linalg", ".", "norm", "(", "K", ",", "2", ")", "kappa", "=", "(", "(", "K_norm", "*", "A_norm", ")", "/", "X_norm", ")", "return", "kappa" ]
relative condition number of the matrix exponential in the frobenius norm .
train
false
6,524
def getCylindricalByRadians(azimuthRadians, radius=1.0, z=0.0): polar = (radius * euclidean.getWiddershinsUnitPolar(azimuthRadians)) return Vector3(polar.real, polar.imag, z)
[ "def", "getCylindricalByRadians", "(", "azimuthRadians", ",", "radius", "=", "1.0", ",", "z", "=", "0.0", ")", ":", "polar", "=", "(", "radius", "*", "euclidean", ".", "getWiddershinsUnitPolar", "(", "azimuthRadians", ")", ")", "return", "Vector3", "(", "polar", ".", "real", ",", "polar", ".", "imag", ",", "z", ")" ]
get the cylindrical vector3 by radians .
train
false
6,525
def model_unpickle(model, attrs): cls = deferred_class_factory(model, attrs) return cls.__new__(cls)
[ "def", "model_unpickle", "(", "model", ",", "attrs", ")", ":", "cls", "=", "deferred_class_factory", "(", "model", ",", "attrs", ")", "return", "cls", ".", "__new__", "(", "cls", ")" ]
used to unpickle model subclasses with deferred fields .
train
false
6,526
def sampling_E(expr, given_condition=None, numsamples=1, evalf=True, **kwargs): samples = sample_iter(expr, given_condition, numsamples=numsamples, **kwargs) result = (Add(*list(samples)) / numsamples) if evalf: return result.evalf() else: return result
[ "def", "sampling_E", "(", "expr", ",", "given_condition", "=", "None", ",", "numsamples", "=", "1", ",", "evalf", "=", "True", ",", "**", "kwargs", ")", ":", "samples", "=", "sample_iter", "(", "expr", ",", "given_condition", ",", "numsamples", "=", "numsamples", ",", "**", "kwargs", ")", "result", "=", "(", "Add", "(", "*", "list", "(", "samples", ")", ")", "/", "numsamples", ")", "if", "evalf", ":", "return", "result", ".", "evalf", "(", ")", "else", ":", "return", "result" ]
sampling version of e see also p sampling_p sampling_density .
train
false
6,527
def matchStrength(x, y): return sum(((xi == yi) for (xi, yi) in zip(x, y)))
[ "def", "matchStrength", "(", "x", ",", "y", ")", ":", "return", "sum", "(", "(", "(", "xi", "==", "yi", ")", "for", "(", "xi", ",", "yi", ")", "in", "zip", "(", "x", ",", "y", ")", ")", ")" ]
compute the match strength for the individual *x* on the string *y* .
train
false
6,528
@pytest.mark.django_db def test_add_store_bad_path(po_directory, english): fs_path = '/some/fs/example.po' project = ProjectDBFactory(source_language=english) language = LanguageDBFactory() tp = TranslationProjectFactory(project=project, language=language) conf = config.get(project.__class__, instance=project) conf.set_config('pootle_fs.fs_type', 'localfs') conf.set_config('pootle_fs.fs_url', 'foo') store = StoreDBFactory(translation_project=tp, parent=tp.directory, name='example_store.po') with pytest.raises(ValidationError): StoreFS.objects.create(store=store, pootle_path='/some/other/path.po', path=fs_path)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_add_store_bad_path", "(", "po_directory", ",", "english", ")", ":", "fs_path", "=", "'/some/fs/example.po'", "project", "=", "ProjectDBFactory", "(", "source_language", "=", "english", ")", "language", "=", "LanguageDBFactory", "(", ")", "tp", "=", "TranslationProjectFactory", "(", "project", "=", "project", ",", "language", "=", "language", ")", "conf", "=", "config", ".", "get", "(", "project", ".", "__class__", ",", "instance", "=", "project", ")", "conf", ".", "set_config", "(", "'pootle_fs.fs_type'", ",", "'localfs'", ")", "conf", ".", "set_config", "(", "'pootle_fs.fs_url'", ",", "'foo'", ")", "store", "=", "StoreDBFactory", "(", "translation_project", "=", "tp", ",", "parent", "=", "tp", ".", "directory", ",", "name", "=", "'example_store.po'", ")", "with", "pytest", ".", "raises", "(", "ValidationError", ")", ":", "StoreFS", ".", "objects", ".", "create", "(", "store", "=", "store", ",", "pootle_path", "=", "'/some/other/path.po'", ",", "path", "=", "fs_path", ")" ]
try to create a store_fs where pootle_path and store .
train
false
6,531
def render_plugin(values): env = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')), auto_reload=False, keep_trailing_newline=True, autoescape=True) template = env.get_template('new_plugin.py.tmpl') return template.render(**values)
[ "def", "render_plugin", "(", "values", ")", ":", "env", "=", "jinja2", ".", "Environment", "(", "loader", "=", "jinja2", ".", "FileSystemLoader", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'templates'", ")", ")", ",", "auto_reload", "=", "False", ",", "keep_trailing_newline", "=", "True", ",", "autoescape", "=", "True", ")", "template", "=", "env", ".", "get_template", "(", "'new_plugin.py.tmpl'", ")", "return", "template", ".", "render", "(", "**", "values", ")" ]
render the jinja template for the plugin with the given values .
train
false
6,533
@utils.service_type('monitor') def do_type_list(cs, args): vtypes = cs.monitor_types.list() _print_monitor_type_list(vtypes)
[ "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_type_list", "(", "cs", ",", "args", ")", ":", "vtypes", "=", "cs", ".", "monitor_types", ".", "list", "(", ")", "_print_monitor_type_list", "(", "vtypes", ")" ]
print a list of available monitor types .
train
false
6,534
def distribution(): return s3_rest_controller()
[ "def", "distribution", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
mock android sdk distribution .
train
false
6,535
def _key2bin(s): kl = map((lambda x: ord(x)), s) kl = map((lambda x: (binary[(x / 16)] + binary[(x & 15)])), kl) return ''.join(kl)
[ "def", "_key2bin", "(", "s", ")", ":", "kl", "=", "map", "(", "(", "lambda", "x", ":", "ord", "(", "x", ")", ")", ",", "s", ")", "kl", "=", "map", "(", "(", "lambda", "x", ":", "(", "binary", "[", "(", "x", "/", "16", ")", "]", "+", "binary", "[", "(", "x", "&", "15", ")", "]", ")", ")", ",", "kl", ")", "return", "''", ".", "join", "(", "kl", ")" ]
convert a key into a string of binary digits .
train
false
6,536
def test_platform_validator(): hass = None try: hass = get_test_home_assistant() schema = vol.Schema(cv.platform_validator('light')) with pytest.raises(vol.MultipleInvalid): schema('platform_that_does_not_exist') schema('hue') finally: if (hass is not None): hass.stop()
[ "def", "test_platform_validator", "(", ")", ":", "hass", "=", "None", "try", ":", "hass", "=", "get_test_home_assistant", "(", ")", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "platform_validator", "(", "'light'", ")", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "schema", "(", "'platform_that_does_not_exist'", ")", "schema", "(", "'hue'", ")", "finally", ":", "if", "(", "hass", "is", "not", "None", ")", ":", "hass", ".", "stop", "(", ")" ]
test platform validation .
train
false
6,538
def as_float_array(X, copy=True, force_all_finite=True): if (isinstance(X, np.matrix) or ((not isinstance(X, np.ndarray)) and (not sp.issparse(X)))): return check_array(X, ['csr', 'csc', 'coo'], dtype=np.float64, copy=copy, force_all_finite=force_all_finite, ensure_2d=False) elif (sp.issparse(X) and (X.dtype in [np.float32, np.float64])): return (X.copy() if copy else X) elif (X.dtype in [np.float32, np.float64]): return (X.copy(('F' if X.flags['F_CONTIGUOUS'] else 'C')) if copy else X) else: return X.astype((np.float32 if (X.dtype == np.int32) else np.float64))
[ "def", "as_float_array", "(", "X", ",", "copy", "=", "True", ",", "force_all_finite", "=", "True", ")", ":", "if", "(", "isinstance", "(", "X", ",", "np", ".", "matrix", ")", "or", "(", "(", "not", "isinstance", "(", "X", ",", "np", ".", "ndarray", ")", ")", "and", "(", "not", "sp", ".", "issparse", "(", "X", ")", ")", ")", ")", ":", "return", "check_array", "(", "X", ",", "[", "'csr'", ",", "'csc'", ",", "'coo'", "]", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "copy", ",", "force_all_finite", "=", "force_all_finite", ",", "ensure_2d", "=", "False", ")", "elif", "(", "sp", ".", "issparse", "(", "X", ")", "and", "(", "X", ".", "dtype", "in", "[", "np", ".", "float32", ",", "np", ".", "float64", "]", ")", ")", ":", "return", "(", "X", ".", "copy", "(", ")", "if", "copy", "else", "X", ")", "elif", "(", "X", ".", "dtype", "in", "[", "np", ".", "float32", ",", "np", ".", "float64", "]", ")", ":", "return", "(", "X", ".", "copy", "(", "(", "'F'", "if", "X", ".", "flags", "[", "'F_CONTIGUOUS'", "]", "else", "'C'", ")", ")", "if", "copy", "else", "X", ")", "else", ":", "return", "X", ".", "astype", "(", "(", "np", ".", "float32", "if", "(", "X", ".", "dtype", "==", "np", ".", "int32", ")", "else", "np", ".", "float64", ")", ")" ]
converts an array-like to an array of floats the new dtype will be np .
train
true
6,540
def timelimit(timeout): def _1(function): def _2(*args, **kw): class Dispatch(threading.Thread, ): def __init__(self): threading.Thread.__init__(self) self.result = None self.error = None self.setDaemon(True) self.start() def run(self): try: self.result = function(*args, **kw) except: self.error = sys.exc_info() c = Dispatch() c.join(timeout) if c.isAlive(): raise TimeoutError() if c.error: raise c.error[0](c.error[1]) return c.result return _2 return _1
[ "def", "timelimit", "(", "timeout", ")", ":", "def", "_1", "(", "function", ")", ":", "def", "_2", "(", "*", "args", ",", "**", "kw", ")", ":", "class", "Dispatch", "(", "threading", ".", "Thread", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "threading", ".", "Thread", ".", "__init__", "(", "self", ")", "self", ".", "result", "=", "None", "self", ".", "error", "=", "None", "self", ".", "setDaemon", "(", "True", ")", "self", ".", "start", "(", ")", "def", "run", "(", "self", ")", ":", "try", ":", "self", ".", "result", "=", "function", "(", "*", "args", ",", "**", "kw", ")", "except", ":", "self", ".", "error", "=", "sys", ".", "exc_info", "(", ")", "c", "=", "Dispatch", "(", ")", "c", ".", "join", "(", "timeout", ")", "if", "c", ".", "isAlive", "(", ")", ":", "raise", "TimeoutError", "(", ")", "if", "c", ".", "error", ":", "raise", "c", ".", "error", "[", "0", "]", "(", "c", ".", "error", "[", "1", "]", ")", "return", "c", ".", "result", "return", "_2", "return", "_1" ]
a decorator to limit a function to timeout seconds .
train
true
6,541
def upgrade_to_float(*types): conv = {bool: float32, int8: float32, int16: float32, int32: float64, int64: float64, uint8: float32, uint16: float32, uint32: float64, uint64: float64} return (get_scalar_type(Scalar.upcast(*[conv.get(type, type) for type in types])),)
[ "def", "upgrade_to_float", "(", "*", "types", ")", ":", "conv", "=", "{", "bool", ":", "float32", ",", "int8", ":", "float32", ",", "int16", ":", "float32", ",", "int32", ":", "float64", ",", "int64", ":", "float64", ",", "uint8", ":", "float32", ",", "uint16", ":", "float32", ",", "uint32", ":", "float64", ",", "uint64", ":", "float64", "}", "return", "(", "get_scalar_type", "(", "Scalar", ".", "upcast", "(", "*", "[", "conv", ".", "get", "(", "type", ",", "type", ")", "for", "type", "in", "types", "]", ")", ")", ",", ")" ]
upgrade any int types to float32 or float64 to avoid losing precision .
train
false
6,542
def SetFilesProperty(output, variable, property_name, values, sep): output.write('set_source_files_properties(') WriteVariable(output, variable) output.write(' PROPERTIES ') output.write(property_name) output.write(' "') for value in values: output.write(CMakeStringEscape(value)) output.write(sep) output.write('")\n')
[ "def", "SetFilesProperty", "(", "output", ",", "variable", ",", "property_name", ",", "values", ",", "sep", ")", ":", "output", ".", "write", "(", "'set_source_files_properties('", ")", "WriteVariable", "(", "output", ",", "variable", ")", "output", ".", "write", "(", "' PROPERTIES '", ")", "output", ".", "write", "(", "property_name", ")", "output", ".", "write", "(", "' \"'", ")", "for", "value", "in", "values", ":", "output", ".", "write", "(", "CMakeStringEscape", "(", "value", ")", ")", "output", ".", "write", "(", "sep", ")", "output", ".", "write", "(", "'\")\\n'", ")" ]
given a set of source files .
train
false
6,544
def fixup_link(link, target_blank=True): if target_blank: link.set('target', '_blank') link.set('title', url_filename(link.get('href')))
[ "def", "fixup_link", "(", "link", ",", "target_blank", "=", "True", ")", ":", "if", "target_blank", ":", "link", ".", "set", "(", "'target'", ",", "'_blank'", ")", "link", ".", "set", "(", "'title'", ",", "url_filename", "(", "link", ".", "get", "(", "'href'", ")", ")", ")" ]
set certain attributes we want on every link .
train
false
6,545
def _item_path(item): try: path = item.path except AttributeError: path = u'' return path
[ "def", "_item_path", "(", "item", ")", ":", "try", ":", "path", "=", "item", ".", "path", "except", "AttributeError", ":", "path", "=", "u''", "return", "path" ]
return the items path .
train
false
6,546
def draw_values(params, point=None): givens = {} for param in params: if hasattr(param, 'name'): named_nodes = get_named_nodes(param) if (param.name in named_nodes): named_nodes.pop(param.name) for (name, node) in named_nodes.items(): if (not isinstance(node, (tt.sharedvar.TensorSharedVariable, tt.TensorConstant))): givens[name] = (node, draw_value(node, point=point)) values = [None for _ in params] for (i, param) in enumerate(params): values[i] = np.atleast_1d(draw_value(param, point=point, givens=givens.values())) if (len(values) == 1): return values[0] else: return values
[ "def", "draw_values", "(", "params", ",", "point", "=", "None", ")", ":", "givens", "=", "{", "}", "for", "param", "in", "params", ":", "if", "hasattr", "(", "param", ",", "'name'", ")", ":", "named_nodes", "=", "get_named_nodes", "(", "param", ")", "if", "(", "param", ".", "name", "in", "named_nodes", ")", ":", "named_nodes", ".", "pop", "(", "param", ".", "name", ")", "for", "(", "name", ",", "node", ")", "in", "named_nodes", ".", "items", "(", ")", ":", "if", "(", "not", "isinstance", "(", "node", ",", "(", "tt", ".", "sharedvar", ".", "TensorSharedVariable", ",", "tt", ".", "TensorConstant", ")", ")", ")", ":", "givens", "[", "name", "]", "=", "(", "node", ",", "draw_value", "(", "node", ",", "point", "=", "point", ")", ")", "values", "=", "[", "None", "for", "_", "in", "params", "]", "for", "(", "i", ",", "param", ")", "in", "enumerate", "(", "params", ")", ":", "values", "[", "i", "]", "=", "np", ".", "atleast_1d", "(", "draw_value", "(", "param", ",", "point", "=", "point", ",", "givens", "=", "givens", ".", "values", "(", ")", ")", ")", "if", "(", "len", "(", "values", ")", "==", "1", ")", ":", "return", "values", "[", "0", "]", "else", ":", "return", "values" ]
draw parameter values .
train
false
6,547
def topic_children_documents_link(obj): count = obj.children.count() if (not count): return '' link = ('%s?%s' % (reverse('admin:wiki_document_changelist', args=[]), ('parent_topic__exact=%s' % obj.id))) what = (((count == 1) and 'child') or 'children') return ('<a href="%s">%s&nbsp;%s</a>' % (link, count, what))
[ "def", "topic_children_documents_link", "(", "obj", ")", ":", "count", "=", "obj", ".", "children", ".", "count", "(", ")", "if", "(", "not", "count", ")", ":", "return", "''", "link", "=", "(", "'%s?%s'", "%", "(", "reverse", "(", "'admin:wiki_document_changelist'", ",", "args", "=", "[", "]", ")", ",", "(", "'parent_topic__exact=%s'", "%", "obj", ".", "id", ")", ")", ")", "what", "=", "(", "(", "(", "count", "==", "1", ")", "and", "'child'", ")", "or", "'children'", ")", "return", "(", "'<a href=\"%s\">%s&nbsp;%s</a>'", "%", "(", "link", ",", "count", ",", "what", ")", ")" ]
html link to a list of child documents .
train
false
6,548
def check_or_die(command): if (command is None): raise CommandNotFoundError("'None' is not a valid command.") if (not which(command)): raise CommandNotFoundError("'{0}' is not in the path".format(command))
[ "def", "check_or_die", "(", "command", ")", ":", "if", "(", "command", "is", "None", ")", ":", "raise", "CommandNotFoundError", "(", "\"'None' is not a valid command.\"", ")", "if", "(", "not", "which", "(", "command", ")", ")", ":", "raise", "CommandNotFoundError", "(", "\"'{0}' is not in the path\"", ".", "format", "(", "command", ")", ")" ]
simple convenience function for modules to use for gracefully blowing up if a required tool is not available in the system path .
train
false
6,549
def remove_user(user): user_pre_delete.send(instance=user, sender=user.__class__) user.username = u'deleted-{0}'.format(user.pk) while User.objects.filter(username=user.username).exists(): user.username = u'deleted-{0}-{1}'.format(user.pk, binascii.b2a_hex(os.urandom(5))) user.first_name = u'Deleted User' user.last_name = u'' user.email = u'noreply@weblate.org' user.is_active = False user.set_unusable_password() user.save() user.social_auth.all().delete()
[ "def", "remove_user", "(", "user", ")", ":", "user_pre_delete", ".", "send", "(", "instance", "=", "user", ",", "sender", "=", "user", ".", "__class__", ")", "user", ".", "username", "=", "u'deleted-{0}'", ".", "format", "(", "user", ".", "pk", ")", "while", "User", ".", "objects", ".", "filter", "(", "username", "=", "user", ".", "username", ")", ".", "exists", "(", ")", ":", "user", ".", "username", "=", "u'deleted-{0}-{1}'", ".", "format", "(", "user", ".", "pk", ",", "binascii", ".", "b2a_hex", "(", "os", ".", "urandom", "(", "5", ")", ")", ")", "user", ".", "first_name", "=", "u'Deleted User'", "user", ".", "last_name", "=", "u''", "user", ".", "email", "=", "u'noreply@weblate.org'", "user", ".", "is_active", "=", "False", "user", ".", "set_unusable_password", "(", ")", "user", ".", "save", "(", ")", "user", ".", "social_auth", ".", "all", "(", ")", ".", "delete", "(", ")" ]
remove user from conference .
train
false
6,550
def underscore_memoization(func): name = ('_' + func.__name__) def wrapper(self): try: return getattr(self, name) except AttributeError: result = func(self) if inspect.isgenerator(result): result = list(result) setattr(self, name, result) return result return wrapper
[ "def", "underscore_memoization", "(", "func", ")", ":", "name", "=", "(", "'_'", "+", "func", ".", "__name__", ")", "def", "wrapper", "(", "self", ")", ":", "try", ":", "return", "getattr", "(", "self", ",", "name", ")", "except", "AttributeError", ":", "result", "=", "func", "(", "self", ")", "if", "inspect", ".", "isgenerator", "(", "result", ")", ":", "result", "=", "list", "(", "result", ")", "setattr", "(", "self", ",", "name", ",", "result", ")", "return", "result", "return", "wrapper" ]
decorator for methods:: class a: def x: if self .
train
false
6,551
def estimate_total_entities(session, db_master, keyname): query = SimpleStatement('SELECT COUNT(*) FROM "{}"'.format(dbconstants.APP_ENTITY_TABLE), consistency_level=ConsistencyLevel.ONE) try: rows = session.execute(query)[0].count return str((rows / len(dbconstants.APP_ENTITY_SCHEMA))) except dbconstants.TRANSIENT_CASSANDRA_ERRORS: stats_cmd = '{nodetool} cfstats {keyspace}.{table}'.format(nodetool=cassandra_interface.NODE_TOOL, keyspace=cassandra_interface.KEYSPACE, table=dbconstants.APP_ENTITY_TABLE) stats = utils.ssh(db_master, keyname, stats_cmd, method=subprocess.check_output) for line in stats.splitlines(): if ('Number of keys (estimate)' in line): return '{} (estimate)'.format(line.split()[(-1)]) raise dbconstants.AppScaleDBError('Unable to estimate total entities.')
[ "def", "estimate_total_entities", "(", "session", ",", "db_master", ",", "keyname", ")", ":", "query", "=", "SimpleStatement", "(", "'SELECT COUNT(*) FROM \"{}\"'", ".", "format", "(", "dbconstants", ".", "APP_ENTITY_TABLE", ")", ",", "consistency_level", "=", "ConsistencyLevel", ".", "ONE", ")", "try", ":", "rows", "=", "session", ".", "execute", "(", "query", ")", "[", "0", "]", ".", "count", "return", "str", "(", "(", "rows", "/", "len", "(", "dbconstants", ".", "APP_ENTITY_SCHEMA", ")", ")", ")", "except", "dbconstants", ".", "TRANSIENT_CASSANDRA_ERRORS", ":", "stats_cmd", "=", "'{nodetool} cfstats {keyspace}.{table}'", ".", "format", "(", "nodetool", "=", "cassandra_interface", ".", "NODE_TOOL", ",", "keyspace", "=", "cassandra_interface", ".", "KEYSPACE", ",", "table", "=", "dbconstants", ".", "APP_ENTITY_TABLE", ")", "stats", "=", "utils", ".", "ssh", "(", "db_master", ",", "keyname", ",", "stats_cmd", ",", "method", "=", "subprocess", ".", "check_output", ")", "for", "line", "in", "stats", ".", "splitlines", "(", ")", ":", "if", "(", "'Number of keys (estimate)'", "in", "line", ")", ":", "return", "'{} (estimate)'", ".", "format", "(", "line", ".", "split", "(", ")", "[", "(", "-", "1", ")", "]", ")", "raise", "dbconstants", ".", "AppScaleDBError", "(", "'Unable to estimate total entities.'", ")" ]
estimate the total number of entities .
train
false
6,552
def wrap_conf_type(f): def wrapper(conf_path, *args, **kwargs): if os.path.isdir(conf_path): conf_type = 'config_dir' else: conf_type = 'config' conf_uri = ('%s:%s' % (conf_type, conf_path)) return f(conf_uri, *args, **kwargs) return wrapper
[ "def", "wrap_conf_type", "(", "f", ")", ":", "def", "wrapper", "(", "conf_path", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "conf_path", ")", ":", "conf_type", "=", "'config_dir'", "else", ":", "conf_type", "=", "'config'", "conf_uri", "=", "(", "'%s:%s'", "%", "(", "conf_type", ",", "conf_path", ")", ")", "return", "f", "(", "conf_uri", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
wrap a function whos first argument is a paste .
train
false
6,553
def deduce_helpful_msg(req): msg = '' if os.path.exists(req): msg = ' It does exist.' try: with open(req, 'r') as fp: parse_requirements(fp.read()).next() msg += ((((' The argument you provided ' + ('(%s) appears to be a' % req)) + ' requirements file. If that is the') + " case, use the '-r' flag to install") + ' the packages specified within it.') except RequirementParseError: logger.debug(("Cannot parse '%s' as requirements file" % req), exc_info=1) else: msg += (" File '%s' does not exist." % req) return msg
[ "def", "deduce_helpful_msg", "(", "req", ")", ":", "msg", "=", "''", "if", "os", ".", "path", ".", "exists", "(", "req", ")", ":", "msg", "=", "' It does exist.'", "try", ":", "with", "open", "(", "req", ",", "'r'", ")", "as", "fp", ":", "parse_requirements", "(", "fp", ".", "read", "(", ")", ")", ".", "next", "(", ")", "msg", "+=", "(", "(", "(", "(", "' The argument you provided '", "+", "(", "'(%s) appears to be a'", "%", "req", ")", ")", "+", "' requirements file. If that is the'", ")", "+", "\" case, use the '-r' flag to install\"", ")", "+", "' the packages specified within it.'", ")", "except", "RequirementParseError", ":", "logger", ".", "debug", "(", "(", "\"Cannot parse '%s' as requirements file\"", "%", "req", ")", ",", "exc_info", "=", "1", ")", "else", ":", "msg", "+=", "(", "\" File '%s' does not exist.\"", "%", "req", ")", "return", "msg" ]
returns helpful msg in case requirements file does not exist .
train
true
6,554
def _get_axis(indexes): ndim = len(indexes) indexes = [(slice(None, None) if (i is None) else [0]) for i in indexes] x = np.empty(((2,) * ndim)) x2 = x[tuple(indexes)] return x2.shape.index(1)
[ "def", "_get_axis", "(", "indexes", ")", ":", "ndim", "=", "len", "(", "indexes", ")", "indexes", "=", "[", "(", "slice", "(", "None", ",", "None", ")", "if", "(", "i", "is", "None", ")", "else", "[", "0", "]", ")", "for", "i", "in", "indexes", "]", "x", "=", "np", ".", "empty", "(", "(", "(", "2", ",", ")", "*", "ndim", ")", ")", "x2", "=", "x", "[", "tuple", "(", "indexes", ")", "]", "return", "x2", ".", "shape", ".", "index", "(", "1", ")" ]
get axis along which point-wise slicing results lie this is mostly a hack because i cant figure out numpys rule on this and cant be bothered to go reading .
train
false
6,555
def get_imdb(name): if (not __sets.has_key(name)): raise KeyError('Unknown dataset: {}'.format(name)) return __sets[name]()
[ "def", "get_imdb", "(", "name", ")", ":", "if", "(", "not", "__sets", ".", "has_key", "(", "name", ")", ")", ":", "raise", "KeyError", "(", "'Unknown dataset: {}'", ".", "format", "(", "name", ")", ")", "return", "__sets", "[", "name", "]", "(", ")" ]
get an imdb by name .
train
false
6,556
def leftordered(M): l = list(M.T) l.sort(key=tuple) return array(l)[::(-1)].T
[ "def", "leftordered", "(", "M", ")", ":", "l", "=", "list", "(", "M", ".", "T", ")", "l", ".", "sort", "(", "key", "=", "tuple", ")", "return", "array", "(", "l", ")", "[", ":", ":", "(", "-", "1", ")", "]", ".", "T" ]
returns the given matrix in left-ordered-form .
train
false
6,557
def credsfromfile(creds_file=None, subdir=None, verbose=False): return Authenticate().load_creds(creds_file=creds_file, subdir=subdir, verbose=verbose)
[ "def", "credsfromfile", "(", "creds_file", "=", "None", ",", "subdir", "=", "None", ",", "verbose", "=", "False", ")", ":", "return", "Authenticate", "(", ")", ".", "load_creds", "(", "creds_file", "=", "creds_file", ",", "subdir", "=", "subdir", ",", "verbose", "=", "verbose", ")" ]
convenience function for authentication .
train
false
6,558
@subscriber(ResourceChanged, for_resources=('collection',), for_actions=(ACTIONS.DELETE,)) def on_collections_deleted(event): storage = event.request.registry.storage permission = event.request.registry.permission for change in event.impacted_records: collection = change['old'] bucket_id = event.payload['bucket_id'] parent_id = utils.instance_uri(event.request, 'collection', bucket_id=bucket_id, id=collection['id']) storage.delete_all(collection_id='record', parent_id=parent_id, with_deleted=False) storage.purge_deleted(collection_id='record', parent_id=parent_id) permission.delete_object_permissions((parent_id + '*'))
[ "@", "subscriber", "(", "ResourceChanged", ",", "for_resources", "=", "(", "'collection'", ",", ")", ",", "for_actions", "=", "(", "ACTIONS", ".", "DELETE", ",", ")", ")", "def", "on_collections_deleted", "(", "event", ")", ":", "storage", "=", "event", ".", "request", ".", "registry", ".", "storage", "permission", "=", "event", ".", "request", ".", "registry", ".", "permission", "for", "change", "in", "event", ".", "impacted_records", ":", "collection", "=", "change", "[", "'old'", "]", "bucket_id", "=", "event", ".", "payload", "[", "'bucket_id'", "]", "parent_id", "=", "utils", ".", "instance_uri", "(", "event", ".", "request", ",", "'collection'", ",", "bucket_id", "=", "bucket_id", ",", "id", "=", "collection", "[", "'id'", "]", ")", "storage", ".", "delete_all", "(", "collection_id", "=", "'record'", ",", "parent_id", "=", "parent_id", ",", "with_deleted", "=", "False", ")", "storage", ".", "purge_deleted", "(", "collection_id", "=", "'record'", ",", "parent_id", "=", "parent_id", ")", "permission", ".", "delete_object_permissions", "(", "(", "parent_id", "+", "'*'", ")", ")" ]
some collections were deleted .
train
false
6,559
def get_webdefault_xml(sdk_root=None): if (not sdk_root): sdk_root = _SDK_ROOT with open(os.path.join(sdk_root, _WEBDEFAULT_XML_PATH)) as f: return f.read()
[ "def", "get_webdefault_xml", "(", "sdk_root", "=", "None", ")", ":", "if", "(", "not", "sdk_root", ")", ":", "sdk_root", "=", "_SDK_ROOT", "with", "open", "(", "os", ".", "path", ".", "join", "(", "sdk_root", ",", "_WEBDEFAULT_XML_PATH", ")", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
returns the contents of webdefault .
train
false
6,560
def test_batch_normalization_image_size_setter(): bn = BatchNormalization() bn.image_size = (5, 4) assert (bn.input_dim == (None, 5, 4)) bn.image_size = (4, 5) assert (bn.input_dim == (None, 4, 5))
[ "def", "test_batch_normalization_image_size_setter", "(", ")", ":", "bn", "=", "BatchNormalization", "(", ")", "bn", ".", "image_size", "=", "(", "5", ",", "4", ")", "assert", "(", "bn", ".", "input_dim", "==", "(", "None", ",", "5", ",", "4", ")", ")", "bn", ".", "image_size", "=", "(", "4", ",", "5", ")", "assert", "(", "bn", ".", "input_dim", "==", "(", "None", ",", "4", ",", "5", ")", ")" ]
test that setting image_size on a batchnormalization works .
train
false
6,561
def get_siblings(x, collection=None): parents = get_parents(x, collection) siblings = set([]) for parent in parents: siblings.update(get_children(parent, collection)) siblings.discard(x) return list(siblings)
[ "def", "get_siblings", "(", "x", ",", "collection", "=", "None", ")", ":", "parents", "=", "get_parents", "(", "x", ",", "collection", ")", "siblings", "=", "set", "(", "[", "]", ")", "for", "parent", "in", "parents", ":", "siblings", ".", "update", "(", "get_children", "(", "parent", ",", "collection", ")", ")", "siblings", ".", "discard", "(", "x", ")", "return", "list", "(", "siblings", ")" ]
get sibling random variables of input .
train
false
6,562
def sig_stars(p): if (p < 0.001): return '***' elif (p < 0.01): return '**' elif (p < 0.05): return '*' elif (p < 0.1): return '.' return ''
[ "def", "sig_stars", "(", "p", ")", ":", "if", "(", "p", "<", "0.001", ")", ":", "return", "'***'", "elif", "(", "p", "<", "0.01", ")", ":", "return", "'**'", "elif", "(", "p", "<", "0.05", ")", ":", "return", "'*'", "elif", "(", "p", "<", "0.1", ")", ":", "return", "'.'", "return", "''" ]
return a r-style significance string corresponding to p values .
train
false
6,563
def recreate_instances_in_mig(mig): changed = False return_data = [] actions_filter = ['RECREATING'] if mig.recreate_instances(): changed = True return_data = _get_instance_list(mig, filter_list=actions_filter) return (changed, return_data)
[ "def", "recreate_instances_in_mig", "(", "mig", ")", ":", "changed", "=", "False", "return_data", "=", "[", "]", "actions_filter", "=", "[", "'RECREATING'", "]", "if", "mig", ".", "recreate_instances", "(", ")", ":", "changed", "=", "True", "return_data", "=", "_get_instance_list", "(", "mig", ",", "filter_list", "=", "actions_filter", ")", "return", "(", "changed", ",", "return_data", ")" ]
recreate the instances for a managed instance group .
train
false
6,564
def ylabel(s, *args, **kwargs): return gca().set_ylabel(s, *args, **kwargs)
[ "def", "ylabel", "(", "s", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "gca", "(", ")", ".", "set_ylabel", "(", "s", ",", "*", "args", ",", "**", "kwargs", ")" ]
set the *y* axis label of the current axis to *s* .
train
false
6,565
def load_handlers(config, handler_names): handlers = [] if isinstance(handler_names, basestring): handler_names = [handler_names] for handler in handler_names: logger.debug('Loading Handler %s', handler) try: cls = load_dynamic_class(handler, Handler) cls_name = cls.__name__ handler_config = configobj.ConfigObj() handler_config.merge(config['handlers']['default']) if (cls_name in config['handlers']): handler_config.merge(config['handlers'][cls_name]) if ('handlers_config_path' in config['server']): configfile = (os.path.join(config['server']['handlers_config_path'], cls_name) + '.conf') if os.path.exists(configfile): handler_config.merge(configobj.ConfigObj(configfile)) h = cls(handler_config) handlers.append(h) except (ImportError, SyntaxError): logger.warning('Failed to load handler %s. %s', handler, traceback.format_exc()) continue return handlers
[ "def", "load_handlers", "(", "config", ",", "handler_names", ")", ":", "handlers", "=", "[", "]", "if", "isinstance", "(", "handler_names", ",", "basestring", ")", ":", "handler_names", "=", "[", "handler_names", "]", "for", "handler", "in", "handler_names", ":", "logger", ".", "debug", "(", "'Loading Handler %s'", ",", "handler", ")", "try", ":", "cls", "=", "load_dynamic_class", "(", "handler", ",", "Handler", ")", "cls_name", "=", "cls", ".", "__name__", "handler_config", "=", "configobj", ".", "ConfigObj", "(", ")", "handler_config", ".", "merge", "(", "config", "[", "'handlers'", "]", "[", "'default'", "]", ")", "if", "(", "cls_name", "in", "config", "[", "'handlers'", "]", ")", ":", "handler_config", ".", "merge", "(", "config", "[", "'handlers'", "]", "[", "cls_name", "]", ")", "if", "(", "'handlers_config_path'", "in", "config", "[", "'server'", "]", ")", ":", "configfile", "=", "(", "os", ".", "path", ".", "join", "(", "config", "[", "'server'", "]", "[", "'handlers_config_path'", "]", ",", "cls_name", ")", "+", "'.conf'", ")", "if", "os", ".", "path", ".", "exists", "(", "configfile", ")", ":", "handler_config", ".", "merge", "(", "configobj", ".", "ConfigObj", "(", "configfile", ")", ")", "h", "=", "cls", "(", "handler_config", ")", "handlers", ".", "append", "(", "h", ")", "except", "(", "ImportError", ",", "SyntaxError", ")", ":", "logger", ".", "warning", "(", "'Failed to load handler %s. %s'", ",", "handler", ",", "traceback", ".", "format_exc", "(", ")", ")", "continue", "return", "handlers" ]
load handlers .
train
true
6,566
@celery.task def receive_async(text, connection_id, message_id, fields): from rapidsms.models import Connection from rapidsms.router import get_router logger.debug('receive_async: %s', text) router = get_router() connection = Connection.objects.select_related().get(pk=connection_id) message = router.new_incoming_message(text=text, connections=[connection], id_=message_id, fields=fields) try: router.process_incoming(message) except Exception: logger.exception('Exception processing incoming message') raise
[ "@", "celery", ".", "task", "def", "receive_async", "(", "text", ",", "connection_id", ",", "message_id", ",", "fields", ")", ":", "from", "rapidsms", ".", "models", "import", "Connection", "from", "rapidsms", ".", "router", "import", "get_router", "logger", ".", "debug", "(", "'receive_async: %s'", ",", "text", ")", "router", "=", "get_router", "(", ")", "connection", "=", "Connection", ".", "objects", ".", "select_related", "(", ")", ".", "get", "(", "pk", "=", "connection_id", ")", "message", "=", "router", ".", "new_incoming_message", "(", "text", "=", "text", ",", "connections", "=", "[", "connection", "]", ",", "id_", "=", "message_id", ",", "fields", "=", "fields", ")", "try", ":", "router", ".", "process_incoming", "(", "message", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "'Exception processing incoming message'", ")", "raise" ]
task used to send inbound message through router phases .
train
false
6,567
@task(base=BaseInstructorTask) def enrollment_report_features_csv(entry_id, xmodule_instance_args): action_name = ugettext_noop('generating_enrollment_report') task_fn = partial(upload_enrollment_report, xmodule_instance_args) return run_main_task(entry_id, task_fn, action_name)
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "enrollment_report_features_csv", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "ugettext_noop", "(", "'generating_enrollment_report'", ")", "task_fn", "=", "partial", "(", "upload_enrollment_report", ",", "xmodule_instance_args", ")", "return", "run_main_task", "(", "entry_id", ",", "task_fn", ",", "action_name", ")" ]
compute student profile information for a course and upload the csv to an s3 bucket for download .
train
false
6,569
def printhdr(file): hdr = gethdr(open(file, 'r')) (data_size, encoding, sample_rate, channels, info) = hdr while (info[(-1):] == '\x00'): info = info[:(-1)] print 'File name: ', file print 'Data size: ', data_size print 'Encoding: ', encoding print 'Sample rate:', sample_rate print 'Channels: ', channels print 'Info: ', repr(info)
[ "def", "printhdr", "(", "file", ")", ":", "hdr", "=", "gethdr", "(", "open", "(", "file", ",", "'r'", ")", ")", "(", "data_size", ",", "encoding", ",", "sample_rate", ",", "channels", ",", "info", ")", "=", "hdr", "while", "(", "info", "[", "(", "-", "1", ")", ":", "]", "==", "'\\x00'", ")", ":", "info", "=", "info", "[", ":", "(", "-", "1", ")", "]", "print", "'File name: '", ",", "file", "print", "'Data size: '", ",", "data_size", "print", "'Encoding: '", ",", "encoding", "print", "'Sample rate:'", ",", "sample_rate", "print", "'Channels: '", ",", "channels", "print", "'Info: '", ",", "repr", "(", "info", ")" ]
read and print the sound header of a named file .
train
false
6,574
def identity(x): return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
returns a 2-d identity array .
train
false
6,576
def sample_func(v): return (v + v)
[ "def", "sample_func", "(", "v", ")", ":", "return", "(", "v", "+", "v", ")" ]
blah blah .
train
false
6,577
def bitbucket_webhook_helper(data): repos = [(repo % {'full_name': data['repository']['full_name']}) for repo in BITBUCKET_REPOS] return {'service_long_name': 'Bitbucket', 'repo_url': data['repository']['links']['html']['href'], 'repos': repos, 'branch': data['push']['changes'][(-1)]['new']['name']}
[ "def", "bitbucket_webhook_helper", "(", "data", ")", ":", "repos", "=", "[", "(", "repo", "%", "{", "'full_name'", ":", "data", "[", "'repository'", "]", "[", "'full_name'", "]", "}", ")", "for", "repo", "in", "BITBUCKET_REPOS", "]", "return", "{", "'service_long_name'", ":", "'Bitbucket'", ",", "'repo_url'", ":", "data", "[", "'repository'", "]", "[", "'links'", "]", "[", "'html'", "]", "[", "'href'", "]", ",", "'repos'", ":", "repos", ",", "'branch'", ":", "data", "[", "'push'", "]", "[", "'changes'", "]", "[", "(", "-", "1", ")", "]", "[", "'new'", "]", "[", "'name'", "]", "}" ]
api to handle webhooks from bitbucket .
train
false
6,578
def _split_repo_str(repo): split = sourceslist.SourceEntry(repo) return (split.type, split.uri, split.dist, split.comps)
[ "def", "_split_repo_str", "(", "repo", ")", ":", "split", "=", "sourceslist", ".", "SourceEntry", "(", "repo", ")", "return", "(", "split", ".", "type", ",", "split", ".", "uri", ",", "split", ".", "dist", ",", "split", ".", "comps", ")" ]
return apt source entry as a tuple .
train
true
6,579
def apt_get_install(args, sudo=False): return _from_args(sudo)((['apt-get', '-y', 'install'] + args))
[ "def", "apt_get_install", "(", "args", ",", "sudo", "=", "False", ")", ":", "return", "_from_args", "(", "sudo", ")", "(", "(", "[", "'apt-get'", ",", "'-y'", ",", "'install'", "]", "+", "args", ")", ")" ]
install a package with apt-get .
train
false
6,580
def test_ctor(refresher): assert (len(refresher.refreshers) > 0) actual_handlers = list(refresher.refreshers.keys()) expected_handlers = ['schemata', 'tables', 'views', 'functions', 'types', 'databases', 'casing'] assert (expected_handlers == actual_handlers)
[ "def", "test_ctor", "(", "refresher", ")", ":", "assert", "(", "len", "(", "refresher", ".", "refreshers", ")", ">", "0", ")", "actual_handlers", "=", "list", "(", "refresher", ".", "refreshers", ".", "keys", "(", ")", ")", "expected_handlers", "=", "[", "'schemata'", ",", "'tables'", ",", "'views'", ",", "'functions'", ",", "'types'", ",", "'databases'", ",", "'casing'", "]", "assert", "(", "expected_handlers", "==", "actual_handlers", ")" ]
refresher object should contain a few handlers .
train
false
6,581
def resync(hive, hive_names, timeout=2, max_attempts=5): for hostname in hive_names: if (hive[hostname] is not None): for attempts in range(0, max_attempts): if (not hive[hostname].prompt(timeout=timeout)): break
[ "def", "resync", "(", "hive", ",", "hive_names", ",", "timeout", "=", "2", ",", "max_attempts", "=", "5", ")", ":", "for", "hostname", "in", "hive_names", ":", "if", "(", "hive", "[", "hostname", "]", "is", "not", "None", ")", ":", "for", "attempts", "in", "range", "(", "0", ",", "max_attempts", ")", ":", "if", "(", "not", "hive", "[", "hostname", "]", ".", "prompt", "(", "timeout", "=", "timeout", ")", ")", ":", "break" ]
this waits for the shell prompt for each host in an effort to try to get them all to the same state .
train
false
6,582
@frappe.whitelist() def getdoc(doctype, name, user=None): if (not (doctype and name)): raise Exception, u'doctype and name required!' if (not name): name = doctype if (not frappe.db.exists(doctype, name)): return [] try: doc = frappe.get_doc(doctype, name) run_onload(doc) if (not doc.has_permission(u'read')): raise frappe.PermissionError, (u'read', doctype, name) doc.apply_fieldlevel_read_permissions() get_docinfo(doc) except Exception: frappe.errprint(frappe.utils.get_traceback()) frappe.msgprint(_(u'Did not load')) raise if (doc and (not name.startswith(u'_'))): frappe.get_user().update_recent(doctype, name) doc.add_seen() frappe.response.docs.append(doc)
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "getdoc", "(", "doctype", ",", "name", ",", "user", "=", "None", ")", ":", "if", "(", "not", "(", "doctype", "and", "name", ")", ")", ":", "raise", "Exception", ",", "u'doctype and name required!'", "if", "(", "not", "name", ")", ":", "name", "=", "doctype", "if", "(", "not", "frappe", ".", "db", ".", "exists", "(", "doctype", ",", "name", ")", ")", ":", "return", "[", "]", "try", ":", "doc", "=", "frappe", ".", "get_doc", "(", "doctype", ",", "name", ")", "run_onload", "(", "doc", ")", "if", "(", "not", "doc", ".", "has_permission", "(", "u'read'", ")", ")", ":", "raise", "frappe", ".", "PermissionError", ",", "(", "u'read'", ",", "doctype", ",", "name", ")", "doc", ".", "apply_fieldlevel_read_permissions", "(", ")", "get_docinfo", "(", "doc", ")", "except", "Exception", ":", "frappe", ".", "errprint", "(", "frappe", ".", "utils", ".", "get_traceback", "(", ")", ")", "frappe", ".", "msgprint", "(", "_", "(", "u'Did not load'", ")", ")", "raise", "if", "(", "doc", "and", "(", "not", "name", ".", "startswith", "(", "u'_'", ")", ")", ")", ":", "frappe", ".", "get_user", "(", ")", ".", "update_recent", "(", "doctype", ",", "name", ")", "doc", ".", "add_seen", "(", ")", "frappe", ".", "response", ".", "docs", ".", "append", "(", "doc", ")" ]
get the doc string or comments for an object .
train
false
6,583
def _make_evokeds(coefs, conds, cond_length, tmin_s, tmax_s, info): evokeds = dict() cumul = 0 for cond in conds: (tmin_, tmax_) = (tmin_s[cond], tmax_s[cond]) evokeds[cond] = EvokedArray(coefs[:, cumul:((cumul + tmax_) - tmin_)], info=info, comment=cond, tmin=(tmin_ / float(info['sfreq'])), nave=cond_length[cond], kind='average') cumul += (tmax_ - tmin_) return evokeds
[ "def", "_make_evokeds", "(", "coefs", ",", "conds", ",", "cond_length", ",", "tmin_s", ",", "tmax_s", ",", "info", ")", ":", "evokeds", "=", "dict", "(", ")", "cumul", "=", "0", "for", "cond", "in", "conds", ":", "(", "tmin_", ",", "tmax_", ")", "=", "(", "tmin_s", "[", "cond", "]", ",", "tmax_s", "[", "cond", "]", ")", "evokeds", "[", "cond", "]", "=", "EvokedArray", "(", "coefs", "[", ":", ",", "cumul", ":", "(", "(", "cumul", "+", "tmax_", ")", "-", "tmin_", ")", "]", ",", "info", "=", "info", ",", "comment", "=", "cond", ",", "tmin", "=", "(", "tmin_", "/", "float", "(", "info", "[", "'sfreq'", "]", ")", ")", ",", "nave", "=", "cond_length", "[", "cond", "]", ",", "kind", "=", "'average'", ")", "cumul", "+=", "(", "tmax_", "-", "tmin_", ")", "return", "evokeds" ]
create a dictionary of evoked objects .
train
false
6,585
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): if (sys.platform == 'win32'): if (appauthor is None): appauthor = appname path = os.path.normpath(_get_win_folder('CSIDL_COMMON_APPDATA')) if appname: path = os.path.join(path, appauthor, appname) elif (sys.platform == 'darwin'): path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if (appname and version): path = os.path.join(path, version) return path
[ "def", "site_data_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "multipath", "=", "False", ")", ":", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "if", "(", "appauthor", "is", "None", ")", ":", "appauthor", "=", "appname", "path", "=", "os", ".", "path", ".", "normpath", "(", "_get_win_folder", "(", "'CSIDL_COMMON_APPDATA'", ")", ")", "if", "appname", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "appauthor", ",", "appname", ")", "elif", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "'/Library/Application Support'", ")", "if", "appname", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "appname", ")", "else", ":", "path", "=", "os", ".", "getenv", "(", "'XDG_DATA_DIRS'", ",", "os", ".", "pathsep", ".", "join", "(", "[", "'/usr/local/share'", ",", "'/usr/share'", "]", ")", ")", "pathlist", "=", "[", "os", ".", "path", ".", "expanduser", "(", "x", ".", "rstrip", "(", "os", ".", "sep", ")", ")", "for", "x", "in", "path", ".", "split", "(", "os", ".", "pathsep", ")", "]", "if", "appname", ":", "if", "version", ":", "appname", "=", "os", ".", "path", ".", "join", "(", "appname", ",", "version", ")", "pathlist", "=", "[", "os", ".", "sep", ".", "join", "(", "[", "x", ",", "appname", "]", ")", "for", "x", "in", "pathlist", "]", "if", "multipath", ":", "path", "=", "os", ".", "pathsep", ".", "join", "(", "pathlist", ")", "else", ":", "path", "=", "pathlist", "[", "0", "]", "return", "path", "if", "(", "appname", "and", "version", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "version", ")", "return", "path" ]
return full path to the user-shared data dir for this application .
train
true
6,586
def request_soup(url, **kwargs): parser = kwargs.pop('parser', 'html5lib') response = request_response(url, **kwargs) if (response is not None): return BeautifulSoup(response.content, parser)
[ "def", "request_soup", "(", "url", ",", "**", "kwargs", ")", ":", "parser", "=", "kwargs", ".", "pop", "(", "'parser'", ",", "'html5lib'", ")", "response", "=", "request_response", "(", "url", ",", "**", "kwargs", ")", "if", "(", "response", "is", "not", "None", ")", ":", "return", "BeautifulSoup", "(", "response", ".", "content", ",", "parser", ")" ]
wrapper for request_response .
train
false
6,587
def is_percentage_failure(error_message): if ('Subprocess return code: 1' not in error_message): return False else: return True
[ "def", "is_percentage_failure", "(", "error_message", ")", ":", "if", "(", "'Subprocess return code: 1'", "not", "in", "error_message", ")", ":", "return", "False", "else", ":", "return", "True" ]
when diff-quality is run with a threshold percentage .
train
false
6,588
def readAuthorizedKeyFile(fileobj, parseKey=keys.Key.fromString): for line in fileobj: line = line.strip() if (line and (not line.startswith('#'))): try: (yield parseKey(line)) except keys.BadKeyError as e: log.msg('Unable to parse line "{0}" as a key: {1!s}'.format(line, e))
[ "def", "readAuthorizedKeyFile", "(", "fileobj", ",", "parseKey", "=", "keys", ".", "Key", ".", "fromString", ")", ":", "for", "line", "in", "fileobj", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "line", "and", "(", "not", "line", ".", "startswith", "(", "'#'", ")", ")", ")", ":", "try", ":", "(", "yield", "parseKey", "(", "line", ")", ")", "except", "keys", ".", "BadKeyError", "as", "e", ":", "log", ".", "msg", "(", "'Unable to parse line \"{0}\" as a key: {1!s}'", ".", "format", "(", "line", ",", "e", ")", ")" ]
reads keys from an authorized keys file .
train
false
6,589
def module_list(path): if (path == ''): path = '.' pjoin = os.path.join if os.path.isdir(path): files = [] for (root, dirs, nondirs) in os.walk(path): subdir = root[(len(path) + 1):] if subdir: files.extend((pjoin(subdir, f) for f in nondirs)) dirs[:] = [] else: files.extend(nondirs) else: try: files = list(zipimporter(path)._files.keys()) except: files = [] modules = [] for f in files: m = import_re.match(f) if m: modules.append(m.group('name')) return list(set(modules))
[ "def", "module_list", "(", "path", ")", ":", "if", "(", "path", "==", "''", ")", ":", "path", "=", "'.'", "pjoin", "=", "os", ".", "path", ".", "join", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "files", "=", "[", "]", "for", "(", "root", ",", "dirs", ",", "nondirs", ")", "in", "os", ".", "walk", "(", "path", ")", ":", "subdir", "=", "root", "[", "(", "len", "(", "path", ")", "+", "1", ")", ":", "]", "if", "subdir", ":", "files", ".", "extend", "(", "(", "pjoin", "(", "subdir", ",", "f", ")", "for", "f", "in", "nondirs", ")", ")", "dirs", "[", ":", "]", "=", "[", "]", "else", ":", "files", ".", "extend", "(", "nondirs", ")", "else", ":", "try", ":", "files", "=", "list", "(", "zipimporter", "(", "path", ")", ".", "_files", ".", "keys", "(", ")", ")", "except", ":", "files", "=", "[", "]", "modules", "=", "[", "]", "for", "f", "in", "files", ":", "m", "=", "import_re", ".", "match", "(", "f", ")", "if", "m", ":", "modules", ".", "append", "(", "m", ".", "group", "(", "'name'", ")", ")", "return", "list", "(", "set", "(", "modules", ")", ")" ]
return the list containing the names of the modules available in the given folder .
train
false
6,590
def flatten_blocks(lines, num_indents=(-1)): INDENTATION = (' ' * 4) if (not lines): return '' if isinstance(lines, six.string_types): return ((INDENTATION * num_indents) + lines) return '\n'.join([flatten_blocks(line, (num_indents + 1)) for line in lines])
[ "def", "flatten_blocks", "(", "lines", ",", "num_indents", "=", "(", "-", "1", ")", ")", ":", "INDENTATION", "=", "(", "' '", "*", "4", ")", "if", "(", "not", "lines", ")", ":", "return", "''", "if", "isinstance", "(", "lines", ",", "six", ".", "string_types", ")", ":", "return", "(", "(", "INDENTATION", "*", "num_indents", ")", "+", "lines", ")", "return", "'\\n'", ".", "join", "(", "[", "flatten_blocks", "(", "line", ",", "(", "num_indents", "+", "1", ")", ")", "for", "line", "in", "lines", "]", ")" ]
takes a list or string and flattens it into a string with indentation .
train
true
6,591
def hist_calls(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): _histogram = histogram(('%s_calls' % get_qualname(fn))) rtn = fn(*args, **kwargs) if (type(rtn) in (int, float)): _histogram.update(rtn) return rtn return wrapper
[ "def", "hist_calls", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "_histogram", "=", "histogram", "(", "(", "'%s_calls'", "%", "get_qualname", "(", "fn", ")", ")", ")", "rtn", "=", "fn", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "type", "(", "rtn", ")", "in", "(", "int", ",", "float", ")", ")", ":", "_histogram", ".", "update", "(", "rtn", ")", "return", "rtn", "return", "wrapper" ]
decorator to check the distribution of return values of a function .
train
false
6,592
def indexes(db=None): return indices(db)
[ "def", "indexes", "(", "db", "=", "None", ")", ":", "return", "indices", "(", "db", ")" ]
show all indices in the database .
train
false
6,593
def register_home(route): global _home _home = route
[ "def", "register_home", "(", "route", ")", ":", "global", "_home", "_home", "=", "route" ]
registers ui home page .
train
false
6,596
@handle_response_format @treeio_login_required @module_admin_required() def page_add(request, response_format='html'): if request.POST: form = PageForm(request.POST) if form.is_valid(): page = form.save() return HttpResponseRedirect(reverse('core_admin_page_view', args=[page.id])) else: form = PageForm() return render_to_response('core/administration/page_add', {'form': form}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "page_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "request", ".", "POST", ":", "form", "=", "PageForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "page", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'core_admin_page_view'", ",", "args", "=", "[", "page", ".", "id", "]", ")", ")", "else", ":", "form", "=", "PageForm", "(", ")", "return", "render_to_response", "(", "'core/administration/page_add'", ",", "{", "'form'", ":", "form", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
static page add .
train
false
6,597
def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode): if ((indexer_id is None) or (sceneSeason is None) or (sceneEpisode is None)): return (sceneSeason, sceneEpisode) indexer_id = int(indexer_id) indexer = int(indexer) xem_refresh(indexer_id, indexer) dbData = [x[u'doc'] for x in sickrage.srCore.mainDB.db.get_many(u'tv_episodes', indexer_id, with_doc=True) if ((x[u'doc'][u'indexer'] == indexer) and (x[u'doc'][u'scene_season'] == sceneSeason) and (x[u'doc'][u'scene_episode'] == sceneEpisode))] if dbData: return (int((dbData[0][u'season'] or 0)), int((dbData[0][u'episode'] or 0))) return (sceneSeason, sceneEpisode)
[ "def", "get_indexer_numbering_for_xem", "(", "indexer_id", ",", "indexer", ",", "sceneSeason", ",", "sceneEpisode", ")", ":", "if", "(", "(", "indexer_id", "is", "None", ")", "or", "(", "sceneSeason", "is", "None", ")", "or", "(", "sceneEpisode", "is", "None", ")", ")", ":", "return", "(", "sceneSeason", ",", "sceneEpisode", ")", "indexer_id", "=", "int", "(", "indexer_id", ")", "indexer", "=", "int", "(", "indexer", ")", "xem_refresh", "(", "indexer_id", ",", "indexer", ")", "dbData", "=", "[", "x", "[", "u'doc'", "]", "for", "x", "in", "sickrage", ".", "srCore", ".", "mainDB", ".", "db", ".", "get_many", "(", "u'tv_episodes'", ",", "indexer_id", ",", "with_doc", "=", "True", ")", "if", "(", "(", "x", "[", "u'doc'", "]", "[", "u'indexer'", "]", "==", "indexer", ")", "and", "(", "x", "[", "u'doc'", "]", "[", "u'scene_season'", "]", "==", "sceneSeason", ")", "and", "(", "x", "[", "u'doc'", "]", "[", "u'scene_episode'", "]", "==", "sceneEpisode", ")", ")", "]", "if", "dbData", ":", "return", "(", "int", "(", "(", "dbData", "[", "0", "]", "[", "u'season'", "]", "or", "0", ")", ")", ",", "int", "(", "(", "dbData", "[", "0", "]", "[", "u'episode'", "]", "or", "0", ")", ")", ")", "return", "(", "sceneSeason", ",", "sceneEpisode", ")" ]
reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering .
train
false
6,598
def _get_wiki_link(title, locale): from kitsune.wiki.models import Document d = get_object_fallback(Document, locale=locale, title=title, is_template=False) if d: while d.redirect_document(): d = d.redirect_document() url = reverse('wiki.document', locale=locale, args=[d.slug]) return {'found': True, 'url': url, 'text': d.title} from kitsune.sumo.templatetags.jinja_helpers import urlparams return {'found': False, 'text': title, 'url': urlparams(reverse('wiki.new_document', locale=locale), title=title)}
[ "def", "_get_wiki_link", "(", "title", ",", "locale", ")", ":", "from", "kitsune", ".", "wiki", ".", "models", "import", "Document", "d", "=", "get_object_fallback", "(", "Document", ",", "locale", "=", "locale", ",", "title", "=", "title", ",", "is_template", "=", "False", ")", "if", "d", ":", "while", "d", ".", "redirect_document", "(", ")", ":", "d", "=", "d", ".", "redirect_document", "(", ")", "url", "=", "reverse", "(", "'wiki.document'", ",", "locale", "=", "locale", ",", "args", "=", "[", "d", ".", "slug", "]", ")", "return", "{", "'found'", ":", "True", ",", "'url'", ":", "url", ",", "'text'", ":", "d", ".", "title", "}", "from", "kitsune", ".", "sumo", ".", "templatetags", ".", "jinja_helpers", "import", "urlparams", "return", "{", "'found'", ":", "False", ",", "'text'", ":", "title", ",", "'url'", ":", "urlparams", "(", "reverse", "(", "'wiki.new_document'", ",", "locale", "=", "locale", ")", ",", "title", "=", "title", ")", "}" ]
checks the page exists .
train
false
6,599
def _isolate_zero(f, K, inf, sup, basis=False, sqf=False): (j, f) = dup_terms_gcd(f, K) if (j > 0): F = K.get_field() if (((inf is None) or (inf <= 0)) and ((sup is None) or (0 <= sup))): if (not sqf): if (not basis): return ([((F.zero, F.zero), j)], f) else: return ([((F.zero, F.zero), j, [K.one, K.zero])], f) else: return ([(F.zero, F.zero)], f) return ([], f)
[ "def", "_isolate_zero", "(", "f", ",", "K", ",", "inf", ",", "sup", ",", "basis", "=", "False", ",", "sqf", "=", "False", ")", ":", "(", "j", ",", "f", ")", "=", "dup_terms_gcd", "(", "f", ",", "K", ")", "if", "(", "j", ">", "0", ")", ":", "F", "=", "K", ".", "get_field", "(", ")", "if", "(", "(", "(", "inf", "is", "None", ")", "or", "(", "inf", "<=", "0", ")", ")", "and", "(", "(", "sup", "is", "None", ")", "or", "(", "0", "<=", "sup", ")", ")", ")", ":", "if", "(", "not", "sqf", ")", ":", "if", "(", "not", "basis", ")", ":", "return", "(", "[", "(", "(", "F", ".", "zero", ",", "F", ".", "zero", ")", ",", "j", ")", "]", ",", "f", ")", "else", ":", "return", "(", "[", "(", "(", "F", ".", "zero", ",", "F", ".", "zero", ")", ",", "j", ",", "[", "K", ".", "one", ",", "K", ".", "zero", "]", ")", "]", ",", "f", ")", "else", ":", "return", "(", "[", "(", "F", ".", "zero", ",", "F", ".", "zero", ")", "]", ",", "f", ")", "return", "(", "[", "]", ",", "f", ")" ]
handle special case of cf algorithm when f is homogeneous .
train
false
6,600
def make_label_dec(label, ds=None): warnings.warn('The function `make_label_dec` is deprecated since IPython 4.0', DeprecationWarning, stacklevel=2) if isinstance(label, str): labels = [label] else: labels = label tmp = (lambda : None) for label in labels: setattr(tmp, label, True) def decor(f): for label in labels: setattr(f, label, True) return f if (ds is None): ds = ('Labels a test as %r.' % label) decor.__doc__ = ds return decor
[ "def", "make_label_dec", "(", "label", ",", "ds", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'The function `make_label_dec` is deprecated since IPython 4.0'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "isinstance", "(", "label", ",", "str", ")", ":", "labels", "=", "[", "label", "]", "else", ":", "labels", "=", "label", "tmp", "=", "(", "lambda", ":", "None", ")", "for", "label", "in", "labels", ":", "setattr", "(", "tmp", ",", "label", ",", "True", ")", "def", "decor", "(", "f", ")", ":", "for", "label", "in", "labels", ":", "setattr", "(", "f", ",", "label", ",", "True", ")", "return", "f", "if", "(", "ds", "is", "None", ")", ":", "ds", "=", "(", "'Labels a test as %r.'", "%", "label", ")", "decor", ".", "__doc__", "=", "ds", "return", "decor" ]
factory function to create a decorator that applies one or more labels .
train
false
6,601
def make_theano_rng(rng_or_seed=None, default_seed=None, which_method=None): return make_rng(rng_or_seed, default_seed, which_method, RandomStreams)
[ "def", "make_theano_rng", "(", "rng_or_seed", "=", "None", ",", "default_seed", "=", "None", ",", "which_method", "=", "None", ")", ":", "return", "make_rng", "(", "rng_or_seed", ",", "default_seed", ",", "which_method", ",", "RandomStreams", ")" ]
returns a theano randomstreams .
train
false
6,602
def comb_w_rep(n, k): if (k == 0): return [[]] combs = [[i] for i in range(n)] for i in range((k - 1)): curr = [] for p in combs: for m in range(p[(-1)], n): curr.append((p + [m])) combs = curr return combs
[ "def", "comb_w_rep", "(", "n", ",", "k", ")", ":", "if", "(", "k", "==", "0", ")", ":", "return", "[", "[", "]", "]", "combs", "=", "[", "[", "i", "]", "for", "i", "in", "range", "(", "n", ")", "]", "for", "i", "in", "range", "(", "(", "k", "-", "1", ")", ")", ":", "curr", "=", "[", "]", "for", "p", "in", "combs", ":", "for", "m", "in", "range", "(", "p", "[", "(", "-", "1", ")", "]", ",", "n", ")", ":", "curr", ".", "append", "(", "(", "p", "+", "[", "m", "]", ")", ")", "combs", "=", "curr", "return", "combs" ]
combinations with repetition returns the list of k combinations with repetition from n objects .
train
false
6,604
def has_mixed_eol_chars(text): eol_chars = get_eol_chars(text) if (eol_chars is None): return False correct_text = eol_chars.join((text + eol_chars).splitlines()) return (repr(correct_text) != repr(text))
[ "def", "has_mixed_eol_chars", "(", "text", ")", ":", "eol_chars", "=", "get_eol_chars", "(", "text", ")", "if", "(", "eol_chars", "is", "None", ")", ":", "return", "False", "correct_text", "=", "eol_chars", ".", "join", "(", "(", "text", "+", "eol_chars", ")", ".", "splitlines", "(", ")", ")", "return", "(", "repr", "(", "correct_text", ")", "!=", "repr", "(", "text", ")", ")" ]
detect if text has mixed eol characters .
train
true
6,605
def middleware(application, global_conf=None): def lint_app(*args, **kw): assert (len(args) == 2), u'Two arguments required' assert (not kw), u'No keyword arguments allowed' (environ, start_response) = args check_environ(environ) start_response_started = [] def start_response_wrapper(*args, **kw): assert ((len(args) == 2) or (len(args) == 3)), (u'Invalid number of arguments: %s' % args) assert (not kw), u'No keyword arguments allowed' status = args[0] headers = args[1] if (len(args) == 3): exc_info = args[2] else: exc_info = None check_status(status) check_headers(headers) check_content_type(status, headers) check_exc_info(exc_info) start_response_started.append(None) return WriteWrapper(start_response(*args)) environ[u'wsgi.input'] = InputWrapper(environ[u'wsgi.input']) environ[u'wsgi.errors'] = ErrorWrapper(environ[u'wsgi.errors']) iterator = application(environ, start_response_wrapper) assert isinstance(iterator, collections.Iterable), u'The application must return an iterator, if only an empty list' check_iterator(iterator) return IteratorWrapper(iterator, start_response_started) return lint_app
[ "def", "middleware", "(", "application", ",", "global_conf", "=", "None", ")", ":", "def", "lint_app", "(", "*", "args", ",", "**", "kw", ")", ":", "assert", "(", "len", "(", "args", ")", "==", "2", ")", ",", "u'Two arguments required'", "assert", "(", "not", "kw", ")", ",", "u'No keyword arguments allowed'", "(", "environ", ",", "start_response", ")", "=", "args", "check_environ", "(", "environ", ")", "start_response_started", "=", "[", "]", "def", "start_response_wrapper", "(", "*", "args", ",", "**", "kw", ")", ":", "assert", "(", "(", "len", "(", "args", ")", "==", "2", ")", "or", "(", "len", "(", "args", ")", "==", "3", ")", ")", ",", "(", "u'Invalid number of arguments: %s'", "%", "args", ")", "assert", "(", "not", "kw", ")", ",", "u'No keyword arguments allowed'", "status", "=", "args", "[", "0", "]", "headers", "=", "args", "[", "1", "]", "if", "(", "len", "(", "args", ")", "==", "3", ")", ":", "exc_info", "=", "args", "[", "2", "]", "else", ":", "exc_info", "=", "None", "check_status", "(", "status", ")", "check_headers", "(", "headers", ")", "check_content_type", "(", "status", ",", "headers", ")", "check_exc_info", "(", "exc_info", ")", "start_response_started", ".", "append", "(", "None", ")", "return", "WriteWrapper", "(", "start_response", "(", "*", "args", ")", ")", "environ", "[", "u'wsgi.input'", "]", "=", "InputWrapper", "(", "environ", "[", "u'wsgi.input'", "]", ")", "environ", "[", "u'wsgi.errors'", "]", "=", "ErrorWrapper", "(", "environ", "[", "u'wsgi.errors'", "]", ")", "iterator", "=", "application", "(", "environ", ",", "start_response_wrapper", ")", "assert", "isinstance", "(", "iterator", ",", "collections", ".", "Iterable", ")", ",", "u'The application must return an iterator, if only an empty list'", "check_iterator", "(", "iterator", ")", "return", "IteratorWrapper", "(", "iterator", ",", "start_response_started", ")", "return", "lint_app" ]
when applied between a wsgi server and a wsgi application .
train
false
6,606
def f_3(): __doc__ = 'new f 3 doc' return __doc__
[ "def", "f_3", "(", ")", ":", "__doc__", "=", "'new f 3 doc'", "return", "__doc__" ]
f 3 doc .
train
false
6,607
def build_pattern(log_format): if (log_format == 'combined'): log_format = LOG_FORMAT_COMBINED elif (log_format == 'common'): log_format = LOG_FORMAT_COMMON pattern = re.sub(REGEX_SPECIAL_CHARS, '\\\\\\1', log_format) pattern = re.sub(REGEX_LOG_FORMAT_VARIABLE, '(?P<\\1>.*)', pattern) return re.compile(pattern)
[ "def", "build_pattern", "(", "log_format", ")", ":", "if", "(", "log_format", "==", "'combined'", ")", ":", "log_format", "=", "LOG_FORMAT_COMBINED", "elif", "(", "log_format", "==", "'common'", ")", ":", "log_format", "=", "LOG_FORMAT_COMMON", "pattern", "=", "re", ".", "sub", "(", "REGEX_SPECIAL_CHARS", ",", "'\\\\\\\\\\\\1'", ",", "log_format", ")", "pattern", "=", "re", ".", "sub", "(", "REGEX_LOG_FORMAT_VARIABLE", ",", "'(?P<\\\\1>.*)'", ",", "pattern", ")", "return", "re", ".", "compile", "(", "pattern", ")" ]
build regular expression to parse given format .
train
true
6,608
def cocktail_shaker_sort(unsorted): for i in range((len(unsorted) - 1), 0, (-1)): swapped = False for j in range(i, 0, (-1)): if (unsorted[j] < unsorted[(j - 1)]): (unsorted[j], unsorted[(j - 1)]) = (unsorted[(j - 1)], unsorted[j]) swapped = True for j in range(i): if (unsorted[j] > unsorted[(j + 1)]): (unsorted[j], unsorted[(j + 1)]) = (unsorted[(j + 1)], unsorted[j]) swapped = True if (not swapped): return unsorted
[ "def", "cocktail_shaker_sort", "(", "unsorted", ")", ":", "for", "i", "in", "range", "(", "(", "len", "(", "unsorted", ")", "-", "1", ")", ",", "0", ",", "(", "-", "1", ")", ")", ":", "swapped", "=", "False", "for", "j", "in", "range", "(", "i", ",", "0", ",", "(", "-", "1", ")", ")", ":", "if", "(", "unsorted", "[", "j", "]", "<", "unsorted", "[", "(", "j", "-", "1", ")", "]", ")", ":", "(", "unsorted", "[", "j", "]", ",", "unsorted", "[", "(", "j", "-", "1", ")", "]", ")", "=", "(", "unsorted", "[", "(", "j", "-", "1", ")", "]", ",", "unsorted", "[", "j", "]", ")", "swapped", "=", "True", "for", "j", "in", "range", "(", "i", ")", ":", "if", "(", "unsorted", "[", "j", "]", ">", "unsorted", "[", "(", "j", "+", "1", ")", "]", ")", ":", "(", "unsorted", "[", "j", "]", ",", "unsorted", "[", "(", "j", "+", "1", ")", "]", ")", "=", "(", "unsorted", "[", "(", "j", "+", "1", ")", "]", ",", "unsorted", "[", "j", "]", ")", "swapped", "=", "True", "if", "(", "not", "swapped", ")", ":", "return", "unsorted" ]
pure implementation of the cocktail shaker sort algorithm in python .
train
false
6,609
def template_assets_path(instance, filename): name = os.path.join('certificate_template_assets', str(instance.id), filename) fullname = os.path.join(settings.MEDIA_ROOT, name) if os.path.exists(fullname): os.remove(fullname) return name
[ "def", "template_assets_path", "(", "instance", ",", "filename", ")", ":", "name", "=", "os", ".", "path", ".", "join", "(", "'certificate_template_assets'", ",", "str", "(", "instance", ".", "id", ")", ",", "filename", ")", "fullname", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "name", ")", "if", "os", ".", "path", ".", "exists", "(", "fullname", ")", ":", "os", ".", "remove", "(", "fullname", ")", "return", "name" ]
delete the file if it already exist and returns the certificate template asset file path .
train
false
6,611
def process_stdin(line): if (line is None): sys.exit(0) if (mpstate.functions.input_handler is not None): mpstate.functions.input_handler(line) return line = line.strip() if mpstate.status.setup_mode: if (line == '.'): mpstate.status.setup_mode = False mpstate.status.flightmode = 'MAV' mpstate.rl.set_prompt('MAV> ') return if (line != '+++'): line += '\r' for c in line: time.sleep(0.01) mpstate.master().write(c) return if (not line): return args = shlex_quotes(line) cmd = args[0] while (cmd in mpstate.aliases): line = mpstate.aliases[cmd] args = (shlex.split(line) + args[1:]) cmd = args[0] if (cmd == 'help'): k = command_map.keys() k.sort() for cmd in k: (fn, help) = command_map[cmd] print ('%-15s : %s' % (cmd, help)) return if ((cmd == 'exit') and mpstate.settings.requireexit): mpstate.status.exit = True return if (not (cmd in command_map)): for (m, pm) in mpstate.modules: if hasattr(m, 'unknown_command'): try: if m.unknown_command(args): return except Exception as e: print ('ERROR in command: %s' % str(e)) print ("Unknown command '%s'" % line) return (fn, help) = command_map[cmd] try: fn(args[1:]) except Exception as e: print ('ERROR in command %s: %s' % (args[1:], str(e))) if (mpstate.settings.moddebug > 1): traceback.print_exc()
[ "def", "process_stdin", "(", "line", ")", ":", "if", "(", "line", "is", "None", ")", ":", "sys", ".", "exit", "(", "0", ")", "if", "(", "mpstate", ".", "functions", ".", "input_handler", "is", "not", "None", ")", ":", "mpstate", ".", "functions", ".", "input_handler", "(", "line", ")", "return", "line", "=", "line", ".", "strip", "(", ")", "if", "mpstate", ".", "status", ".", "setup_mode", ":", "if", "(", "line", "==", "'.'", ")", ":", "mpstate", ".", "status", ".", "setup_mode", "=", "False", "mpstate", ".", "status", ".", "flightmode", "=", "'MAV'", "mpstate", ".", "rl", ".", "set_prompt", "(", "'MAV> '", ")", "return", "if", "(", "line", "!=", "'+++'", ")", ":", "line", "+=", "'\\r'", "for", "c", "in", "line", ":", "time", ".", "sleep", "(", "0.01", ")", "mpstate", ".", "master", "(", ")", ".", "write", "(", "c", ")", "return", "if", "(", "not", "line", ")", ":", "return", "args", "=", "shlex_quotes", "(", "line", ")", "cmd", "=", "args", "[", "0", "]", "while", "(", "cmd", "in", "mpstate", ".", "aliases", ")", ":", "line", "=", "mpstate", ".", "aliases", "[", "cmd", "]", "args", "=", "(", "shlex", ".", "split", "(", "line", ")", "+", "args", "[", "1", ":", "]", ")", "cmd", "=", "args", "[", "0", "]", "if", "(", "cmd", "==", "'help'", ")", ":", "k", "=", "command_map", ".", "keys", "(", ")", "k", ".", "sort", "(", ")", "for", "cmd", "in", "k", ":", "(", "fn", ",", "help", ")", "=", "command_map", "[", "cmd", "]", "print", "(", "'%-15s : %s'", "%", "(", "cmd", ",", "help", ")", ")", "return", "if", "(", "(", "cmd", "==", "'exit'", ")", "and", "mpstate", ".", "settings", ".", "requireexit", ")", ":", "mpstate", ".", "status", ".", "exit", "=", "True", "return", "if", "(", "not", "(", "cmd", "in", "command_map", ")", ")", ":", "for", "(", "m", ",", "pm", ")", "in", "mpstate", ".", "modules", ":", "if", "hasattr", "(", "m", ",", "'unknown_command'", ")", ":", "try", ":", "if", "m", ".", "unknown_command", "(", "args", ")", ":", "return", "except", "Exception", "as", "e", ":", "print", "(", "'ERROR in command: %s'", "%", "str", "(", "e", ")", ")", "print", "(", "\"Unknown command '%s'\"", "%", "line", ")", "return", "(", "fn", ",", "help", ")", "=", "command_map", "[", "cmd", "]", "try", ":", "fn", "(", "args", "[", "1", ":", "]", ")", "except", "Exception", "as", "e", ":", "print", "(", "'ERROR in command %s: %s'", "%", "(", "args", "[", "1", ":", "]", ",", "str", "(", "e", ")", ")", ")", "if", "(", "mpstate", ".", "settings", ".", "moddebug", ">", "1", ")", ":", "traceback", ".", "print_exc", "(", ")" ]
handle commands from user .
train
true
6,612
def clustermap(data, pivot_kws=None, method='average', metric='euclidean', z_score=None, standard_scale=None, figsize=None, cbar_kws=None, row_cluster=True, col_cluster=True, row_linkage=None, col_linkage=None, row_colors=None, col_colors=None, mask=None, **kwargs): plotter = ClusterGrid(data, pivot_kws=pivot_kws, figsize=figsize, row_colors=row_colors, col_colors=col_colors, z_score=z_score, standard_scale=standard_scale, mask=mask) return plotter.plot(metric=metric, method=method, colorbar_kws=cbar_kws, row_cluster=row_cluster, col_cluster=col_cluster, row_linkage=row_linkage, col_linkage=col_linkage, **kwargs)
[ "def", "clustermap", "(", "data", ",", "pivot_kws", "=", "None", ",", "method", "=", "'average'", ",", "metric", "=", "'euclidean'", ",", "z_score", "=", "None", ",", "standard_scale", "=", "None", ",", "figsize", "=", "None", ",", "cbar_kws", "=", "None", ",", "row_cluster", "=", "True", ",", "col_cluster", "=", "True", ",", "row_linkage", "=", "None", ",", "col_linkage", "=", "None", ",", "row_colors", "=", "None", ",", "col_colors", "=", "None", ",", "mask", "=", "None", ",", "**", "kwargs", ")", ":", "plotter", "=", "ClusterGrid", "(", "data", ",", "pivot_kws", "=", "pivot_kws", ",", "figsize", "=", "figsize", ",", "row_colors", "=", "row_colors", ",", "col_colors", "=", "col_colors", ",", "z_score", "=", "z_score", ",", "standard_scale", "=", "standard_scale", ",", "mask", "=", "mask", ")", "return", "plotter", ".", "plot", "(", "metric", "=", "metric", ",", "method", "=", "method", ",", "colorbar_kws", "=", "cbar_kws", ",", "row_cluster", "=", "row_cluster", ",", "col_cluster", "=", "col_cluster", ",", "row_linkage", "=", "row_linkage", ",", "col_linkage", "=", "col_linkage", ",", "**", "kwargs", ")" ]
plot a hierarchically clustered heatmap of a pandas dataframe parameters data: pandas .
train
false
6,613
def get_machine_id_change_spec(client_factory, machine_id_str): virtual_machine_config_spec = client_factory.create('ns0:VirtualMachineConfigSpec') opt = client_factory.create('ns0:OptionValue') opt.key = 'machine.id' opt.value = machine_id_str virtual_machine_config_spec.extraConfig = [opt] return virtual_machine_config_spec
[ "def", "get_machine_id_change_spec", "(", "client_factory", ",", "machine_id_str", ")", ":", "virtual_machine_config_spec", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineConfigSpec'", ")", "opt", "=", "client_factory", ".", "create", "(", "'ns0:OptionValue'", ")", "opt", ".", "key", "=", "'machine.id'", "opt", ".", "value", "=", "machine_id_str", "virtual_machine_config_spec", ".", "extraConfig", "=", "[", "opt", "]", "return", "virtual_machine_config_spec" ]
builds the machine id change config spec .
train
false
6,614
def retry_argument_spec(spec=None): arg_spec = dict(retries=dict(type='int'), retry_pause=dict(type='float', default=1)) if spec: arg_spec.update(spec) return arg_spec
[ "def", "retry_argument_spec", "(", "spec", "=", "None", ")", ":", "arg_spec", "=", "dict", "(", "retries", "=", "dict", "(", "type", "=", "'int'", ")", ",", "retry_pause", "=", "dict", "(", "type", "=", "'float'", ",", "default", "=", "1", ")", ")", "if", "spec", ":", "arg_spec", ".", "update", "(", "spec", ")", "return", "arg_spec" ]
creates an argument spec for working with retrying .
train
false