column                 type           range / classes
id_within_dataset      int64          1 to 55.5k
snippet                stringlengths  19 to 14.2k
tokens                 listlengths    6 to 1.63k
nl                     stringlengths  6 to 352
split_within_dataset   stringclasses  1 value
is_duplicated          bool           2 classes
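A minimal sketch of reading rows with this schema via the Hugging Face datasets library; the dataset id below is hypothetical:

    from datasets import load_dataset  # pip install datasets

    ds = load_dataset("user/code-to-nl", split="train")  # hypothetical dataset id
    row = ds[0]
    print(row["nl"])           # natural-language description of the snippet
    print(row["snippet"])      # raw source code, stored on a single line
    print(len(row["tokens"]))  # pre-tokenized form of the same snippet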
7,949
def organization_list(context, data_dict): _check_access('organization_list', context, data_dict) data_dict['groups'] = data_dict.pop('organizations', []) data_dict.setdefault('type', 'organization') return _group_or_org_list(context, data_dict, is_org=True)
[ "def", "organization_list", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'organization_list'", ",", "context", ",", "data_dict", ")", "data_dict", "[", "'groups'", "]", "=", "data_dict", ".", "pop", "(", "'organizations'", ",", "[", "]", ")", "data_dict", ".", "setdefault", "(", "'type'", ",", "'organization'", ")", "return", "_group_or_org_list", "(", "context", ",", "data_dict", ",", "is_org", "=", "True", ")" ]
return a list of the names of the site's organizations .
train
false
7,950
def xl_range_formula(sheetname, first_row, first_col, last_row, last_col): cell_range = xl_range_abs(first_row, first_col, last_row, last_col) sheetname = quote_sheetname(sheetname) return ((sheetname + '!') + cell_range)
[ "def", "xl_range_formula", "(", "sheetname", ",", "first_row", ",", "first_col", ",", "last_row", ",", "last_col", ")", ":", "cell_range", "=", "xl_range_abs", "(", "first_row", ",", "first_col", ",", "last_row", ",", "last_col", ")", "sheetname", "=", "quote_sheetname", "(", "sheetname", ")", "return", "(", "(", "sheetname", "+", "'!'", ")", "+", "cell_range", ")" ]
convert worksheet name and zero indexed row and col cell references to a sheet1!a1:b1 range formula string .
train
false
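This matches the helper of the same name in XlsxWriter's xlsxwriter.utility module; a hedged usage example (xl_range_abs yields absolute $-prefixed references, and quote_sheetname only quotes names that need it):

    from xlsxwriter.utility import xl_range_formula

    xl_range_formula('Sheet1', 0, 0, 0, 1)    # "Sheet1!$A$1:$B$1"
    xl_range_formula('New Data', 1, 2, 8, 2)  # "'New Data'!$C$2:$C$9"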
7,952
def host_create(host, groups, interfaces, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'host.create' params = {'host': host} if (not isinstance(groups, list)): groups = [groups] grps = [] for group in groups: grps.append({'groupid': group}) params['groups'] = grps if (not isinstance(interfaces, list)): interfaces = [interfaces] params['interfaces'] = interfaces params = _params_extend(params, _ignore_name=True, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result']['hostids'] else: raise KeyError except KeyError: return ret
[ "def", "host_create", "(", "host", ",", "groups", ",", "interfaces", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'host.create'", "params", "=", "{", "'host'", ":", "host", "}", "if", "(", "not", "isinstance", "(", "groups", ",", "list", ")", ")", ":", "groups", "=", "[", "groups", "]", "grps", "=", "[", "]", "for", "group", "in", "groups", ":", "grps", ".", "append", "(", "{", "'groupid'", ":", "group", "}", ")", "params", "[", "'groups'", "]", "=", "grps", "if", "(", "not", "isinstance", "(", "interfaces", ",", "list", ")", ")", ":", "interfaces", "=", "[", "interfaces", "]", "params", "[", "'interfaces'", "]", "=", "interfaces", "params", "=", "_params_extend", "(", "params", ",", "_ignore_name", "=", "True", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'hostids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
create new host .
train
true
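Note that when _login fails, conn_args is falsy, so raise KeyError fires before ret is ever bound and the except branch's return ret raises UnboundLocalError; a hedged fix sketch for that branch:

    except KeyError:
        # `ret` is unbound when login fails; return an explicit failure instead.
        return False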
7,953
def lazycache(filename, module_globals): if (filename in cache): if (len(cache[filename]) == 1): return True else: return False if ((not filename) or (filename.startswith('<') and filename.endswith('>'))): return False if (module_globals and ('__loader__' in module_globals)): name = module_globals.get('__name__') loader = module_globals['__loader__'] get_source = getattr(loader, 'get_source', None) if (name and get_source): get_lines = functools.partial(get_source, name) cache[filename] = (get_lines,) return True return False
[ "def", "lazycache", "(", "filename", ",", "module_globals", ")", ":", "if", "(", "filename", "in", "cache", ")", ":", "if", "(", "len", "(", "cache", "[", "filename", "]", ")", "==", "1", ")", ":", "return", "True", "else", ":", "return", "False", "if", "(", "(", "not", "filename", ")", "or", "(", "filename", ".", "startswith", "(", "'<'", ")", "and", "filename", ".", "endswith", "(", "'>'", ")", ")", ")", ":", "return", "False", "if", "(", "module_globals", "and", "(", "'__loader__'", "in", "module_globals", ")", ")", ":", "name", "=", "module_globals", ".", "get", "(", "'__name__'", ")", "loader", "=", "module_globals", "[", "'__loader__'", "]", "get_source", "=", "getattr", "(", "loader", ",", "'get_source'", ",", "None", ")", "if", "(", "name", "and", "get_source", ")", ":", "get_lines", "=", "functools", ".", "partial", "(", "get_source", ",", "name", ")", "cache", "[", "filename", "]", "=", "(", "get_lines", ",", ")", "return", "True", "return", "False" ]
seed the cache for filename with module_globals .
train
false
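This is linecache.lazycache from the Python 3 standard library; a hedged usage sketch:

    import linecache

    # Seed the cache for a real module file without reading its source yet.
    # On success the cache holds a 1-tuple wrapping a get_lines callable,
    # which is why a length-1 entry above means "already lazily cached".
    seeded = linecache.lazycache(__file__, globals())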
7,955
def safewrite(filename, content): f = file((filename + '.tmp'), 'w') f.write(content) f.close() os.rename(f.name, filename)
[ "def", "safewrite", "(", "filename", ",", "content", ")", ":", "f", "=", "file", "(", "(", "filename", "+", "'.tmp'", ")", ",", "'w'", ")", "f", ".", "write", "(", "content", ")", "f", ".", "close", "(", ")", "os", ".", "rename", "(", "f", ".", "name", ",", "filename", ")" ]
writes the content to a temp file and then moves the temp file to given filename to avoid overwriting the existing file in case of errors .
train
false
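The snippet relies on the Python 2 file() builtin; a Python 3 equivalent under the same write-then-rename scheme:

    import os

    def safewrite(filename, content):
        # Write to a temp file first so a failed write never clobbers the target.
        with open(filename + '.tmp', 'w') as f:
            f.write(content)
        os.rename(f.name, filename)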
7,956
def validate_timeout_or_zero(option, value): if (value is None): raise ConfigurationError(('%s cannot be None' % (option,))) if ((value == 0) or (value == '0')): return 0 return (validate_positive_float(option, value) / 1000.0)
[ "def", "validate_timeout_or_zero", "(", "option", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "raise", "ConfigurationError", "(", "(", "'%s cannot be None'", "%", "(", "option", ",", ")", ")", ")", "if", "(", "(", "value", "==", "0", ")", "or", "(", "value", "==", "'0'", ")", ")", ":", "return", "0", "return", "(", "validate_positive_float", "(", "option", ",", "value", ")", "/", "1000.0", ")" ]
validates a timeout specified in milliseconds, returning a value in floating point seconds, for the case where none is an error and 0 is valid .
train
true
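Worked examples of the conversion, assuming ConfigurationError and validate_positive_float from the surrounding module (the style matches pymongo's option validators):

    validate_timeout_or_zero('connectTimeoutMS', 1500)  # 1.5 (seconds)
    validate_timeout_or_zero('connectTimeoutMS', '0')   # 0 (disabled)
    validate_timeout_or_zero('connectTimeoutMS', None)  # raises ConfigurationError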
7,957
def encode_block(payload, requester, responder): assert (len(requester[1].public_key) == PK_LENGTH) assert (len(responder[1].public_key) == PK_LENGTH) return pack(crawl_response_format, *(payload.up, payload.down, payload.total_up_requester, payload.total_down_requester, payload.sequence_number_requester, payload.previous_hash_requester, payload.total_up_responder, payload.total_down_responder, payload.sequence_number_responder, payload.previous_hash_responder, requester[1].public_key, requester[0], responder[1].public_key, responder[0]))
[ "def", "encode_block", "(", "payload", ",", "requester", ",", "responder", ")", ":", "assert", "(", "len", "(", "requester", "[", "1", "]", ".", "public_key", ")", "==", "PK_LENGTH", ")", "assert", "(", "len", "(", "responder", "[", "1", "]", ".", "public_key", ")", "==", "PK_LENGTH", ")", "return", "pack", "(", "crawl_response_format", ",", "*", "(", "payload", ".", "up", ",", "payload", ".", "down", ",", "payload", ".", "total_up_requester", ",", "payload", ".", "total_down_requester", ",", "payload", ".", "sequence_number_requester", ",", "payload", ".", "previous_hash_requester", ",", "payload", ".", "total_up_responder", ",", "payload", ".", "total_down_responder", ",", "payload", ".", "sequence_number_responder", ",", "payload", ".", "previous_hash_responder", ",", "requester", "[", "1", "]", ".", "public_key", ",", "requester", "[", "0", "]", ",", "responder", "[", "1", "]", ".", "public_key", ",", "responder", "[", "0", "]", ")", ")" ]
this function encodes a block .
train
false
7,958
def row_scale(x, s): return col_scale(x.T, s).T
[ "def", "row_scale", "(", "x", ",", "s", ")", ":", "return", "col_scale", "(", "x", ".", "T", ",", "s", ")", ".", "T" ]
scale each row of a sparse matrix by the corresponding element of a dense vector .
train
false
7,960
def fix_ns(xpath, split=False, propagate_ns=True, default_ns=u''): fixed = [] ns_blocks = xpath.split(u'{') for ns_block in ns_blocks: if (u'}' in ns_block): namespace = ns_block.split(u'}')[0] elements = ns_block.split(u'}')[1].split(u'/') else: namespace = default_ns elements = ns_block.split(u'/') for element in elements: if element: if (propagate_ns and (element[0] != u'*')): tag = (u'{%s}%s' % (namespace, element)) else: tag = element fixed.append(tag) if split: return fixed return u'/'.join(fixed)
[ "def", "fix_ns", "(", "xpath", ",", "split", "=", "False", ",", "propagate_ns", "=", "True", ",", "default_ns", "=", "u''", ")", ":", "fixed", "=", "[", "]", "ns_blocks", "=", "xpath", ".", "split", "(", "u'{'", ")", "for", "ns_block", "in", "ns_blocks", ":", "if", "(", "u'}'", "in", "ns_block", ")", ":", "namespace", "=", "ns_block", ".", "split", "(", "u'}'", ")", "[", "0", "]", "elements", "=", "ns_block", ".", "split", "(", "u'}'", ")", "[", "1", "]", ".", "split", "(", "u'/'", ")", "else", ":", "namespace", "=", "default_ns", "elements", "=", "ns_block", ".", "split", "(", "u'/'", ")", "for", "element", "in", "elements", ":", "if", "element", ":", "if", "(", "propagate_ns", "and", "(", "element", "[", "0", "]", "!=", "u'*'", ")", ")", ":", "tag", "=", "(", "u'{%s}%s'", "%", "(", "namespace", ",", "element", ")", ")", "else", ":", "tag", "=", "element", "fixed", ".", "append", "(", "tag", ")", "if", "split", ":", "return", "fixed", "return", "u'/'", ".", "join", "(", "fixed", ")" ]
apply the stanza's namespace to elements in an xpath expression .
train
false
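A worked example of the namespace propagation (the function appears in SleekXMPP's XPath helpers):

    fix_ns(u'{jabber:client}message/body')
    # -> u'{jabber:client}message/{jabber:client}body'
    fix_ns(u'{jabber:client}message/body', split=True)
    # -> [u'{jabber:client}message', u'{jabber:client}body']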
7,962
def console_encode(string, errors='replace', stream=sys.__stdout__): encoding = _get_console_encoding(stream) if (PY3 and (encoding != 'UTF-8')): return string.encode(encoding, errors).decode(encoding) if (PY3 or IRONPYTHON): return string return string.encode(encoding, errors)
[ "def", "console_encode", "(", "string", ",", "errors", "=", "'replace'", ",", "stream", "=", "sys", ".", "__stdout__", ")", ":", "encoding", "=", "_get_console_encoding", "(", "stream", ")", "if", "(", "PY3", "and", "(", "encoding", "!=", "'UTF-8'", ")", ")", ":", "return", "string", ".", "encode", "(", "encoding", ",", "errors", ")", ".", "decode", "(", "encoding", ")", "if", "(", "PY3", "or", "IRONPYTHON", ")", ":", "return", "string", "return", "string", ".", "encode", "(", "encoding", ",", "errors", ")" ]
encodes unicode to bytes in console or system encoding .
train
false
7,963
def create_typestr2type_dicts(dont_include_in_type2typestr=['lambda']): typenamelist = [tname for tname in dir(types) if tname.endswith('Type')] (typestr2type, type2typestr) = ({}, {}) for tname in typenamelist: name = tname[:(-4)].lower() obj = getattr(types, tname) typestr2type[name] = obj if (name not in dont_include_in_type2typestr): type2typestr[obj] = name return (typestr2type, type2typestr)
[ "def", "create_typestr2type_dicts", "(", "dont_include_in_type2typestr", "=", "[", "'lambda'", "]", ")", ":", "typenamelist", "=", "[", "tname", "for", "tname", "in", "dir", "(", "types", ")", "if", "tname", ".", "endswith", "(", "'Type'", ")", "]", "(", "typestr2type", ",", "type2typestr", ")", "=", "(", "{", "}", ",", "{", "}", ")", "for", "tname", "in", "typenamelist", ":", "name", "=", "tname", "[", ":", "(", "-", "4", ")", "]", ".", "lower", "(", ")", "obj", "=", "getattr", "(", "types", ",", "tname", ")", "typestr2type", "[", "name", "]", "=", "obj", "if", "(", "name", "not", "in", "dont_include_in_type2typestr", ")", ":", "type2typestr", "[", "obj", "]", "=", "name", "return", "(", "typestr2type", ",", "type2typestr", ")" ]
return dictionaries mapping lower case typename to type objects from the types package .
train
true
7,966
def argsreduce(cond, *args): newargs = np.atleast_1d(*args) if (not isinstance(newargs, list)): newargs = [newargs] expand_arr = (cond == cond) return [np.extract(cond, (arr1 * expand_arr)) for arr1 in newargs]
[ "def", "argsreduce", "(", "cond", ",", "*", "args", ")", ":", "newargs", "=", "np", ".", "atleast_1d", "(", "*", "args", ")", "if", "(", "not", "isinstance", "(", "newargs", ",", "list", ")", ")", ":", "newargs", "=", "[", "newargs", "]", "expand_arr", "=", "(", "cond", "==", "cond", ")", "return", "[", "np", ".", "extract", "(", "cond", ",", "(", "arr1", "*", "expand_arr", ")", ")", "for", "arr1", "in", "newargs", "]" ]
return the sequence of ravel(args[i]) where ravel(condition) is true in 1d .
train
false
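A worked example; scalars are broadcast against cond via expand_arr before extraction (numpy assumed as np):

    import numpy as np

    cond = np.array([True, False, True])
    a = np.array([1, 2, 3])
    argsreduce(cond, a, 5)  # [array([1, 3]), array([5, 5])]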
7,967
def load_spatial_filters(packed=True): names = ('Bilinear', 'Hanning', 'Hamming', 'Hermite', 'Kaiser', 'Quadric', 'Bicubic', 'CatRom', 'Mitchell', 'Spline16', 'Spline36', 'Gaussian', 'Bessel', 'Sinc', 'Lanczos', 'Blackman', 'Nearest') kernel = np.load(op.join(DATA_DIR, 'spatial-filters.npy')) if packed: kernel = pack_unit(kernel) return (kernel, names)
[ "def", "load_spatial_filters", "(", "packed", "=", "True", ")", ":", "names", "=", "(", "'Bilinear'", ",", "'Hanning'", ",", "'Hamming'", ",", "'Hermite'", ",", "'Kaiser'", ",", "'Quadric'", ",", "'Bicubic'", ",", "'CatRom'", ",", "'Mitchell'", ",", "'Spline16'", ",", "'Spline36'", ",", "'Gaussian'", ",", "'Bessel'", ",", "'Sinc'", ",", "'Lanczos'", ",", "'Blackman'", ",", "'Nearest'", ")", "kernel", "=", "np", ".", "load", "(", "op", ".", "join", "(", "DATA_DIR", ",", "'spatial-filters.npy'", ")", ")", "if", "packed", ":", "kernel", "=", "pack_unit", "(", "kernel", ")", "return", "(", "kernel", ",", "names", ")" ]
load the spatial-filters kernel parameters; packed (bool) controls whether the data is in "packed" representation for use in glsl code .
train
true
7,968
def get_container_info(env, app, swift_source=None): (version, account, container, unused) = split_path(env['PATH_INFO'], 3, 4, True) info = _get_info_from_caches(app, env, account, container) if (not info): env.setdefault('swift.infocache', {}) is_autocreate_account = account.startswith(getattr(app, 'auto_create_account_prefix', '.')) if (not is_autocreate_account): account_info = get_account_info(env, app, swift_source) if ((not account_info) or (not is_success(account_info['status']))): return headers_to_container_info({}, 0) req = _prepare_pre_auth_info_request(env, ('/%s/%s/%s' % (version, account, container)), (swift_source or 'GET_CONTAINER_INFO')) resp = req.get_response(app) info = _get_info_from_infocache(env, account, container) if (info is None): info = set_info_cache(app, env, account, container, resp) if info: info = deepcopy(info) else: info = headers_to_container_info({}, 0) if (('object_count' not in info) and ('container_size' in info)): info['object_count'] = info.pop('container_size') for field in ('storage_policy', 'bytes', 'object_count'): if (info.get(field) is None): info[field] = 0 else: info[field] = int(info[field]) return info
[ "def", "get_container_info", "(", "env", ",", "app", ",", "swift_source", "=", "None", ")", ":", "(", "version", ",", "account", ",", "container", ",", "unused", ")", "=", "split_path", "(", "env", "[", "'PATH_INFO'", "]", ",", "3", ",", "4", ",", "True", ")", "info", "=", "_get_info_from_caches", "(", "app", ",", "env", ",", "account", ",", "container", ")", "if", "(", "not", "info", ")", ":", "env", ".", "setdefault", "(", "'swift.infocache'", ",", "{", "}", ")", "is_autocreate_account", "=", "account", ".", "startswith", "(", "getattr", "(", "app", ",", "'auto_create_account_prefix'", ",", "'.'", ")", ")", "if", "(", "not", "is_autocreate_account", ")", ":", "account_info", "=", "get_account_info", "(", "env", ",", "app", ",", "swift_source", ")", "if", "(", "(", "not", "account_info", ")", "or", "(", "not", "is_success", "(", "account_info", "[", "'status'", "]", ")", ")", ")", ":", "return", "headers_to_container_info", "(", "{", "}", ",", "0", ")", "req", "=", "_prepare_pre_auth_info_request", "(", "env", ",", "(", "'/%s/%s/%s'", "%", "(", "version", ",", "account", ",", "container", ")", ")", ",", "(", "swift_source", "or", "'GET_CONTAINER_INFO'", ")", ")", "resp", "=", "req", ".", "get_response", "(", "app", ")", "info", "=", "_get_info_from_infocache", "(", "env", ",", "account", ",", "container", ")", "if", "(", "info", "is", "None", ")", ":", "info", "=", "set_info_cache", "(", "app", ",", "env", ",", "account", ",", "container", ",", "resp", ")", "if", "info", ":", "info", "=", "deepcopy", "(", "info", ")", "else", ":", "info", "=", "headers_to_container_info", "(", "{", "}", ",", "0", ")", "if", "(", "(", "'object_count'", "not", "in", "info", ")", "and", "(", "'container_size'", "in", "info", ")", ")", ":", "info", "[", "'object_count'", "]", "=", "info", ".", "pop", "(", "'container_size'", ")", "for", "field", "in", "(", "'storage_policy'", ",", "'bytes'", ",", "'object_count'", ")", ":", "if", "(", "info", ".", "get", "(", "field", ")", "is", "None", ")", ":", "info", "[", "field", "]", "=", "0", "else", ":", "info", "[", "field", "]", "=", "int", "(", "info", "[", "field", "]", ")", "return", "info" ]
get the info structure for a container .
train
false
7,969
def use_rand_uuid_instead_of_uuid4(logical_line, filename): if ('tempest/lib/' in filename): return if ('uuid.uuid4()' not in logical_line): return msg = 'T113: Tests should use data_utils.rand_uuid()/rand_uuid_hex() instead of uuid.uuid4()/uuid.uuid4().hex' (yield (0, msg))
[ "def", "use_rand_uuid_instead_of_uuid4", "(", "logical_line", ",", "filename", ")", ":", "if", "(", "'tempest/lib/'", "in", "filename", ")", ":", "return", "if", "(", "'uuid.uuid4()'", "not", "in", "logical_line", ")", ":", "return", "msg", "=", "'T113: Tests should use data_utils.rand_uuid()/rand_uuid_hex() instead of uuid.uuid4()/uuid.uuid4().hex'", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
check that tests use data_utils.rand_uuid()/rand_uuid_hex() instead of uuid.uuid4()/uuid.uuid4().hex .
train
false
7,972
def user_log_dir(appname, appauthor=None, version=None, opinion=True): if (sys.platform == 'darwin'): path = os.path.join(os.path.expanduser('~/Library/Logs'), appname) elif (sys.platform == 'win32'): path = user_data_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, 'Logs') else: path = user_cache_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, 'log') if version: path = os.path.join(path, version) return path
[ "def", "user_log_dir", "(", "appname", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "opinion", "=", "True", ")", ":", "if", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~/Library/Logs'", ")", ",", "appname", ")", "elif", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "path", "=", "user_data_dir", "(", "appname", ",", "appauthor", ",", "version", ")", "version", "=", "False", "if", "opinion", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'Logs'", ")", "else", ":", "path", "=", "user_cache_dir", "(", "appname", ",", "appauthor", ",", "version", ")", "version", "=", "False", "if", "opinion", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'log'", ")", "if", "version", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "version", ")", "return", "path" ]
return full path to the user-specific log dir for this application .
train
true
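This matches user_log_dir from the appdirs package; a hedged example of the per-platform results implied by the branches above:

    from appdirs import user_log_dir

    user_log_dir("MyApp", "MyCompany")
    # macOS:   ~/Library/Logs/MyApp
    # Windows: %LOCALAPPDATA%\MyCompany\MyApp\Logs  (opinion=True appends "Logs")
    # else:    ~/.cache/MyApp/log                   (opinion=True appends "log")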
7,973
def parameter_bank_names(device, bank_name_dict=BANK_NAME_DICT): if (device != None): if (device.class_name in bank_name_dict.keys()): return bank_name_dict[device.class_name] banks = number_of_parameter_banks(device) def _default_bank_name(bank_index): return ('Bank ' + str((bank_index + 1))) if ((device.class_name in MAX_DEVICES) and (banks != 0)): def _is_ascii(c): return (ord(c) < 128) def _bank_name(bank_index): try: name = device.get_bank_name(bank_index) except: name = None if name: return str(filter(_is_ascii, name)) else: return _default_bank_name(bank_index) return map(_bank_name, range(0, banks)) else: return map(_default_bank_name, range(0, banks)) return []
[ "def", "parameter_bank_names", "(", "device", ",", "bank_name_dict", "=", "BANK_NAME_DICT", ")", ":", "if", "(", "device", "!=", "None", ")", ":", "if", "(", "device", ".", "class_name", "in", "bank_name_dict", ".", "keys", "(", ")", ")", ":", "return", "bank_name_dict", "[", "device", ".", "class_name", "]", "banks", "=", "number_of_parameter_banks", "(", "device", ")", "def", "_default_bank_name", "(", "bank_index", ")", ":", "return", "(", "'Bank '", "+", "str", "(", "(", "bank_index", "+", "1", ")", ")", ")", "if", "(", "(", "device", ".", "class_name", "in", "MAX_DEVICES", ")", "and", "(", "banks", "!=", "0", ")", ")", ":", "def", "_is_ascii", "(", "c", ")", ":", "return", "(", "ord", "(", "c", ")", "<", "128", ")", "def", "_bank_name", "(", "bank_index", ")", ":", "try", ":", "name", "=", "device", ".", "get_bank_name", "(", "bank_index", ")", "except", ":", "name", "=", "None", "if", "name", ":", "return", "str", "(", "filter", "(", "_is_ascii", ",", "name", ")", ")", "else", ":", "return", "_default_bank_name", "(", "bank_index", ")", "return", "map", "(", "_bank_name", ",", "range", "(", "0", ",", "banks", ")", ")", "else", ":", "return", "map", "(", "_default_bank_name", ",", "range", "(", "0", ",", "banks", ")", ")", "return", "[", "]" ]
determine the bank names to use for a device .
train
false
7,974
def docker_client_version(): ua = flask.request.headers.get('user-agent', '') m = _re_docker_version.search(ua) if (not m): return version = m.group(1) if ('-' in version): version = version.split('-')[0] try: return tuple((int(x) for x in version)) except ValueError: return
[ "def", "docker_client_version", "(", ")", ":", "ua", "=", "flask", ".", "request", ".", "headers", ".", "get", "(", "'user-agent'", ",", "''", ")", "m", "=", "_re_docker_version", ".", "search", "(", "ua", ")", "if", "(", "not", "m", ")", ":", "return", "version", "=", "m", ".", "group", "(", "1", ")", "if", "(", "'-'", "in", "version", ")", ":", "version", "=", "version", ".", "split", "(", "'-'", ")", "[", "0", "]", "try", ":", "return", "tuple", "(", "(", "int", "(", "x", ")", "for", "x", "in", "version", ")", ")", "except", "ValueError", ":", "return" ]
try and extract the client version from the user-agent string so we can warn older versions of the docker engine/daemon about incompatible apis .
train
false
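Note that the final generator iterates over the characters of version, so a dotted version such as '1.9.1' raises ValueError on int('.') and the function always returns None for multi-part versions; a sketch of the presumably intended parse:

    return tuple(int(x) for x in version.split('.'))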
7,976
@utils.arg('id', metavar='<id>', help='Unique ID of the monitor type to delete') @utils.service_type('monitor') def do_type_delete(cs, args): cs.monitor_types.delete(args.id)
[ "@", "utils", ".", "arg", "(", "'id'", ",", "metavar", "=", "'<id>'", ",", "help", "=", "'Unique ID of the monitor type to delete'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_type_delete", "(", "cs", ",", "args", ")", ":", "cs", ".", "monitor_types", ".", "delete", "(", "args", ".", "id", ")" ]
delete a specific monitor type .
train
false
7,977
def get_drivers(): return [str(d) for d in drivers.values()]
[ "def", "get_drivers", "(", ")", ":", "return", "[", "str", "(", "d", ")", "for", "d", "in", "drivers", ".", "values", "(", ")", "]" ]
get the list of drivers currently registered with this api .
train
false
7,978
def _pretty_fe_relation(ferel): outstr = u'<{0.type.superFrameName}={0.frameRelation.superFrameName}.{0.superFEName} -- {0.type.name} -> {0.type.subFrameName}={0.frameRelation.subFrameName}.{0.subFEName}>'.format(ferel) return outstr
[ "def", "_pretty_fe_relation", "(", "ferel", ")", ":", "outstr", "=", "u'<{0.type.superFrameName}={0.frameRelation.superFrameName}.{0.superFEName} -- {0.type.name} -> {0.type.subFrameName}={0.frameRelation.subFrameName}.{0.subFEName}>'", ".", "format", "(", "ferel", ")", "return", "outstr" ]
helper function for pretty-printing an fe relation .
train
false
7,980
def desaturate(color, percent): return adjust(color, 1, (- percent))
[ "def", "desaturate", "(", "color", ",", "percent", ")", ":", "return", "adjust", "(", "color", ",", "1", ",", "(", "-", "percent", ")", ")" ]
decrease the saturation channel of a color by some percent .
train
false
7,981
@contextlib.contextmanager def temp_cwd(name='tempcwd', quiet=False): if (have_unicode and isinstance(name, unicode)): try: name = name.encode((sys.getfilesystemencoding() or 'ascii')) except UnicodeEncodeError: if (not quiet): raise unittest.SkipTest('unable to encode the cwd name with the filesystem encoding.') saved_dir = os.getcwd() is_temporary = False try: os.mkdir(name) os.chdir(name) is_temporary = True except OSError: if (not quiet): raise warnings.warn(('tests may fail, unable to change the CWD to ' + name), RuntimeWarning, stacklevel=3) try: (yield os.getcwd()) finally: os.chdir(saved_dir) if is_temporary: rmtree(name)
[ "@", "contextlib", ".", "contextmanager", "def", "temp_cwd", "(", "name", "=", "'tempcwd'", ",", "quiet", "=", "False", ")", ":", "if", "(", "have_unicode", "and", "isinstance", "(", "name", ",", "unicode", ")", ")", ":", "try", ":", "name", "=", "name", ".", "encode", "(", "(", "sys", ".", "getfilesystemencoding", "(", ")", "or", "'ascii'", ")", ")", "except", "UnicodeEncodeError", ":", "if", "(", "not", "quiet", ")", ":", "raise", "unittest", ".", "SkipTest", "(", "'unable to encode the cwd name with the filesystem encoding.'", ")", "saved_dir", "=", "os", ".", "getcwd", "(", ")", "is_temporary", "=", "False", "try", ":", "os", ".", "mkdir", "(", "name", ")", "os", ".", "chdir", "(", "name", ")", "is_temporary", "=", "True", "except", "OSError", ":", "if", "(", "not", "quiet", ")", ":", "raise", "warnings", ".", "warn", "(", "(", "'tests may fail, unable to change the CWD to '", "+", "name", ")", ",", "RuntimeWarning", ",", "stacklevel", "=", "3", ")", "try", ":", "(", "yield", "os", ".", "getcwd", "(", ")", ")", "finally", ":", "os", ".", "chdir", "(", "saved_dir", ")", "if", "is_temporary", ":", "rmtree", "(", "name", ")" ]
context manager that creates a temporary directory and sets it as the cwd .
train
false
7,982
def make_expando(primitive): if isinstance(primitive, dict): return Expando(primitive) elif isinstance(primitive, SEQS): seq = type(primitive) return seq((make_expando(attr) for attr in primitive)) else: return primitive
[ "def", "make_expando", "(", "primitive", ")", ":", "if", "isinstance", "(", "primitive", ",", "dict", ")", ":", "return", "Expando", "(", "primitive", ")", "elif", "isinstance", "(", "primitive", ",", "SEQS", ")", ":", "seq", "=", "type", "(", "primitive", ")", "return", "seq", "(", "(", "make_expando", "(", "attr", ")", "for", "attr", "in", "primitive", ")", ")", "else", ":", "return", "primitive" ]
creates an expando object .
train
false
7,983
def ThrottleUsage(): for table in vf_schema.SCHEMA.GetTables(): table.read_units = max(1, (table.read_units // 4)) table.write_units = max(1, (table.write_units // 4))
[ "def", "ThrottleUsage", "(", ")", ":", "for", "table", "in", "vf_schema", ".", "SCHEMA", ".", "GetTables", "(", ")", ":", "table", ".", "read_units", "=", "max", "(", "1", ",", "(", "table", ".", "read_units", "//", "4", ")", ")", "table", ".", "write_units", "=", "max", "(", "1", ",", "(", "table", ".", "write_units", "//", "4", ")", ")" ]
ensures that only a portion of total read/write capacity is consumed by this checker .
train
false
7,984
def withAttribute(*args, **attrDict): if args: attrs = args[:] else: attrs = attrDict.items() attrs = [(k, v) for (k, v) in attrs] def pa(s, l, tokens): for (attrName, attrValue) in attrs: if (attrName not in tokens): raise ParseException(s, l, ('no matching attribute ' + attrName)) if ((attrValue != withAttribute.ANY_VALUE) and (tokens[attrName] != attrValue)): raise ParseException(s, l, ("attribute '%s' has value '%s', must be '%s'" % (attrName, tokens[attrName], attrValue))) return pa
[ "def", "withAttribute", "(", "*", "args", ",", "**", "attrDict", ")", ":", "if", "args", ":", "attrs", "=", "args", "[", ":", "]", "else", ":", "attrs", "=", "attrDict", ".", "items", "(", ")", "attrs", "=", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "attrs", "]", "def", "pa", "(", "s", ",", "l", ",", "tokens", ")", ":", "for", "(", "attrName", ",", "attrValue", ")", "in", "attrs", ":", "if", "(", "attrName", "not", "in", "tokens", ")", ":", "raise", "ParseException", "(", "s", ",", "l", ",", "(", "'no matching attribute '", "+", "attrName", ")", ")", "if", "(", "(", "attrValue", "!=", "withAttribute", ".", "ANY_VALUE", ")", "and", "(", "tokens", "[", "attrName", "]", "!=", "attrValue", ")", ")", ":", "raise", "ParseException", "(", "s", ",", "l", ",", "(", "\"attribute '%s' has value '%s', must be '%s'\"", "%", "(", "attrName", ",", "tokens", "[", "attrName", "]", ",", "attrValue", ")", ")", ")", "return", "pa" ]
helper to create a validating parse action to be used with start tags created with makeXMLTags or makeHTMLTags .
train
true
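A hedged pyparsing usage sketch, pairing the parse action with makeHTMLTags as the description suggests:

    from pyparsing import makeHTMLTags, withAttribute

    div_start, div_end = makeHTMLTags("div")
    # Accept only <div> tags whose type attribute equals "grid".
    div_start.setParseAction(withAttribute(type="grid"))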
7,985
def mutSet(individual): if (random.random() < 0.5): if (len(individual) > 0): individual.remove(random.choice(sorted(tuple(individual)))) else: individual.add(random.randrange(NBR_ITEMS)) return (individual,)
[ "def", "mutSet", "(", "individual", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "0.5", ")", ":", "if", "(", "len", "(", "individual", ")", ">", "0", ")", ":", "individual", ".", "remove", "(", "random", ".", "choice", "(", "sorted", "(", "tuple", "(", "individual", ")", ")", ")", ")", "else", ":", "individual", ".", "add", "(", "random", ".", "randrange", "(", "NBR_ITEMS", ")", ")", "return", "(", "individual", ",", ")" ]
mutation that pops or adds an element .
train
false
7,986
def check_timeout(start_time, timeout): return timeutils.is_older_than(start_time, timeout)
[ "def", "check_timeout", "(", "start_time", ",", "timeout", ")", ":", "return", "timeutils", ".", "is_older_than", "(", "start_time", ",", "timeout", ")" ]
return true if the specified time has passed .
train
false
7,987
def test_sensor_value_from_code(): with assert_setup_component(1): setup_component(HASS, sensor.DOMAIN, {sensor.DOMAIN: {'platform': 'pilight', 'name': 'test', 'variable': 'test', 'payload': {'protocol': 'test-protocol'}, 'unit_of_measurement': 'fav unit'}}) state = HASS.states.get('sensor.test') assert (state.state == 'unknown') unit_of_measurement = state.attributes.get('unit_of_measurement') assert (unit_of_measurement == 'fav unit') fire_pilight_message(protocol='test-protocol', data={'test': 42}) HASS.block_till_done() state = HASS.states.get('sensor.test') assert (state.state == '42')
[ "def", "test_sensor_value_from_code", "(", ")", ":", "with", "assert_setup_component", "(", "1", ")", ":", "setup_component", "(", "HASS", ",", "sensor", ".", "DOMAIN", ",", "{", "sensor", ".", "DOMAIN", ":", "{", "'platform'", ":", "'pilight'", ",", "'name'", ":", "'test'", ",", "'variable'", ":", "'test'", ",", "'payload'", ":", "{", "'protocol'", ":", "'test-protocol'", "}", ",", "'unit_of_measurement'", ":", "'fav unit'", "}", "}", ")", "state", "=", "HASS", ".", "states", ".", "get", "(", "'sensor.test'", ")", "assert", "(", "state", ".", "state", "==", "'unknown'", ")", "unit_of_measurement", "=", "state", ".", "attributes", ".", "get", "(", "'unit_of_measurement'", ")", "assert", "(", "unit_of_measurement", "==", "'fav unit'", ")", "fire_pilight_message", "(", "protocol", "=", "'test-protocol'", ",", "data", "=", "{", "'test'", ":", "42", "}", ")", "HASS", ".", "block_till_done", "(", ")", "state", "=", "HASS", ".", "states", ".", "get", "(", "'sensor.test'", ")", "assert", "(", "state", ".", "state", "==", "'42'", ")" ]
test the setting of value via pilight .
train
false
7,988
def get_disk_usage(): files_path = get_files_path() if (not os.path.exists(files_path)): return 0 (err, out) = execute_in_shell(u'du -hsm {files_path}'.format(files_path=files_path)) return cint(out.split(u'\n')[(-2)].split(u' DCTB ')[0])
[ "def", "get_disk_usage", "(", ")", ":", "files_path", "=", "get_files_path", "(", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "files_path", ")", ")", ":", "return", "0", "(", "err", ",", "out", ")", "=", "execute_in_shell", "(", "u'du -hsm {files_path}'", ".", "format", "(", "files_path", "=", "files_path", ")", ")", "return", "cint", "(", "out", ".", "split", "(", "u'\\n'", ")", "[", "(", "-", "2", ")", "]", ".", "split", "(", "u' DCTB '", ")", "[", "0", "]", ")" ]
get disk usage in megabytes (du -hsm) for the files directory .
train
false
7,989
def getProfileBaseName(repository): if (repository.getProfileDirectory == None): return repository.baseName return os.path.join(repository.getProfileDirectory(), repository.baseName)
[ "def", "getProfileBaseName", "(", "repository", ")", ":", "if", "(", "repository", ".", "getProfileDirectory", "==", "None", ")", ":", "return", "repository", ".", "baseName", "return", "os", ".", "path", ".", "join", "(", "repository", ".", "getProfileDirectory", "(", ")", ",", "repository", ".", "baseName", ")" ]
get the profile base file name .
train
false
7,990
def test_timeit_shlex(): _ip.ex('def f(*a,**kw): pass') _ip.magic('timeit -n1 "this is a bug".count(" ")') _ip.magic('timeit -r1 -n1 f(" ", 1)') _ip.magic('timeit -r1 -n1 f(" ", 1, " ", 2, " ")') _ip.magic('timeit -r1 -n1 ("a " + "b")') _ip.magic('timeit -r1 -n1 f("a " + "b")') _ip.magic('timeit -r1 -n1 f("a " + "b ")')
[ "def", "test_timeit_shlex", "(", ")", ":", "_ip", ".", "ex", "(", "'def f(*a,**kw): pass'", ")", "_ip", ".", "magic", "(", "'timeit -n1 \"this is a bug\".count(\" \")'", ")", "_ip", ".", "magic", "(", "'timeit -r1 -n1 f(\" \", 1)'", ")", "_ip", ".", "magic", "(", "'timeit -r1 -n1 f(\" \", 1, \" \", 2, \" \")'", ")", "_ip", ".", "magic", "(", "'timeit -r1 -n1 (\"a \" + \"b\")'", ")", "_ip", ".", "magic", "(", "'timeit -r1 -n1 f(\"a \" + \"b\")'", ")", "_ip", ".", "magic", "(", "'timeit -r1 -n1 f(\"a \" + \"b \")'", ")" ]
test shlex issues with timeit .
train
false
7,991
def simple_merge(txt1, txt2): differ = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK) diff = differ.compare(txt1.splitlines(1), txt2.splitlines(1)) content = u''.join([l[2:] for l in diff]) return content
[ "def", "simple_merge", "(", "txt1", ",", "txt2", ")", ":", "differ", "=", "difflib", ".", "Differ", "(", "charjunk", "=", "difflib", ".", "IS_CHARACTER_JUNK", ")", "diff", "=", "differ", ".", "compare", "(", "txt1", ".", "splitlines", "(", "1", ")", ",", "txt2", ".", "splitlines", "(", "1", ")", ")", "content", "=", "u''", ".", "join", "(", "[", "l", "[", "2", ":", "]", "for", "l", "in", "diff", "]", ")", "return", "content" ]
merges two texts .
train
false
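A worked example; Differ prefixes lines with '  ', '- ' or '+ ', and slicing l[2:] keeps lines from both inputs in diff order:

    simple_merge(u'a\nb\n', u'a\nc\n')  # -> u'a\nb\nc\n'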
7,992
@task def test_else(ctx): test_module(ctx, module=ELSE_TESTS)
[ "@", "task", "def", "test_else", "(", "ctx", ")", ":", "test_module", "(", "ctx", ",", "module", "=", "ELSE_TESTS", ")" ]
run the api test suite .
train
false
7,994
def check_ghp_import_installed(): try: subprocess.check_output(['ghp-import', '-h']) except OSError: req_missing(['ghp-import2'], 'deploy the site to GitHub Pages')
[ "def", "check_ghp_import_installed", "(", ")", ":", "try", ":", "subprocess", ".", "check_output", "(", "[", "'ghp-import'", ",", "'-h'", "]", ")", "except", "OSError", ":", "req_missing", "(", "[", "'ghp-import2'", "]", ",", "'deploy the site to GitHub Pages'", ")" ]
check if ghp-import is installed .
train
false
7,996
def HttpRequestToString(req, include_data=True): headers = '' for header in req.header_items(): headers += ('%s: %s\n' % (header[0], header[1])) template = '%(method)s %(selector)s %(type)s/1.1\nHost: %(host)s\n%(headers)s' if include_data: template = (template + '\n%(data)s') return (template % {'method': req.get_method(), 'selector': req.get_selector(), 'type': req.get_type().upper(), 'host': req.get_host(), 'headers': headers, 'data': req.get_data()})
[ "def", "HttpRequestToString", "(", "req", ",", "include_data", "=", "True", ")", ":", "headers", "=", "''", "for", "header", "in", "req", ".", "header_items", "(", ")", ":", "headers", "+=", "(", "'%s: %s\\n'", "%", "(", "header", "[", "0", "]", ",", "header", "[", "1", "]", ")", ")", "template", "=", "'%(method)s %(selector)s %(type)s/1.1\\nHost: %(host)s\\n%(headers)s'", "if", "include_data", ":", "template", "=", "(", "template", "+", "'\\n%(data)s'", ")", "return", "(", "template", "%", "{", "'method'", ":", "req", ".", "get_method", "(", ")", ",", "'selector'", ":", "req", ".", "get_selector", "(", ")", ",", "'type'", ":", "req", ".", "get_type", "(", ")", ".", "upper", "(", ")", ",", "'host'", ":", "req", ".", "get_host", "(", ")", ",", "'headers'", ":", "headers", ",", "'data'", ":", "req", ".", "get_data", "(", ")", "}", ")" ]
converts a urllib2 request object to a string .
train
false
7,997
def decade_up(x, base=10): if (x == 0.0): return base lx = np.ceil((np.log(x) / np.log(base))) return (base ** lx)
[ "def", "decade_up", "(", "x", ",", "base", "=", "10", ")", ":", "if", "(", "x", "==", "0.0", ")", ":", "return", "base", "lx", "=", "np", ".", "ceil", "(", "(", "np", ".", "log", "(", "x", ")", "/", "np", ".", "log", "(", "base", ")", ")", ")", "return", "(", "base", "**", "lx", ")" ]
ceil x to the nearest higher decade .
train
false
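Worked examples (numpy assumed as np):

    decade_up(5)     # ceil(log10(5))    =  1 -> 10.0
    decade_up(0.03)  # ceil(log10(0.03)) = -1 -> 0.1
    decade_up(100)   # ceil(log10(100))  =  2 -> 100.0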
7,998
def get_phylogenetic_row_metric(name): return getattr(qiime.beta_metrics, ('one_sample_' + name.lower()))
[ "def", "get_phylogenetic_row_metric", "(", "name", ")", ":", "return", "getattr", "(", "qiime", ".", "beta_metrics", ",", "(", "'one_sample_'", "+", "name", ".", "lower", "(", ")", ")", ")" ]
gets metric by name from qiime.beta_metrics .
train
false
7,999
def replacingGlobals(function, **newGlobals): try: codeObject = function.func_code funcGlobals = function.func_globals except AttributeError: codeObject = function.__code__ funcGlobals = function.__globals__ for key in newGlobals: if (key not in funcGlobals): raise TypeError('Name bound by replacingGlobals but not present in module: {}'.format(key)) mergedGlobals = {} mergedGlobals.update(funcGlobals) mergedGlobals.update(newGlobals) newFunction = FunctionType(codeObject, mergedGlobals) mergedGlobals[function.__name__] = newFunction return newFunction
[ "def", "replacingGlobals", "(", "function", ",", "**", "newGlobals", ")", ":", "try", ":", "codeObject", "=", "function", ".", "func_code", "funcGlobals", "=", "function", ".", "func_globals", "except", "AttributeError", ":", "codeObject", "=", "function", ".", "__code__", "funcGlobals", "=", "function", ".", "__globals__", "for", "key", "in", "newGlobals", ":", "if", "(", "key", "not", "in", "funcGlobals", ")", ":", "raise", "TypeError", "(", "'Name bound by replacingGlobals but not present in module: {}'", ".", "format", "(", "key", ")", ")", "mergedGlobals", "=", "{", "}", "mergedGlobals", ".", "update", "(", "funcGlobals", ")", "mergedGlobals", ".", "update", "(", "newGlobals", ")", "newFunction", "=", "FunctionType", "(", "codeObject", ",", "mergedGlobals", ")", "mergedGlobals", "[", "function", ".", "__name__", "]", "=", "newFunction", "return", "newFunction" ]
create a copy of the given function with the given globals substituted .
train
false
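A hedged usage sketch; stub_os and where are illustrative names, and the copied function must have no closures or default arguments since only the code object and globals are carried over:

    import os
    from types import ModuleType

    def where():
        return os.getcwd()

    stub_os = ModuleType('os')
    stub_os.getcwd = lambda: '/fake'
    patched = replacingGlobals(where, os=stub_os)
    patched()  # '/fake'
    where()    # real cwd; the original function is untouched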
8,000
@logic.validate(logic.schema.default_pagination_schema) def recently_changed_packages_activity_list(context, data_dict): model = context['model'] offset = data_dict.get('offset', 0) limit = int(data_dict.get('limit', config.get('ckan.activity_list_limit', 31))) _activity_objects = model.activity.recently_changed_packages_activity_list(limit=limit, offset=offset) activity_objects = _filter_activity_by_user(_activity_objects, _activity_stream_get_filtered_users()) return model_dictize.activity_list_dictize(activity_objects, context)
[ "@", "logic", ".", "validate", "(", "logic", ".", "schema", ".", "default_pagination_schema", ")", "def", "recently_changed_packages_activity_list", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "offset", "=", "data_dict", ".", "get", "(", "'offset'", ",", "0", ")", "limit", "=", "int", "(", "data_dict", ".", "get", "(", "'limit'", ",", "config", ".", "get", "(", "'ckan.activity_list_limit'", ",", "31", ")", ")", ")", "_activity_objects", "=", "model", ".", "activity", ".", "recently_changed_packages_activity_list", "(", "limit", "=", "limit", ",", "offset", "=", "offset", ")", "activity_objects", "=", "_filter_activity_by_user", "(", "_activity_objects", ",", "_activity_stream_get_filtered_users", "(", ")", ")", "return", "model_dictize", ".", "activity_list_dictize", "(", "activity_objects", ",", "context", ")" ]
return the site-wide stream of recently changed package activities .
train
false
8,001
@task def stop_django(): kill('python', 'runserver')
[ "@", "task", "def", "stop_django", "(", ")", ":", "kill", "(", "'python'", ",", "'runserver'", ")" ]
stop the geonode django application .
train
false
8,002
def sse_md5(params, **kwargs): _sse_md5(params, 'SSECustomer')
[ "def", "sse_md5", "(", "params", ",", "**", "kwargs", ")", ":", "_sse_md5", "(", "params", ",", "'SSECustomer'", ")" ]
s3 server-side encryption requires the encryption key to be sent to the server base64 encoded .
train
false
8,003
def setInstanceState(inst, unjellier, jellyList): state = unjellier.unjelly(jellyList[1]) if hasattr(inst, '__setstate__'): inst.__setstate__(state) else: inst.__dict__ = state return inst
[ "def", "setInstanceState", "(", "inst", ",", "unjellier", ",", "jellyList", ")", ":", "state", "=", "unjellier", ".", "unjelly", "(", "jellyList", "[", "1", "]", ")", "if", "hasattr", "(", "inst", ",", "'__setstate__'", ")", ":", "inst", ".", "__setstate__", "(", "state", ")", "else", ":", "inst", ".", "__dict__", "=", "state", "return", "inst" ]
utility method to default to normal state rules in unserialization .
train
false
8,004
def process_flags(flags=None): if (flags is None): flags = [] try: FLAGS(flags) except gflags.FlagsError as e: print(('%s\nUsage: %s ARGS\n%s' % (e, str(flags), FLAGS))) sys.exit(1) logging.getLogger().setLevel(getattr(logging, FLAGS.logging_level))
[ "def", "process_flags", "(", "flags", "=", "None", ")", ":", "if", "(", "flags", "is", "None", ")", ":", "flags", "=", "[", "]", "try", ":", "FLAGS", "(", "flags", ")", "except", "gflags", ".", "FlagsError", "as", "e", ":", "print", "(", "(", "'%s\\nUsage: %s ARGS\\n%s'", "%", "(", "e", ",", "str", "(", "flags", ")", ",", "FLAGS", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "getattr", "(", "logging", ",", "FLAGS", ".", "logging_level", ")", ")" ]
uses the command-line flags to set the logging level .
train
false
8,005
def add_new_user_history(user_profile, streams): one_week_ago = (now() - datetime.timedelta(weeks=1)) recipients = Recipient.objects.filter(type=Recipient.STREAM, type_id__in=[stream.id for stream in streams if (not stream.invite_only)]) recent_messages = Message.objects.filter(recipient_id__in=recipients, pub_date__gt=one_week_ago).order_by('-id') message_ids_to_use = list(reversed(recent_messages.values_list('id', flat=True)[0:100])) if (len(message_ids_to_use) == 0): return already_ids = set(UserMessage.objects.filter(message_id__in=message_ids_to_use, user_profile=user_profile).values_list('message_id', flat=True)) ums_to_create = [UserMessage(user_profile=user_profile, message_id=message_id, flags=UserMessage.flags.read) for message_id in message_ids_to_use if (message_id not in already_ids)] UserMessage.objects.bulk_create(ums_to_create)
[ "def", "add_new_user_history", "(", "user_profile", ",", "streams", ")", ":", "one_week_ago", "=", "(", "now", "(", ")", "-", "datetime", ".", "timedelta", "(", "weeks", "=", "1", ")", ")", "recipients", "=", "Recipient", ".", "objects", ".", "filter", "(", "type", "=", "Recipient", ".", "STREAM", ",", "type_id__in", "=", "[", "stream", ".", "id", "for", "stream", "in", "streams", "if", "(", "not", "stream", ".", "invite_only", ")", "]", ")", "recent_messages", "=", "Message", ".", "objects", ".", "filter", "(", "recipient_id__in", "=", "recipients", ",", "pub_date__gt", "=", "one_week_ago", ")", ".", "order_by", "(", "'-id'", ")", "message_ids_to_use", "=", "list", "(", "reversed", "(", "recent_messages", ".", "values_list", "(", "'id'", ",", "flat", "=", "True", ")", "[", "0", ":", "100", "]", ")", ")", "if", "(", "len", "(", "message_ids_to_use", ")", "==", "0", ")", ":", "return", "already_ids", "=", "set", "(", "UserMessage", ".", "objects", ".", "filter", "(", "message_id__in", "=", "message_ids_to_use", ",", "user_profile", "=", "user_profile", ")", ".", "values_list", "(", "'message_id'", ",", "flat", "=", "True", ")", ")", "ums_to_create", "=", "[", "UserMessage", "(", "user_profile", "=", "user_profile", ",", "message_id", "=", "message_id", ",", "flags", "=", "UserMessage", ".", "flags", ".", "read", ")", "for", "message_id", "in", "message_ids_to_use", "if", "(", "message_id", "not", "in", "already_ids", ")", "]", "UserMessage", ".", "objects", ".", "bulk_create", "(", "ums_to_create", ")" ]
give you the last 100 messages on your public streams .
train
false
8,006
def change_TORRENT_DIR(torrent_dir): if (torrent_dir == ''): sickbeard.TORRENT_DIR = '' return True if (ek(os.path.normpath, sickbeard.TORRENT_DIR) != ek(os.path.normpath, torrent_dir)): if helpers.makeDir(torrent_dir): sickbeard.TORRENT_DIR = ek(os.path.normpath, torrent_dir) logger.log((u'Changed torrent folder to ' + torrent_dir)) else: return False return True
[ "def", "change_TORRENT_DIR", "(", "torrent_dir", ")", ":", "if", "(", "torrent_dir", "==", "''", ")", ":", "sickbeard", ".", "TORRENT_DIR", "=", "''", "return", "True", "if", "(", "ek", "(", "os", ".", "path", ".", "normpath", ",", "sickbeard", ".", "TORRENT_DIR", ")", "!=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "torrent_dir", ")", ")", ":", "if", "helpers", ".", "makeDir", "(", "torrent_dir", ")", ":", "sickbeard", ".", "TORRENT_DIR", "=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "torrent_dir", ")", "logger", ".", "log", "(", "(", "u'Changed torrent folder to '", "+", "torrent_dir", ")", ")", "else", ":", "return", "False", "return", "True" ]
change torrent directory .
train
false
8,007
@utils.arg('network_id', metavar='<network_id>', help='ID of network') @shell.deprecated_network def do_tenant_network_show(cs, args): network = cs.tenant_networks.get(args.network_id) utils.print_dict(network._info)
[ "@", "utils", ".", "arg", "(", "'network_id'", ",", "metavar", "=", "'<network_id>'", ",", "help", "=", "'ID of network'", ")", "@", "shell", ".", "deprecated_network", "def", "do_tenant_network_show", "(", "cs", ",", "args", ")", ":", "network", "=", "cs", ".", "tenant_networks", ".", "get", "(", "args", ".", "network_id", ")", "utils", ".", "print_dict", "(", "network", ".", "_info", ")" ]
show a tenant network .
train
false
8,008
@decorators.memoize def _supports_regex(): return (tuple([int(i) for i in _get_version()]) > (0, 5))
[ "@", "decorators", ".", "memoize", "def", "_supports_regex", "(", ")", ":", "return", "(", "tuple", "(", "[", "int", "(", "i", ")", "for", "i", "in", "_get_version", "(", ")", "]", ")", ">", "(", "0", ",", "5", ")", ")" ]
check support of regexp .
train
false
8,009
def _CheckQuery(query): _ValidateString(query, 'query', MAXIMUM_QUERY_LENGTH, empty_ok=True) if (query is None): raise TypeError('query must be unicode, got None') if query.strip(): try: query_parser.Parse(query) except query_parser.QueryException as e: raise QueryError(('Failed to parse query "%s"' % query)) return query
[ "def", "_CheckQuery", "(", "query", ")", ":", "_ValidateString", "(", "query", ",", "'query'", ",", "MAXIMUM_QUERY_LENGTH", ",", "empty_ok", "=", "True", ")", "if", "(", "query", "is", "None", ")", ":", "raise", "TypeError", "(", "'query must be unicode, got None'", ")", "if", "query", ".", "strip", "(", ")", ":", "try", ":", "query_parser", ".", "Parse", "(", "query", ")", "except", "query_parser", ".", "QueryException", "as", "e", ":", "raise", "QueryError", "(", "(", "'Failed to parse query \"%s\"'", "%", "query", ")", ")", "return", "query" ]
checks that a query is a valid query string .
train
false
8,010
def getargsfromtext(text, objname): signature = getsignaturefromtext(text, objname) if signature: argtxt = signature[(signature.find('(') + 1):(-1)] return argtxt.split(',')
[ "def", "getargsfromtext", "(", "text", ",", "objname", ")", ":", "signature", "=", "getsignaturefromtext", "(", "text", ",", "objname", ")", "if", "signature", ":", "argtxt", "=", "signature", "[", "(", "signature", ".", "find", "(", "'('", ")", "+", "1", ")", ":", "(", "-", "1", ")", "]", "return", "argtxt", ".", "split", "(", "','", ")" ]
get arguments from text .
train
true
8,012
def get_config_file(*args): if check_file_permissions(): ensure_local_plotly_files() return utils.load_json_dict(CONFIG_FILE, *args) else: return FILE_CONTENT[CONFIG_FILE]
[ "def", "get_config_file", "(", "*", "args", ")", ":", "if", "check_file_permissions", "(", ")", ":", "ensure_local_plotly_files", "(", ")", "return", "utils", ".", "load_json_dict", "(", "CONFIG_FILE", ",", "*", "args", ")", "else", ":", "return", "FILE_CONTENT", "[", "CONFIG_FILE", "]" ]
returns the configuration directory .
train
false
8,014
@register.tag def get_people(parser, token): try: (tag_name, arg) = token.contents.split(None, 1) except ValueError: raise template.TemplateSyntaxError, ('%s tag requires arguments' % token.contents.split()[0]) m1 = re.search('as (\\w+)', arg) m2 = re.search('(.*?) as (\\w+)', arg) if (not m1): raise template.TemplateSyntaxError, ('%s tag had invalid arguments' % tag_name) else: var_name = m1.groups()[0] return GetPeople(var_name) if (not m2): raise template.TemplateSyntaxError, ('%s tag had invalid arguments' % tag_name) else: (format_string, var_name) = m2.groups() return GetPeople(var_name, format_string[0])
[ "@", "register", ".", "tag", "def", "get_people", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "tag_name", ",", "arg", ")", "=", "token", ".", "contents", ".", "split", "(", "None", ",", "1", ")", "except", "ValueError", ":", "raise", "template", ".", "TemplateSyntaxError", ",", "(", "'%s tag requires arguments'", "%", "token", ".", "contents", ".", "split", "(", ")", "[", "0", "]", ")", "m1", "=", "re", ".", "search", "(", "'as (\\\\w+)'", ",", "arg", ")", "m2", "=", "re", ".", "search", "(", "'(.*?) as (\\\\w+)'", ",", "arg", ")", "if", "(", "not", "m1", ")", ":", "raise", "template", ".", "TemplateSyntaxError", ",", "(", "'%s tag had invalid arguments'", "%", "tag_name", ")", "else", ":", "var_name", "=", "m1", ".", "groups", "(", ")", "[", "0", "]", "return", "GetPeople", "(", "var_name", ")", "if", "(", "not", "m2", ")", ":", "raise", "template", ".", "TemplateSyntaxError", ",", "(", "'%s tag had invalid arguments'", "%", "tag_name", ")", "else", ":", "(", "format_string", ",", "var_name", ")", "=", "m2", ".", "groups", "(", ")", "return", "GetPeople", "(", "var_name", ",", "format_string", "[", "0", "]", ")" ]
gets any number of people and stores them in a variable .
train
false
8,015
def set_exception_context(e, s): e._context = s
[ "def", "set_exception_context", "(", "e", ",", "s", ")", ":", "e", ".", "_context", "=", "s" ]
set the context of a given exception .
train
false
8,016
def policy_details(client, module): if (module.params.get('max_items') or module.params.get('next_marker')): module.fail_json(msg='Cannot specify max_items nor next_marker for query=policy.') lambda_facts = dict() function_name = module.params.get('function_name') if function_name: try: lambda_facts.update(policy=json.loads(client.get_policy(FunctionName=function_name)['Policy'])) except ClientError as e: if (e.response['Error']['Code'] == 'ResourceNotFoundException'): lambda_facts.update(policy={}) else: module.fail_json(msg='Unable to get {0} policy, error: {1}'.format(function_name, e)) else: module.fail_json(msg='Parameter function_name required for query=policy.') return {function_name: camel_dict_to_snake_dict(lambda_facts)}
[ "def", "policy_details", "(", "client", ",", "module", ")", ":", "if", "(", "module", ".", "params", ".", "get", "(", "'max_items'", ")", "or", "module", ".", "params", ".", "get", "(", "'next_marker'", ")", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "'Cannot specify max_items nor next_marker for query=policy.'", ")", "lambda_facts", "=", "dict", "(", ")", "function_name", "=", "module", ".", "params", ".", "get", "(", "'function_name'", ")", "if", "function_name", ":", "try", ":", "lambda_facts", ".", "update", "(", "policy", "=", "json", ".", "loads", "(", "client", ".", "get_policy", "(", "FunctionName", "=", "function_name", ")", "[", "'Policy'", "]", ")", ")", "except", "ClientError", "as", "e", ":", "if", "(", "e", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "==", "'ResourceNotFoundException'", ")", ":", "lambda_facts", ".", "update", "(", "policy", "=", "{", "}", ")", "else", ":", "module", ".", "fail_json", "(", "msg", "=", "'Unable to get {0} policy, error: {1}'", ".", "format", "(", "function_name", ",", "e", ")", ")", "else", ":", "module", ".", "fail_json", "(", "msg", "=", "'Parameter function_name required for query=policy.'", ")", "return", "{", "function_name", ":", "camel_dict_to_snake_dict", "(", "lambda_facts", ")", "}" ]
returns policy attached to a lambda function .
train
false
8,017
def getDiagonalFlippedLoops(loops): diagonalFlippedLoops = [] for loop in loops: diagonalFlippedLoops.append(getDiagonalFlippedLoop(loop)) return diagonalFlippedLoops
[ "def", "getDiagonalFlippedLoops", "(", "loops", ")", ":", "diagonalFlippedLoops", "=", "[", "]", "for", "loop", "in", "loops", ":", "diagonalFlippedLoops", ".", "append", "(", "getDiagonalFlippedLoop", "(", "loop", ")", ")", "return", "diagonalFlippedLoops" ]
get loops flipped over the diagonal .
train
false
8,018
def lookupAddress(name, timeout=None): return getResolver().lookupAddress(name, timeout)
[ "def", "lookupAddress", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupAddress", "(", "name", ",", "timeout", ")" ]
perform a dns A record lookup .
train
false
8,019
def _windows_virtual(osdata): grains = dict() if (osdata['kernel'] != 'Windows'): return grains manufacturer = osdata.get('manufacturer', '') if (manufacturer is None): manufacturer = '' productname = osdata.get('productname', '') if (productname is None): productname = '' if ('QEMU' in manufacturer): grains['virtual'] = 'kvm' if ('Bochs' in manufacturer): grains['virtual'] = 'kvm' elif ('oVirt' in productname): grains['virtual'] = 'kvm' grains['virtual_subtype'] = 'oVirt' elif ('RHEV Hypervisor' in productname): grains['virtual'] = 'kvm' grains['virtual_subtype'] = 'rhev' elif ('VirtualBox' in productname): grains['virtual'] = 'VirtualBox' elif ('VMware Virtual Platform' in productname): grains['virtual'] = 'VMware' elif (('Microsoft' in manufacturer) and ('Virtual Machine' in productname)): grains['virtual'] = 'VirtualPC' elif ('Parallels Software' in manufacturer): grains['virtual'] = 'Parallels' elif ('CloudStack KVM Hypervisor' in productname): grains['virtual'] = 'kvm' grains['virtual_subtype'] = 'cloudstack' return grains
[ "def", "_windows_virtual", "(", "osdata", ")", ":", "grains", "=", "dict", "(", ")", "if", "(", "osdata", "[", "'kernel'", "]", "!=", "'Windows'", ")", ":", "return", "grains", "manufacturer", "=", "osdata", ".", "get", "(", "'manufacturer'", ",", "''", ")", "if", "(", "manufacturer", "is", "None", ")", ":", "manufacturer", "=", "''", "productname", "=", "osdata", ".", "get", "(", "'productname'", ",", "''", ")", "if", "(", "productname", "is", "None", ")", ":", "productname", "=", "''", "if", "(", "'QEMU'", "in", "manufacturer", ")", ":", "grains", "[", "'virtual'", "]", "=", "'kvm'", "if", "(", "'Bochs'", "in", "manufacturer", ")", ":", "grains", "[", "'virtual'", "]", "=", "'kvm'", "elif", "(", "'oVirt'", "in", "productname", ")", ":", "grains", "[", "'virtual'", "]", "=", "'kvm'", "grains", "[", "'virtual_subtype'", "]", "=", "'oVirt'", "elif", "(", "'RHEV Hypervisor'", "in", "productname", ")", ":", "grains", "[", "'virtual'", "]", "=", "'kvm'", "grains", "[", "'virtual_subtype'", "]", "=", "'rhev'", "elif", "(", "'VirtualBox'", "in", "productname", ")", ":", "grains", "[", "'virtual'", "]", "=", "'VirtualBox'", "elif", "(", "'VMware Virtual Platform'", "in", "productname", ")", ":", "grains", "[", "'virtual'", "]", "=", "'VMware'", "elif", "(", "(", "'Microsoft'", "in", "manufacturer", ")", "and", "(", "'Virtual Machine'", "in", "productname", ")", ")", ":", "grains", "[", "'virtual'", "]", "=", "'VirtualPC'", "elif", "(", "'Parallels Software'", "in", "manufacturer", ")", ":", "grains", "[", "'virtual'", "]", "=", "'Parallels'", "elif", "(", "'CloudStack KVM Hypervisor'", "in", "productname", ")", ":", "grains", "[", "'virtual'", "]", "=", "'kvm'", "grains", "[", "'virtual_subtype'", "]", "=", "'cloudstack'", "return", "grains" ]
returns what type of virtual hardware is under the hood .
train
true
8,020
def _disjoint_p(M, N, strict=False): (a1, b1, c1, d1) = M (a2, b2, c2, d2) = N (a1d1, b1c1) = ((a1 * d1), (b1 * c1)) (a2d2, b2c2) = ((a2 * d2), (b2 * c2)) if ((a1d1 == b1c1) and (a2d2 == b2c2)): return True if (a1d1 > b1c1): (a1, c1, b1, d1) = (b1, d1, a1, c1) if (a2d2 > b2c2): (a2, c2, b2, d2) = (b2, d2, a2, c2) if (not strict): return (((a2 * d1) >= (c2 * b1)) or ((b2 * c1) <= (d2 * a1))) else: return (((a2 * d1) > (c2 * b1)) or ((b2 * c1) < (d2 * a1)))
[ "def", "_disjoint_p", "(", "M", ",", "N", ",", "strict", "=", "False", ")", ":", "(", "a1", ",", "b1", ",", "c1", ",", "d1", ")", "=", "M", "(", "a2", ",", "b2", ",", "c2", ",", "d2", ")", "=", "N", "(", "a1d1", ",", "b1c1", ")", "=", "(", "(", "a1", "*", "d1", ")", ",", "(", "b1", "*", "c1", ")", ")", "(", "a2d2", ",", "b2c2", ")", "=", "(", "(", "a2", "*", "d2", ")", ",", "(", "b2", "*", "c2", ")", ")", "if", "(", "(", "a1d1", "==", "b1c1", ")", "and", "(", "a2d2", "==", "b2c2", ")", ")", ":", "return", "True", "if", "(", "a1d1", ">", "b1c1", ")", ":", "(", "a1", ",", "c1", ",", "b1", ",", "d1", ")", "=", "(", "b1", ",", "d1", ",", "a1", ",", "c1", ")", "if", "(", "a2d2", ">", "b2c2", ")", ":", "(", "a2", ",", "c2", ",", "b2", ",", "d2", ")", "=", "(", "b2", ",", "d2", ",", "a2", ",", "c2", ")", "if", "(", "not", "strict", ")", ":", "return", "(", "(", "(", "a2", "*", "d1", ")", ">=", "(", "c2", "*", "b1", ")", ")", "or", "(", "(", "b2", "*", "c1", ")", "<=", "(", "d2", "*", "a1", ")", ")", ")", "else", ":", "return", "(", "(", "(", "a2", "*", "d1", ")", ">", "(", "c2", "*", "b1", ")", ")", "or", "(", "(", "b2", "*", "c1", ")", "<", "(", "d2", "*", "a1", ")", ")", ")" ]
check if mobius transforms define disjoint intervals .
train
false
8,023
def list_legend(model, selected=None): if hasattr(model, 'model'): model = model.model() legend = '' for row in range(model.rowCount()): if ((selected is not None) and (row not in selected)): continue index = model.index(row, 0) icon = model.data(index, Qt.DecorationRole) (r, g, b, a) = QColor(icon.pixmap(12, 12).toImage().pixel(0, 0)).getRgb() text = model.data(index, Qt.DisplayRole) legend += (colored_square(r, g, b) + '<span class="legend-item">{}</span>'.format(text)) return legend
[ "def", "list_legend", "(", "model", ",", "selected", "=", "None", ")", ":", "if", "hasattr", "(", "model", ",", "'model'", ")", ":", "model", "=", "model", ".", "model", "(", ")", "legend", "=", "''", "for", "row", "in", "range", "(", "model", ".", "rowCount", "(", ")", ")", ":", "if", "(", "(", "selected", "is", "not", "None", ")", "and", "(", "row", "not", "in", "selected", ")", ")", ":", "continue", "index", "=", "model", ".", "index", "(", "row", ",", "0", ")", "icon", "=", "model", ".", "data", "(", "index", ",", "Qt", ".", "DecorationRole", ")", "(", "r", ",", "g", ",", "b", ",", "a", ")", "=", "QColor", "(", "icon", ".", "pixmap", "(", "12", ",", "12", ")", ".", "toImage", "(", ")", ".", "pixel", "(", "0", ",", "0", ")", ")", ".", "getRgb", "(", ")", "text", "=", "model", ".", "data", "(", "index", ",", "Qt", ".", "DisplayRole", ")", "legend", "+=", "(", "colored_square", "(", "r", ",", "g", ",", "b", ")", "+", "'<span class=\"legend-item\">{}</span>'", ".", "format", "(", "text", ")", ")", "return", "legend" ]
create html with a legend constructed from a qt model or a view .
train
false
8,024
def render_template_with_form(text, context=None): if (not context): context = {} if (u'form' not in context): context[u'form'] = TestForm() return render_template_with_bootstrap(text, context)
[ "def", "render_template_with_form", "(", "text", ",", "context", "=", "None", ")", ":", "if", "(", "not", "context", ")", ":", "context", "=", "{", "}", "if", "(", "u'form'", "not", "in", "context", ")", ":", "context", "[", "u'form'", "]", "=", "TestForm", "(", ")", "return", "render_template_with_bootstrap", "(", "text", ",", "context", ")" ]
create a template text that first loads bootstrap3 .
train
false
8,026
def ode_nth_linear_constant_coeff_undetermined_coefficients(eq, func, order, match): gensol = ode_nth_linear_constant_coeff_homogeneous(eq, func, order, match, returns='both') match.update(gensol) return _solve_undetermined_coefficients(eq, func, order, match)
[ "def", "ode_nth_linear_constant_coeff_undetermined_coefficients", "(", "eq", ",", "func", ",", "order", ",", "match", ")", ":", "gensol", "=", "ode_nth_linear_constant_coeff_homogeneous", "(", "eq", ",", "func", ",", "order", ",", "match", ",", "returns", "=", "'both'", ")", "match", ".", "update", "(", "gensol", ")", "return", "_solve_undetermined_coefficients", "(", "eq", ",", "func", ",", "order", ",", "match", ")" ]
solves an nth order linear differential equation with constant coefficients using the method of undetermined coefficients .
train
false
8,027
def auth_is_loggedin_user(): try: context_user = c.user except TypeError: context_user = None return bool(context_user)
[ "def", "auth_is_loggedin_user", "(", ")", ":", "try", ":", "context_user", "=", "c", ".", "user", "except", "TypeError", ":", "context_user", "=", "None", "return", "bool", "(", "context_user", ")" ]
do we have a logged in user .
train
false
8,028
def show_item(name, id_): (status, result) = _query(action=name, command=id_) return result
[ "def", "show_item", "(", "name", ",", "id_", ")", ":", "(", "status", ",", "result", ")", "=", "_query", "(", "action", "=", "name", ",", "command", "=", "id_", ")", "return", "result" ]
show an item .
train
true
8,029
def makepairs(x, y): xy = array([[a, b] for a in asarray(x) for b in asarray(y)]) return xy.T
[ "def", "makepairs", "(", "x", ",", "y", ")", ":", "xy", "=", "array", "(", "[", "[", "a", ",", "b", "]", "for", "a", "in", "asarray", "(", "x", ")", "for", "b", "in", "asarray", "(", "y", ")", "]", ")", "return", "xy", ".", "T" ]
helper function to create an array of pairs of x and y .
train
false
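A quick, self-contained check of makepairs above; this sketch assumes the numpy names array and asarray, which the snippet uses without showing their import.

import numpy as np

def makepairs(x, y):
    # cartesian product of x and y, returned with pairs as columns
    xy = np.array([[a, b] for a in np.asarray(x) for b in np.asarray(y)])
    return xy.T

print(makepairs([1, 2], [3, 4]))
# [[1 1 2 2]
#  [3 4 3 4]]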
8,030
def isdistinct(seq): if (iter(seq) is seq): seen = set() seen_add = seen.add for item in seq: if (item in seen): return False seen_add(item) return True else: return (len(seq) == len(set(seq)))
[ "def", "isdistinct", "(", "seq", ")", ":", "if", "(", "iter", "(", "seq", ")", "is", "seq", ")", ":", "seen", "=", "set", "(", ")", "seen_add", "=", "seen", ".", "add", "for", "item", "in", "seq", ":", "if", "(", "item", "in", "seen", ")", ":", "return", "False", "seen_add", "(", "item", ")", "return", "True", "else", ":", "return", "(", "len", "(", "seq", ")", "==", "len", "(", "set", "(", "seq", ")", ")", ")" ]
all values in sequence are distinct .
train
false
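The isdistinct snippet above takes two paths: a streaming seen-set scan for one-shot iterators (where iter(seq) is seq) and a len/set size comparison for concrete sequences. A small usage sketch, with the function restated so the example is self-contained:

def isdistinct(seq):
    if iter(seq) is seq:  # one-shot iterator: scan with a seen-set
        seen = set()
        for item in seq:
            if item in seen:
                return False
            seen.add(item)
        return True
    return len(seq) == len(set(seq))  # concrete sequence: compare sizes

print(isdistinct([1, 2, 3]))        # True
print(isdistinct('hello'))          # False ('l' repeats)
print(isdistinct(iter([1, 2, 1])))  # False, via the iterator path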
8,032
def _parse_sgf_move(node_value): if ((node_value == '') or (node_value == 'tt')): return go.PASS_MOVE else: col = LETTERS.index(node_value[0].upper()) row = LETTERS.index(node_value[1].upper()) return (col, row)
[ "def", "_parse_sgf_move", "(", "node_value", ")", ":", "if", "(", "(", "node_value", "==", "''", ")", "or", "(", "node_value", "==", "'tt'", ")", ")", ":", "return", "go", ".", "PASS_MOVE", "else", ":", "col", "=", "LETTERS", ".", "index", "(", "node_value", "[", "0", "]", ".", "upper", "(", ")", ")", "row", "=", "LETTERS", ".", "index", "(", "node_value", "[", "1", "]", ".", "upper", "(", ")", ")", "return", "(", "col", ",", "row", ")" ]
given a well-formed move string , return the corresponding (col , row) coordinates , or the pass move for an empty or 'tt' value .
train
false
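A self-contained sketch of the SGF move parser above. LETTERS and go.PASS_MOVE are not shown in the snippet, so the definitions below are assumptions: LETTERS as the uppercase alphabet and None standing in for the pass sentinel.

import string

LETTERS = string.ascii_uppercase  # assumed definition; not shown in the snippet
PASS_MOVE = None                  # stand-in for go.PASS_MOVE

def parse_sgf_move(node_value):
    # '' and 'tt' are the SGF conventions for a pass
    if node_value == '' or node_value == 'tt':
        return PASS_MOVE
    col = LETTERS.index(node_value[0].upper())
    row = LETTERS.index(node_value[1].upper())
    return (col, row)

print(parse_sgf_move('ab'))  # (0, 1)
print(parse_sgf_move('tt'))  # None (pass)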
8,036
def snapshot_get_all_by_host(context, host, filters=None): return IMPL.snapshot_get_all_by_host(context, host, filters)
[ "def", "snapshot_get_all_by_host", "(", "context", ",", "host", ",", "filters", "=", "None", ")", ":", "return", "IMPL", ".", "snapshot_get_all_by_host", "(", "context", ",", "host", ",", "filters", ")" ]
get all snapshots belonging to a host .
train
false
8,038
def eratosthenes(end, start=2, return_boolean=False): primes = [] if ((end < start) or (end < 2)): return [] is_prime = [True for i in range((end + 1))] is_prime[0] = is_prime[1] = False for i in range(2, (end + 1)): if (not is_prime[i]): continue if (start <= i <= end): primes.append(i) j = (i * i) while (j <= end): is_prime[j] = False j += i if return_boolean: return (primes, is_prime) return primes
[ "def", "eratosthenes", "(", "end", ",", "start", "=", "2", ",", "return_boolean", "=", "False", ")", ":", "primes", "=", "[", "]", "if", "(", "(", "end", "<", "start", ")", "or", "(", "end", "<", "2", ")", ")", ":", "return", "[", "]", "is_prime", "=", "[", "True", "for", "i", "in", "range", "(", "(", "end", "+", "1", ")", ")", "]", "is_prime", "[", "0", "]", "=", "is_prime", "[", "1", "]", "=", "False", "for", "i", "in", "range", "(", "2", ",", "(", "end", "+", "1", ")", ")", ":", "if", "(", "not", "is_prime", "[", "i", "]", ")", ":", "continue", "if", "(", "start", "<=", "i", "<=", "end", ")", ":", "primes", ".", "append", "(", "i", ")", "j", "=", "(", "i", "*", "i", ")", "while", "(", "j", "<=", "end", ")", ":", "is_prime", "[", "j", "]", "=", "False", "j", "+=", "i", "if", "return_boolean", ":", "return", "(", "primes", ",", "is_prime", ")", "return", "primes" ]
finds all primes from start up to and including end .
train
false
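A compact restatement of the sieve above, plus two calls showing that the endpoint is inclusive. Rewriting the marking loop as range(i*i, end+1, i) is my equivalent of the original while loop, not the source's wording.

def eratosthenes(end, start=2, return_boolean=False):
    primes = []
    if end < start or end < 2:
        return []
    is_prime = [True] * (end + 1)
    is_prime[0] = is_prime[1] = False
    for i in range(2, end + 1):
        if not is_prime[i]:
            continue
        if start <= i <= end:
            primes.append(i)
        for j in range(i * i, end + 1, i):  # mark multiples, starting at i*i
            is_prime[j] = False
    if return_boolean:
        return (primes, is_prime)
    return primes

print(eratosthenes(20))            # [2, 3, 5, 7, 11, 13, 17, 19]
print(eratosthenes(20, start=10))  # [11, 13, 17, 19]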
8,039
def get_full_public_api_url(api_version=DEFAULT_API_VERSION): api_url = get_base_public_api_url() api_url = ('%s/%s' % (api_url, api_version)) return api_url
[ "def", "get_full_public_api_url", "(", "api_version", "=", "DEFAULT_API_VERSION", ")", ":", "api_url", "=", "get_base_public_api_url", "(", ")", "api_url", "=", "(", "'%s/%s'", "%", "(", "api_url", ",", "api_version", ")", ")", "return", "api_url" ]
return full public url to the api endpoint .
train
false
8,040
def test_blackbody_overflow(): photlam = (u.photon / (((u.cm ** 2) * u.s) * u.AA)) wave = [0, 1000.0, 100000.0, 1e+55] temp = 10000.0 with np.errstate(all=u'ignore'): bb_lam = (blackbody_lambda(wave, temp) * u.sr) flux = (bb_lam.to(photlam, u.spectral_density((wave * u.AA))) / u.sr) assert np.isnan(flux[0]) assert (np.log10(flux[(-1)].value) < (-134)) np.testing.assert_allclose(flux.value[1:(-1)], [3.38131732e+16, 3874513170000000.0], rtol=0.001) with np.errstate(all=u'ignore'): flux = blackbody_lambda(1, 10000.0) assert (flux.value == 0)
[ "def", "test_blackbody_overflow", "(", ")", ":", "photlam", "=", "(", "u", ".", "photon", "/", "(", "(", "(", "u", ".", "cm", "**", "2", ")", "*", "u", ".", "s", ")", "*", "u", ".", "AA", ")", ")", "wave", "=", "[", "0", ",", "1000.0", ",", "100000.0", ",", "1e+55", "]", "temp", "=", "10000.0", "with", "np", ".", "errstate", "(", "all", "=", "u'ignore'", ")", ":", "bb_lam", "=", "(", "blackbody_lambda", "(", "wave", ",", "temp", ")", "*", "u", ".", "sr", ")", "flux", "=", "(", "bb_lam", ".", "to", "(", "photlam", ",", "u", ".", "spectral_density", "(", "(", "wave", "*", "u", ".", "AA", ")", ")", ")", "/", "u", ".", "sr", ")", "assert", "np", ".", "isnan", "(", "flux", "[", "0", "]", ")", "assert", "(", "np", ".", "log10", "(", "flux", "[", "(", "-", "1", ")", "]", ".", "value", ")", "<", "(", "-", "134", ")", ")", "np", ".", "testing", ".", "assert_allclose", "(", "flux", ".", "value", "[", "1", ":", "(", "-", "1", ")", "]", ",", "[", "3.38131732e+16", ",", "3874513170000000.0", "]", ",", "rtol", "=", "0.001", ")", "with", "np", ".", "errstate", "(", "all", "=", "u'ignore'", ")", ":", "flux", "=", "blackbody_lambda", "(", "1", ",", "10000.0", ")", "assert", "(", "flux", ".", "value", "==", "0", ")" ]
test planck function with overflow .
train
false
8,045
def download_file_powershell(url, target): target = os.path.abspath(target) cmd = ['powershell', '-Command', ('(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)' % vars())] _clean_check(cmd, target)
[ "def", "download_file_powershell", "(", "url", ",", "target", ")", ":", "target", "=", "os", ".", "path", ".", "abspath", "(", "target", ")", "cmd", "=", "[", "'powershell'", ",", "'-Command'", ",", "(", "'(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)'", "%", "vars", "(", ")", ")", "]", "_clean_check", "(", "cmd", ",", "target", ")" ]
download the file at url to target using powershell .
train
true
8,046
def image_property_delete(context, prop_ref, image_ref, session=None): session = (session or get_session()) prop = session.query(models.ImageProperty).filter_by(image_id=image_ref, name=prop_ref).one() prop.delete(session=session) return prop
[ "def", "image_property_delete", "(", "context", ",", "prop_ref", ",", "image_ref", ",", "session", "=", "None", ")", ":", "session", "=", "(", "session", "or", "get_session", "(", ")", ")", "prop", "=", "session", ".", "query", "(", "models", ".", "ImageProperty", ")", ".", "filter_by", "(", "image_id", "=", "image_ref", ",", "name", "=", "prop_ref", ")", ".", "one", "(", ")", "prop", ".", "delete", "(", "session", "=", "session", ")", "return", "prop" ]
used internally by image_property_create and image_property_update .
train
false
8,049
def map_url_in(request, env, app=False): THREAD_LOCAL.routes = params map = MapUrlIn(request=request, env=env) map.sluggify() map.map_prefix() map.map_app() if params.routes_app: THREAD_LOCAL.routes = params_apps.get(app, params) if app: return map.application (root_static_file, version) = map.map_root_static() if root_static_file: map.update_request() return (root_static_file, version, map.env) if (map.languages and (map.map_static is False) and (map.arg0 == 'static') and (map.args(1) in map.languages)): map.map_controller() map.map_language() else: map.map_language() map.map_controller() (static_file, version) = map.map_static() if static_file: map.update_request() return (static_file, version, map.env) map.map_function() map.validate_args() map.update_request() return (None, None, map.env)
[ "def", "map_url_in", "(", "request", ",", "env", ",", "app", "=", "False", ")", ":", "THREAD_LOCAL", ".", "routes", "=", "params", "map", "=", "MapUrlIn", "(", "request", "=", "request", ",", "env", "=", "env", ")", "map", ".", "sluggify", "(", ")", "map", ".", "map_prefix", "(", ")", "map", ".", "map_app", "(", ")", "if", "params", ".", "routes_app", ":", "THREAD_LOCAL", ".", "routes", "=", "params_apps", ".", "get", "(", "app", ",", "params", ")", "if", "app", ":", "return", "map", ".", "application", "(", "root_static_file", ",", "version", ")", "=", "map", ".", "map_root_static", "(", ")", "if", "root_static_file", ":", "map", ".", "update_request", "(", ")", "return", "(", "root_static_file", ",", "version", ",", "map", ".", "env", ")", "if", "(", "map", ".", "languages", "and", "(", "map", ".", "map_static", "is", "False", ")", "and", "(", "map", ".", "arg0", "==", "'static'", ")", "and", "(", "map", ".", "args", "(", "1", ")", "in", "map", ".", "languages", ")", ")", ":", "map", ".", "map_controller", "(", ")", "map", ".", "map_language", "(", ")", "else", ":", "map", ".", "map_language", "(", ")", "map", ".", "map_controller", "(", ")", "(", "static_file", ",", "version", ")", "=", "map", ".", "map_static", "(", ")", "if", "static_file", ":", "map", ".", "update_request", "(", ")", "return", "(", "static_file", ",", "version", ",", "map", ".", "env", ")", "map", ".", "map_function", "(", ")", "map", ".", "validate_args", "(", ")", "map", ".", "update_request", "(", ")", "return", "(", "None", ",", "None", ",", "map", ".", "env", ")" ]
routes incoming url .
train
false
8,051
def ptr_byref(args, offset=(-1)): return args[offset]._obj
[ "def", "ptr_byref", "(", "args", ",", "offset", "=", "(", "-", "1", ")", ")", ":", "return", "args", "[", "offset", "]", ".", "_obj" ]
returns the pointer argument passed in by-reference .
train
false
8,052
@ClassBuilder.class_impl_registry.lower_getattr_generic(types.ClassInstanceType) def attr_impl(context, builder, typ, value, attr): if (attr in typ.struct): inst = context.make_helper(builder, typ, value=value) data_pointer = inst.data data = context.make_data_helper(builder, typ.get_data_type(), ref=data_pointer) return imputils.impl_ret_borrowed(context, builder, typ.struct[attr], getattr(data, _mangle_attr(attr))) elif (attr in typ.jitprops): getter = typ.jitprops[attr]['get'] sig = templates.signature(None, typ) dispatcher = types.Dispatcher(getter) sig = dispatcher.get_call_type(context.typing_context, [typ], {}) call = context.get_function(dispatcher, sig) out = call(builder, [value]) return imputils.impl_ret_new_ref(context, builder, sig.return_type, out) raise NotImplementedError('attribute {0!r} not implemented'.format(attr))
[ "@", "ClassBuilder", ".", "class_impl_registry", ".", "lower_getattr_generic", "(", "types", ".", "ClassInstanceType", ")", "def", "attr_impl", "(", "context", ",", "builder", ",", "typ", ",", "value", ",", "attr", ")", ":", "if", "(", "attr", "in", "typ", ".", "struct", ")", ":", "inst", "=", "context", ".", "make_helper", "(", "builder", ",", "typ", ",", "value", "=", "value", ")", "data_pointer", "=", "inst", ".", "data", "data", "=", "context", ".", "make_data_helper", "(", "builder", ",", "typ", ".", "get_data_type", "(", ")", ",", "ref", "=", "data_pointer", ")", "return", "imputils", ".", "impl_ret_borrowed", "(", "context", ",", "builder", ",", "typ", ".", "struct", "[", "attr", "]", ",", "getattr", "(", "data", ",", "_mangle_attr", "(", "attr", ")", ")", ")", "elif", "(", "attr", "in", "typ", ".", "jitprops", ")", ":", "getter", "=", "typ", ".", "jitprops", "[", "attr", "]", "[", "'get'", "]", "sig", "=", "templates", ".", "signature", "(", "None", ",", "typ", ")", "dispatcher", "=", "types", ".", "Dispatcher", "(", "getter", ")", "sig", "=", "dispatcher", ".", "get_call_type", "(", "context", ".", "typing_context", ",", "[", "typ", "]", ",", "{", "}", ")", "call", "=", "context", ".", "get_function", "(", "dispatcher", ",", "sig", ")", "out", "=", "call", "(", "builder", ",", "[", "value", "]", ")", "return", "imputils", ".", "impl_ret_new_ref", "(", "context", ",", "builder", ",", "sig", ".", "return_type", ",", "out", ")", "raise", "NotImplementedError", "(", "'attribute {0!r} not implemented'", ".", "format", "(", "attr", ")", ")" ]
generic getattr() for @jitclass instances .
train
false
8,053
def journald_json_formatter(output_file): accumulated = {} def handle_output_line(line): if line: (key, value) = line.split('=', 1) accumulated[key] = value elif accumulated: raw_message = accumulated.get('MESSAGE', '{}') try: message = json.loads(raw_message) except ValueError: message = dict(message=raw_message) message[u'_HOSTNAME'] = accumulated.get('_HOSTNAME', '<no hostname>') message[u'_PROCESS_NAME'] = accumulated.get('_SYSTEMD_UNIT', '<no unit>') output_file.write((json.dumps(message) + '\n')) accumulated.clear() return handle_output_line
[ "def", "journald_json_formatter", "(", "output_file", ")", ":", "accumulated", "=", "{", "}", "def", "handle_output_line", "(", "line", ")", ":", "if", "line", ":", "(", "key", ",", "value", ")", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "accumulated", "[", "key", "]", "=", "value", "elif", "accumulated", ":", "raw_message", "=", "accumulated", ".", "get", "(", "'MESSAGE'", ",", "'{}'", ")", "try", ":", "message", "=", "json", ".", "loads", "(", "raw_message", ")", "except", "ValueError", ":", "message", "=", "dict", "(", "message", "=", "raw_message", ")", "message", "[", "u'_HOSTNAME'", "]", "=", "accumulated", ".", "get", "(", "'_HOSTNAME'", ",", "'<no hostname>'", ")", "message", "[", "u'_PROCESS_NAME'", "]", "=", "accumulated", ".", "get", "(", "'_SYSTEMD_UNIT'", ",", "'<no unit>'", ")", "output_file", ".", "write", "(", "(", "json", ".", "dumps", "(", "message", ")", "+", "'\\n'", ")", ")", "accumulated", ".", "clear", "(", ")", "return", "handle_output_line" ]
create an output handler which turns journald's export format back into eliot json with extra fields to identify the log origin .
train
false
8,055
@memoize def from_theme(name, fallback=None): if hasattr(QtGui.QIcon, u'fromTheme'): (base, ext) = os.path.splitext(name) if fallback: qicon = QtGui.QIcon.fromTheme(base, icon(fallback)) else: qicon = QtGui.QIcon.fromTheme(base) if (not qicon.isNull()): return qicon return icon((fallback or name))
[ "@", "memoize", "def", "from_theme", "(", "name", ",", "fallback", "=", "None", ")", ":", "if", "hasattr", "(", "QtGui", ".", "QIcon", ",", "u'fromTheme'", ")", ":", "(", "base", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "name", ")", "if", "fallback", ":", "qicon", "=", "QtGui", ".", "QIcon", ".", "fromTheme", "(", "base", ",", "icon", "(", "fallback", ")", ")", "else", ":", "qicon", "=", "QtGui", ".", "QIcon", ".", "fromTheme", "(", "base", ")", "if", "(", "not", "qicon", ".", "isNull", "(", ")", ")", ":", "return", "qicon", "return", "icon", "(", "(", "fallback", "or", "name", ")", ")" ]
grab an icon from the current theme with a fallback ; supports older versions of qt by checking for fromTheme's availability .
train
false
8,056
def submodule_update(git_path, module, dest, track_submodules, force=False): params = get_submodule_update_params(module, git_path, dest) if (not os.path.exists(os.path.join(dest, '.gitmodules'))): return (0, '', '') cmd = [git_path, 'submodule', 'sync'] (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest) if (('remote' in params) and track_submodules): cmd = [git_path, 'submodule', 'update', '--init', '--recursive', '--remote'] else: cmd = [git_path, 'submodule', 'update', '--init', '--recursive'] if force: cmd.append('--force') (rc, out, err) = module.run_command(cmd, cwd=dest) if (rc != 0): module.fail_json(msg=(('Failed to init/update submodules: %s' % out) + err)) return (rc, out, err)
[ "def", "submodule_update", "(", "git_path", ",", "module", ",", "dest", ",", "track_submodules", ",", "force", "=", "False", ")", ":", "params", "=", "get_submodule_update_params", "(", "module", ",", "git_path", ",", "dest", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "dest", ",", "'.gitmodules'", ")", ")", ")", ":", "return", "(", "0", ",", "''", ",", "''", ")", "cmd", "=", "[", "git_path", ",", "'submodule'", ",", "'sync'", "]", "(", "rc", ",", "out", ",", "err", ")", "=", "module", ".", "run_command", "(", "cmd", ",", "check_rc", "=", "True", ",", "cwd", "=", "dest", ")", "if", "(", "(", "'remote'", "in", "params", ")", "and", "track_submodules", ")", ":", "cmd", "=", "[", "git_path", ",", "'submodule'", ",", "'update'", ",", "'--init'", ",", "'--recursive'", ",", "'--remote'", "]", "else", ":", "cmd", "=", "[", "git_path", ",", "'submodule'", ",", "'update'", ",", "'--init'", ",", "'--recursive'", "]", "if", "force", ":", "cmd", ".", "append", "(", "'--force'", ")", "(", "rc", ",", "out", ",", "err", ")", "=", "module", ".", "run_command", "(", "cmd", ",", "cwd", "=", "dest", ")", "if", "(", "rc", "!=", "0", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "(", "'Failed to init/update submodules: %s'", "%", "out", ")", "+", "err", ")", ")", "return", "(", "rc", ",", "out", ",", "err", ")" ]
init and update any submodules .
train
false
8,057
def test_get_code(): s = u('"""a docstring"""\nclass SomeClass(object, mixin):\n def __init__(self):\n self.xy = 3.0\n """statement docstr"""\n def some_method(self):\n return 1\n def yield_method(self):\n while hasattr(self, \'xy\'):\n yield True\n for x in [1, 2]:\n yield x\n def empty(self):\n pass\nclass Empty:\n pass\nclass WithDocstring:\n """class docstr"""\n pass\ndef method_with_docstring():\n """class docstr"""\n pass\n') assert (ParserWithRecovery(load_grammar(), s).module.get_code() == s)
[ "def", "test_get_code", "(", ")", ":", "s", "=", "u", "(", "'\"\"\"a docstring\"\"\"\\nclass SomeClass(object, mixin):\\n def __init__(self):\\n self.xy = 3.0\\n \"\"\"statement docstr\"\"\"\\n def some_method(self):\\n return 1\\n def yield_method(self):\\n while hasattr(self, \\'xy\\'):\\n yield True\\n for x in [1, 2]:\\n yield x\\n def empty(self):\\n pass\\nclass Empty:\\n pass\\nclass WithDocstring:\\n \"\"\"class docstr\"\"\"\\n pass\\ndef method_with_docstring():\\n \"\"\"class docstr\"\"\"\\n pass\\n'", ")", "assert", "(", "ParserWithRecovery", "(", "load_grammar", "(", ")", ",", "s", ")", ".", "module", ".", "get_code", "(", ")", "==", "s", ")" ]
use the same code that the parser also generates .
train
false
8,060
def _make_memoryview(size): return memoryview(bytearray(size))
[ "def", "_make_memoryview", "(", "size", ")", ":", "return", "memoryview", "(", "bytearray", "(", "size", ")", ")" ]
create a new memoryview wrapped around a bytearray of the given size .
train
false
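The point of the one-liner above is a writable buffer: wrapping a bytearray (rather than bytes) is what makes the resulting view mutable. A tiny demonstration:

def _make_memoryview(size):
    return memoryview(bytearray(size))

mv = _make_memoryview(4)
mv[0] = 0xFF      # writable because the underlying object is a bytearray
print(bytes(mv))  # b'\xff\x00\x00\x00'
print(len(mv))    # 4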
8,061
def port(container, private_port): status = base_status.copy() client = _get_client() try: port_info = client.port(_get_container_infos(container)['Id'], private_port) _valid(status, id_=container, out=port_info) except Exception: _invalid(status, id_=container, out=traceback.format_exc()) return status
[ "def", "port", "(", "container", ",", "private_port", ")", ":", "status", "=", "base_status", ".", "copy", "(", ")", "client", "=", "_get_client", "(", ")", "try", ":", "port_info", "=", "client", ".", "port", "(", "_get_container_infos", "(", "container", ")", "[", "'Id'", "]", ",", "private_port", ")", "_valid", "(", "status", ",", "id_", "=", "container", ",", "out", "=", "port_info", ")", "except", "Exception", ":", "_invalid", "(", "status", ",", "id_", "=", "container", ",", "out", "=", "traceback", ".", "format_exc", "(", ")", ")", "return", "status" ]
returns port mapping information for a given container .
train
false
8,062
def get_tensor_with_parent_name(tensor): tensor_name = tensor.name if (tensor.op.inputs[0].name is not None): return ((tensor.op.inputs[0].name + '_') + tensor_name) return tensor_name
[ "def", "get_tensor_with_parent_name", "(", "tensor", ")", ":", "tensor_name", "=", "tensor", ".", "name", "if", "(", "tensor", ".", "op", ".", "inputs", "[", "0", "]", ".", "name", "is", "not", "None", ")", ":", "return", "(", "(", "tensor", ".", "op", ".", "inputs", "[", "0", "]", ".", "name", "+", "'_'", ")", "+", "tensor_name", ")", "return", "tensor_name" ]
get a tensor name with its parent tensor's name as prefix .
train
false
8,063
@disable_signal_for_loaddata def update_simple_plugins(**kwargs): instance = kwargs[u'instance'] if kwargs.get(u'created', False): p_revisions = SimplePlugin.objects.filter(article=instance.article, deleted=False) p_revisions.update(article_revision=instance)
[ "@", "disable_signal_for_loaddata", "def", "update_simple_plugins", "(", "**", "kwargs", ")", ":", "instance", "=", "kwargs", "[", "u'instance'", "]", "if", "kwargs", ".", "get", "(", "u'created'", ",", "False", ")", ":", "p_revisions", "=", "SimplePlugin", ".", "objects", ".", "filter", "(", "article", "=", "instance", ".", "article", ",", "deleted", "=", "False", ")", "p_revisions", ".", "update", "(", "article_revision", "=", "instance", ")" ]
every time a new article revision is created , point its simple plugins at the new revision .
train
false
8,064
def get_storage_number_map(drive_types=(DRIVE_REMOVABLE, DRIVE_FIXED), debug=False): mask = GetLogicalDrives() type_map = {letter: GetDriveType(((letter + u':') + os.sep)) for (i, letter) in enumerate(string.ascii_uppercase) if (mask & (1 << i))} drives = (letter for (letter, dt) in type_map.iteritems() if (dt in drive_types)) ans = defaultdict(list) for letter in drives: try: sn = get_storage_number(((u'\\\\.\\' + letter) + u':')) ans[sn[:2]].append((sn[2], letter)) except WindowsError as err: if debug: prints((u'Failed to get storage number for drive: %s with error: %s' % (letter, as_unicode(err)))) continue for val in ans.itervalues(): val.sort(key=itemgetter(0)) return dict(ans)
[ "def", "get_storage_number_map", "(", "drive_types", "=", "(", "DRIVE_REMOVABLE", ",", "DRIVE_FIXED", ")", ",", "debug", "=", "False", ")", ":", "mask", "=", "GetLogicalDrives", "(", ")", "type_map", "=", "{", "letter", ":", "GetDriveType", "(", "(", "(", "letter", "+", "u':'", ")", "+", "os", ".", "sep", ")", ")", "for", "(", "i", ",", "letter", ")", "in", "enumerate", "(", "string", ".", "ascii_uppercase", ")", "if", "(", "mask", "&", "(", "1", "<<", "i", ")", ")", "}", "drives", "=", "(", "letter", "for", "(", "letter", ",", "dt", ")", "in", "type_map", ".", "iteritems", "(", ")", "if", "(", "dt", "in", "drive_types", ")", ")", "ans", "=", "defaultdict", "(", "list", ")", "for", "letter", "in", "drives", ":", "try", ":", "sn", "=", "get_storage_number", "(", "(", "(", "u'\\\\\\\\.\\\\'", "+", "letter", ")", "+", "u':'", ")", ")", "ans", "[", "sn", "[", ":", "2", "]", "]", ".", "append", "(", "(", "sn", "[", "2", "]", ",", "letter", ")", ")", "except", "WindowsError", "as", "err", ":", "if", "debug", ":", "prints", "(", "(", "u'Failed to get storage number for drive: %s with error: %s'", "%", "(", "letter", ",", "as_unicode", "(", "err", ")", ")", ")", ")", "continue", "for", "val", "in", "ans", ".", "itervalues", "(", ")", ":", "val", ".", "sort", "(", "key", "=", "itemgetter", "(", "0", ")", ")", "return", "dict", "(", "ans", ")" ]
get a mapping of drive letters to storage numbers for all drives on system .
train
false
8,065
def scheduled_backup(older_than=6, ignore_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, force=False): odb = new_backup(older_than, ignore_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, force=force) return odb
[ "def", "scheduled_backup", "(", "older_than", "=", "6", ",", "ignore_files", "=", "False", ",", "backup_path_db", "=", "None", ",", "backup_path_files", "=", "None", ",", "backup_path_private_files", "=", "None", ",", "force", "=", "False", ")", ":", "odb", "=", "new_backup", "(", "older_than", ",", "ignore_files", ",", "backup_path_db", "=", "backup_path_db", ",", "backup_path_files", "=", "backup_path_files", ",", "force", "=", "force", ")", "return", "odb" ]
this function is called from the scheduler ; it takes a backup and deletes backups older than 7 days .
train
false
8,067
def find_disk(virt_dom): xml_desc = virt_dom.XMLDesc(0) domain = etree.fromstring(xml_desc) os_type = domain.find('os/type').text driver = None if (CONF.libvirt.virt_type == 'lxc'): filesystem = domain.find('devices/filesystem') driver = filesystem.find('driver') source = filesystem.find('source') disk_path = source.get('dir') disk_path = disk_path[0:disk_path.rfind('rootfs')] disk_path = os.path.join(disk_path, 'disk') elif ((CONF.libvirt.virt_type == 'parallels') and (os_type == obj_fields.VMMode.EXE)): filesystem = domain.find('devices/filesystem') driver = filesystem.find('driver') source = filesystem.find('source') disk_path = source.get('file') else: disk = domain.find('devices/disk') driver = disk.find('driver') source = disk.find('source') disk_path = (source.get('file') or source.get('dev')) if ((not disk_path) and (CONF.libvirt.images_type == 'rbd')): disk_path = source.get('name') if disk_path: disk_path = ('rbd:' + disk_path) if (not disk_path): raise RuntimeError(_("Can't retrieve root device path from instance libvirt configuration")) if (driver is not None): format = driver.get('type') if (format == 'aio'): format = 'raw' else: format = None return (disk_path, format)
[ "def", "find_disk", "(", "virt_dom", ")", ":", "xml_desc", "=", "virt_dom", ".", "XMLDesc", "(", "0", ")", "domain", "=", "etree", ".", "fromstring", "(", "xml_desc", ")", "os_type", "=", "domain", ".", "find", "(", "'os/type'", ")", ".", "text", "driver", "=", "None", "if", "(", "CONF", ".", "libvirt", ".", "virt_type", "==", "'lxc'", ")", ":", "filesystem", "=", "domain", ".", "find", "(", "'devices/filesystem'", ")", "driver", "=", "filesystem", ".", "find", "(", "'driver'", ")", "source", "=", "filesystem", ".", "find", "(", "'source'", ")", "disk_path", "=", "source", ".", "get", "(", "'dir'", ")", "disk_path", "=", "disk_path", "[", "0", ":", "disk_path", ".", "rfind", "(", "'rootfs'", ")", "]", "disk_path", "=", "os", ".", "path", ".", "join", "(", "disk_path", ",", "'disk'", ")", "elif", "(", "(", "CONF", ".", "libvirt", ".", "virt_type", "==", "'parallels'", ")", "and", "(", "os_type", "==", "obj_fields", ".", "VMMode", ".", "EXE", ")", ")", ":", "filesystem", "=", "domain", ".", "find", "(", "'devices/filesystem'", ")", "driver", "=", "filesystem", ".", "find", "(", "'driver'", ")", "source", "=", "filesystem", ".", "find", "(", "'source'", ")", "disk_path", "=", "source", ".", "get", "(", "'file'", ")", "else", ":", "disk", "=", "domain", ".", "find", "(", "'devices/disk'", ")", "driver", "=", "disk", ".", "find", "(", "'driver'", ")", "source", "=", "disk", ".", "find", "(", "'source'", ")", "disk_path", "=", "(", "source", ".", "get", "(", "'file'", ")", "or", "source", ".", "get", "(", "'dev'", ")", ")", "if", "(", "(", "not", "disk_path", ")", "and", "(", "CONF", ".", "libvirt", ".", "images_type", "==", "'rbd'", ")", ")", ":", "disk_path", "=", "source", ".", "get", "(", "'name'", ")", "if", "disk_path", ":", "disk_path", "=", "(", "'rbd:'", "+", "disk_path", ")", "if", "(", "not", "disk_path", ")", ":", "raise", "RuntimeError", "(", "_", "(", "\"Can't retrieve root device path from instance libvirt configuration\"", ")", ")", "if", "(", "driver", "is", "not", "None", ")", ":", "format", "=", "driver", ".", "get", "(", "'type'", ")", "if", "(", "format", "==", "'aio'", ")", ":", "format", "=", "'raw'", "else", ":", "format", "=", "None", "return", "(", "disk_path", ",", "format", ")" ]
find root device path for instance ; may be a file or a device .
train
false
8,068
def BuildUri(uri, url_params=None, escape_params=True): parameter_list = DictionaryToParamList(url_params, escape_params) if parameter_list: if (uri.find('?') != (-1)): full_uri = '&'.join(([uri] + parameter_list)) else: full_uri = ('%s%s' % (uri, ('?%s' % '&'.join(([] + parameter_list))))) else: full_uri = uri return full_uri
[ "def", "BuildUri", "(", "uri", ",", "url_params", "=", "None", ",", "escape_params", "=", "True", ")", ":", "parameter_list", "=", "DictionaryToParamList", "(", "url_params", ",", "escape_params", ")", "if", "parameter_list", ":", "if", "(", "uri", ".", "find", "(", "'?'", ")", "!=", "(", "-", "1", ")", ")", ":", "full_uri", "=", "'&'", ".", "join", "(", "(", "[", "uri", "]", "+", "parameter_list", ")", ")", "else", ":", "full_uri", "=", "(", "'%s%s'", "%", "(", "uri", ",", "(", "'?%s'", "%", "'&'", ".", "join", "(", "(", "[", "]", "+", "parameter_list", ")", ")", ")", ")", ")", "else", ":", "full_uri", "=", "uri", "return", "full_uri" ]
converts a uri string and a collection of parameters into a uri .
train
false
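BuildUri above leans on a DictionaryToParamList helper that is not shown in the snippet, so the stand-in below is a hypothetical minimal version; the condensed BuildUri body is behavior-equivalent to the original's two join branches.

from urllib.parse import quote_plus

def DictionaryToParamList(url_params, escape_params=True):
    # hypothetical minimal stand-in for the helper BuildUri assumes
    if not url_params:
        return []
    encode = quote_plus if escape_params else str
    return ['%s=%s' % (k, encode(str(v))) for k, v in url_params.items()]

def BuildUri(uri, url_params=None, escape_params=True):
    parameter_list = DictionaryToParamList(url_params, escape_params)
    if parameter_list:
        joiner = '&' if '?' in uri else '?'  # append vs. start the query string
        return uri + joiner + '&'.join(parameter_list)
    return uri

print(BuildUri('http://example.com/feed', {'q': 'dogs', 'max-results': 25}))
# http://example.com/feed?q=dogs&max-results=25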
8,072
def _classify_point(re, im): if ((not re) and (not im)): return OO if (not re): if (im > 0): return A2 else: return A4 elif (not im): if (re > 0): return A1 else: return A3
[ "def", "_classify_point", "(", "re", ",", "im", ")", ":", "if", "(", "(", "not", "re", ")", "and", "(", "not", "im", ")", ")", ":", "return", "OO", "if", "(", "not", "re", ")", ":", "if", "(", "im", ">", "0", ")", ":", "return", "A2", "else", ":", "return", "A4", "elif", "(", "not", "im", ")", ":", "if", "(", "re", ">", "0", ")", ":", "return", "A1", "else", ":", "return", "A3" ]
return the half-axis on which point is located .
train
false
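A self-contained sketch of the half-axis classifier above. OO and A1..A4 are sentinel labels in the original (the origin and the four half-axes), so plain strings stand in for them here; note the function falls through to None for points off both axes.

OO, A1, A2, A3, A4 = 'OO', 'A1', 'A2', 'A3', 'A4'  # stand-in labels

def _classify_point(re, im):
    if not re and not im:
        return OO                      # the origin
    if not re:
        return A2 if im > 0 else A4    # positive / negative imaginary half-axis
    if not im:
        return A1 if re > 0 else A3    # positive / negative real half-axis
    return None                        # off-axis: implicitly None in the original

print(_classify_point(0, 0))   # OO
print(_classify_point(3, 0))   # A1
print(_classify_point(0, -2))  # A4
print(_classify_point(1, 1))   # None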
8,073
def _ssh_copy_key(ssh_bin, master_address, ec2_key_pair_file, keyfile): with open(ec2_key_pair_file, 'rb') as f: args = [('bash -c "cat > %s" && chmod 600 %s' % (keyfile, keyfile))] _check_output(*_ssh_run(ssh_bin, master_address, ec2_key_pair_file, args, stdin=f.read()))
[ "def", "_ssh_copy_key", "(", "ssh_bin", ",", "master_address", ",", "ec2_key_pair_file", ",", "keyfile", ")", ":", "with", "open", "(", "ec2_key_pair_file", ",", "'rb'", ")", "as", "f", ":", "args", "=", "[", "(", "'bash -c \"cat > %s\" && chmod 600 %s'", "%", "(", "keyfile", ",", "keyfile", ")", ")", "]", "_check_output", "(", "*", "_ssh_run", "(", "ssh_bin", ",", "master_address", ",", "ec2_key_pair_file", ",", "args", ",", "stdin", "=", "f", ".", "read", "(", ")", ")", ")" ]
prepare master to ssh to slaves by copying the emr private key to the master node .
train
false
8,074
def boto_supports_volume_encryption(): return (hasattr(boto, 'Version') and (LooseVersion(boto.Version) >= LooseVersion('2.29.0')))
[ "def", "boto_supports_volume_encryption", "(", ")", ":", "return", "(", "hasattr", "(", "boto", ",", "'Version'", ")", "and", "(", "LooseVersion", "(", "boto", ".", "Version", ")", ">=", "LooseVersion", "(", "'2.29.0'", ")", ")", ")" ]
check if the boto library supports encryption of ebs volumes ; returns true if the installed boto version is at least 2.29.0 .
train
false
8,075
def dataset_map_from_iterable(iterable): return {dataset.dataset_id: dataset for dataset in iterable}
[ "def", "dataset_map_from_iterable", "(", "iterable", ")", ":", "return", "{", "dataset", ".", "dataset_id", ":", "dataset", "for", "dataset", "in", "iterable", "}" ]
turn a list of datasets into a map from their ids to the datasets .
train
false
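The comprehension above simply re-keys an iterable of datasets by dataset_id; a minimal sketch with a stand-in Dataset record type:

from collections import namedtuple

Dataset = namedtuple('Dataset', ['dataset_id', 'path'])  # stand-in record type

def dataset_map_from_iterable(iterable):
    return {dataset.dataset_id: dataset for dataset in iterable}

datasets = [Dataset('d1', '/data/one'), Dataset('d2', '/data/two')]
by_id = dataset_map_from_iterable(datasets)
print(by_id['d2'].path)  # /data/two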
8,077
def retry_on_eintr(function, *args, **kw): while True: try: return function(*args, **kw) except IOError as e: if (e.errno != errno.EINTR): raise
[ "def", "retry_on_eintr", "(", "function", ",", "*", "args", ",", "**", "kw", ")", ":", "while", "True", ":", "try", ":", "return", "function", "(", "*", "args", ",", "**", "kw", ")", "except", "IOError", "as", "e", ":", "if", "(", "e", ".", "errno", "!=", "errno", ".", "EINTR", ")", ":", "raise" ]
run a function and retry it while getting eintr errors .
train
true
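retry_on_eintr above is the classic pre-PEP-475 pattern for syscalls interrupted by signals; a usage sketch follows (on Python 3.5+ the interpreter retries EINTR itself, so the wrapper mostly matters on older runtimes; the file path is hypothetical).

import errno

def retry_on_eintr(function, *args, **kw):
    while True:
        try:
            return function(*args, **kw)
        except IOError as e:
            if e.errno != errno.EINTR:
                raise

with open('/etc/hostname') as f:   # hypothetical readable file
    data = retry_on_eintr(f.read)  # retried transparently if a signal lands
print(data)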
8,078
def SecSocketConnection(host, username, password, port=DEFAULT_PORT): try: stream = SocketStream.from_new_secure_socket(host, port, username, password) except: raise LoginError('authentication failure') return Connection(Channel(stream))
[ "def", "SecSocketConnection", "(", "host", ",", "username", ",", "password", ",", "port", "=", "DEFAULT_PORT", ")", ":", "try", ":", "stream", "=", "SocketStream", ".", "from_new_secure_socket", "(", "host", ",", "port", ",", "username", ",", "password", ")", "except", ":", "raise", "LoginError", "(", "'authentication failure'", ")", "return", "Connection", "(", "Channel", "(", "stream", ")", ")" ]
shorthand for creating secure socket connections .
train
false
8,079
@defer.inlineCallbacks def _runSequentially(callables, stopOnFirstError=False): results = [] for f in callables: d = defer.maybeDeferred(f) try: thing = (yield d) results.append((defer.SUCCESS, thing)) except Exception: results.append((defer.FAILURE, Failure())) if stopOnFirstError: break defer.returnValue(results)
[ "@", "defer", ".", "inlineCallbacks", "def", "_runSequentially", "(", "callables", ",", "stopOnFirstError", "=", "False", ")", ":", "results", "=", "[", "]", "for", "f", "in", "callables", ":", "d", "=", "defer", ".", "maybeDeferred", "(", "f", ")", "try", ":", "thing", "=", "(", "yield", "d", ")", "results", ".", "append", "(", "(", "defer", ".", "SUCCESS", ",", "thing", ")", ")", "except", "Exception", ":", "results", ".", "append", "(", "(", "defer", ".", "FAILURE", ",", "Failure", "(", ")", ")", ")", "if", "stopOnFirstError", ":", "break", "defer", ".", "returnValue", "(", "results", ")" ]
run the given callables one after the other .
train
false
8,081
def run_hook(component, translation, script, env=None, *args): if script: command = [script] if args: command.extend(args) if component.is_repo_link: target = component.linked_subproject else: target = component environment = {'WL_VCS': target.vcs, 'WL_REPO': target.repo, 'WL_PATH': target.get_path(), 'WL_FILEMASK': component.filemask, 'WL_TEMPLATE': component.template, 'WL_FILE_FORMAT': component.file_format, 'WL_BRANCH': component.branch} if translation: environment['WL_LANGUAGE'] = translation.language_code if (env is not None): environment.update(env) try: subprocess.check_call(command, env=get_clean_env(environment), cwd=component.get_path()) return True except (OSError, subprocess.CalledProcessError) as err: component.log_error('failed to run hook script %s: %s', script, err) return False
[ "def", "run_hook", "(", "component", ",", "translation", ",", "script", ",", "env", "=", "None", ",", "*", "args", ")", ":", "if", "script", ":", "command", "=", "[", "script", "]", "if", "args", ":", "command", ".", "extend", "(", "args", ")", "if", "component", ".", "is_repo_link", ":", "target", "=", "component", ".", "linked_subproject", "else", ":", "target", "=", "component", "environment", "=", "{", "'WL_VCS'", ":", "target", ".", "vcs", ",", "'WL_REPO'", ":", "target", ".", "repo", ",", "'WL_PATH'", ":", "target", ".", "get_path", "(", ")", ",", "'WL_FILEMASK'", ":", "component", ".", "filemask", ",", "'WL_TEMPLATE'", ":", "component", ".", "template", ",", "'WL_FILE_FORMAT'", ":", "component", ".", "file_format", ",", "'WL_BRANCH'", ":", "component", ".", "branch", "}", "if", "translation", ":", "environment", "[", "'WL_LANGUAGE'", "]", "=", "translation", ".", "language_code", "if", "(", "env", "is", "not", "None", ")", ":", "environment", ".", "update", "(", "env", ")", "try", ":", "subprocess", ".", "check_call", "(", "command", ",", "env", "=", "get_clean_env", "(", "environment", ")", ",", "cwd", "=", "component", ".", "get_path", "(", ")", ")", "return", "True", "except", "(", "OSError", ",", "subprocess", ".", "CalledProcessError", ")", "as", "err", ":", "component", ".", "log_error", "(", "'failed to run hook script %s: %s'", ",", "script", ",", "err", ")", "return", "False" ]
try to find and execute a hook from the specified project directory .
train
false
8,082
def get_jinja_env(): templates_directory = os.path.abspath(os.path.join(__file__, '..', '..', templates)) jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_directory)) return jinja_environment
[ "def", "get_jinja_env", "(", ")", ":", "templates_directory", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "__file__", ",", "'..'", ",", "'..'", ",", "templates", ")", ")", "jinja_environment", "=", "jinja2", ".", "Environment", "(", "loader", "=", "jinja2", ".", "FileSystemLoader", "(", "templates_directory", ")", ")", "return", "jinja_environment" ]
returns a jinja environment with a filesystemloader for our templates .
train
false
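Note that the snippet above passes a bare name templates to os.path.join, presumably a module-level string defined elsewhere in its source; the sketch below spells it out as a literal and shows typical usage (the template name is hypothetical).

import os
import jinja2

def get_jinja_env():
    # resolve <repo>/templates relative to this file
    templates_directory = os.path.abspath(
        os.path.join(__file__, '..', '..', 'templates'))
    return jinja2.Environment(
        loader=jinja2.FileSystemLoader(templates_directory))

env = get_jinja_env()
page = env.get_template('index.html')  # hypothetical template name
print(page.render(title='hello'))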
8,083
def rm_auth_key_from_file(user, source, config='.ssh/authorized_keys', saltenv='base'): lfile = __salt__['cp.cache_file'](source, saltenv) if (not os.path.isfile(lfile)): raise CommandExecutionError('Failed to pull key file from salt file server') s_keys = _validate_keys(lfile) if (not s_keys): err = 'No keys detected in {0}. Is file properly formatted?'.format(source) log.error(err) __context__['ssh_auth.error'] = err return 'fail' else: rval = '' for key in s_keys: rval += rm_auth_key(user, key, config) if ('Key not removed' in rval): return 'Key not removed' elif ('Key removed' in rval): return 'Key removed' else: return 'Key not present'
[ "def", "rm_auth_key_from_file", "(", "user", ",", "source", ",", "config", "=", "'.ssh/authorized_keys'", ",", "saltenv", "=", "'base'", ")", ":", "lfile", "=", "__salt__", "[", "'cp.cache_file'", "]", "(", "source", ",", "saltenv", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "lfile", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Failed to pull key file from salt file server'", ")", "s_keys", "=", "_validate_keys", "(", "lfile", ")", "if", "(", "not", "s_keys", ")", ":", "err", "=", "'No keys detected in {0}. Is file properly formatted?'", ".", "format", "(", "source", ")", "log", ".", "error", "(", "err", ")", "__context__", "[", "'ssh_auth.error'", "]", "=", "err", "return", "'fail'", "else", ":", "rval", "=", "''", "for", "key", "in", "s_keys", ":", "rval", "+=", "rm_auth_key", "(", "user", ",", "key", ",", "config", ")", "if", "(", "'Key not removed'", "in", "rval", ")", ":", "return", "'Key not removed'", "elif", "(", "'Key removed'", "in", "rval", ")", ":", "return", "'Key removed'", "else", ":", "return", "'Key not present'" ]
remove an authorized key from the specified user's authorized key file .
train
true
8,084
def delete_system_info(key, default=None): obj = meta.Session.query(SystemInfo).filter_by(key=key).first() if obj: meta.Session.delete(obj) meta.Session.commit()
[ "def", "delete_system_info", "(", "key", ",", "default", "=", "None", ")", ":", "obj", "=", "meta", ".", "Session", ".", "query", "(", "SystemInfo", ")", ".", "filter_by", "(", "key", "=", "key", ")", ".", "first", "(", ")", "if", "obj", ":", "meta", ".", "Session", ".", "delete", "(", "obj", ")", "meta", ".", "Session", ".", "commit", "(", ")" ]
delete data from system_info table .
train
false
8,085
def dist_string(dist): out = (u'%.1f%%' % ((1 - dist) * 100)) if (dist <= config['match']['strong_rec_thresh'].as_number()): out = ui.colorize('text_success', out) elif (dist <= config['match']['medium_rec_thresh'].as_number()): out = ui.colorize('text_warning', out) else: out = ui.colorize('text_error', out) return out
[ "def", "dist_string", "(", "dist", ")", ":", "out", "=", "(", "u'%.1f%%'", "%", "(", "(", "1", "-", "dist", ")", "*", "100", ")", ")", "if", "(", "dist", "<=", "config", "[", "'match'", "]", "[", "'strong_rec_thresh'", "]", ".", "as_number", "(", ")", ")", ":", "out", "=", "ui", ".", "colorize", "(", "'text_success'", ",", "out", ")", "elif", "(", "dist", "<=", "config", "[", "'match'", "]", "[", "'medium_rec_thresh'", "]", ".", "as_number", "(", ")", ")", ":", "out", "=", "ui", ".", "colorize", "(", "'text_warning'", ",", "out", ")", "else", ":", "out", "=", "ui", ".", "colorize", "(", "'text_error'", ",", "out", ")", "return", "out" ]
formats a distance as a colorized similarity percentage string .
train
false
8,086
def flatpage(request, url): if ((not url.endswith('/')) and settings.APPEND_SLASH): return HttpResponseRedirect(('%s/' % request.path)) if (not url.startswith('/')): url = ('/' + url) f = get_object_or_404(FlatPage, url__exact=url, sites__id__exact=settings.SITE_ID) return render_flatpage(request, f)
[ "def", "flatpage", "(", "request", ",", "url", ")", ":", "if", "(", "(", "not", "url", ".", "endswith", "(", "'/'", ")", ")", "and", "settings", ".", "APPEND_SLASH", ")", ":", "return", "HttpResponseRedirect", "(", "(", "'%s/'", "%", "request", ".", "path", ")", ")", "if", "(", "not", "url", ".", "startswith", "(", "'/'", ")", ")", ":", "url", "=", "(", "'/'", "+", "url", ")", "f", "=", "get_object_or_404", "(", "FlatPage", ",", "url__exact", "=", "url", ",", "sites__id__exact", "=", "settings", ".", "SITE_ID", ")", "return", "render_flatpage", "(", "request", ",", "f", ")" ]
public interface to the flat page view .
train
false