id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
22,962
def chain_dot(*arrs): return reduce((lambda x, y: np.dot(y, x)), arrs[::(-1)])
[ "def", "chain_dot", "(", "*", "arrs", ")", ":", "return", "reduce", "(", "(", "lambda", "x", ",", "y", ":", "np", ".", "dot", "(", "y", ",", "x", ")", ")", ",", "arrs", "[", ":", ":", "(", "-", "1", ")", "]", ")" ]
returns the dot product of the given matrices .
train
false
22,963
def _entity_descriptor(entity, key): insp = inspection.inspect(entity) if insp.is_selectable: description = entity entity = insp.c elif insp.is_aliased_class: entity = insp.entity description = entity elif hasattr(insp, 'mapper'): description = entity = insp.mapper.class_ else: description = entity try: return getattr(entity, key) except AttributeError: raise sa_exc.InvalidRequestError(("Entity '%s' has no property '%s'" % (description, key)))
[ "def", "_entity_descriptor", "(", "entity", ",", "key", ")", ":", "insp", "=", "inspection", ".", "inspect", "(", "entity", ")", "if", "insp", ".", "is_selectable", ":", "description", "=", "entity", "entity", "=", "insp", ".", "c", "elif", "insp", ".", "is_aliased_class", ":", "entity", "=", "insp", ".", "entity", "description", "=", "entity", "elif", "hasattr", "(", "insp", ",", "'mapper'", ")", ":", "description", "=", "entity", "=", "insp", ".", "mapper", ".", "class_", "else", ":", "description", "=", "entity", "try", ":", "return", "getattr", "(", "entity", ",", "key", ")", "except", "AttributeError", ":", "raise", "sa_exc", ".", "InvalidRequestError", "(", "(", "\"Entity '%s' has no property '%s'\"", "%", "(", "description", ",", "key", ")", ")", ")" ]
return a class attribute given an entity and string name .
train
false
22,964
def sockaddr(host, port, network='ipv4'): address_family = {'ipv4': socket.AF_INET, 'ipv6': socket.AF_INET6}[network] for (family, _, _, _, ip) in socket.getaddrinfo(host, None, address_family): ip = ip[0] if (family == address_family): break else: log.error(('Could not find %s address for %r' % (network, host))) info = socket.getaddrinfo(host, None, address_family) host = socket.inet_pton(address_family, ip) sockaddr = p16(address_family) sockaddr += pack(port, word_size=16, endianness='big') length = 0 if (network == 'ipv4'): sockaddr += host length = 16 else: sockaddr += p32(4294967295) sockaddr += host length = (len(sockaddr) + 4) return (sockaddr, length, address_family)
[ "def", "sockaddr", "(", "host", ",", "port", ",", "network", "=", "'ipv4'", ")", ":", "address_family", "=", "{", "'ipv4'", ":", "socket", ".", "AF_INET", ",", "'ipv6'", ":", "socket", ".", "AF_INET6", "}", "[", "network", "]", "for", "(", "family", ",", "_", ",", "_", ",", "_", ",", "ip", ")", "in", "socket", ".", "getaddrinfo", "(", "host", ",", "None", ",", "address_family", ")", ":", "ip", "=", "ip", "[", "0", "]", "if", "(", "family", "==", "address_family", ")", ":", "break", "else", ":", "log", ".", "error", "(", "(", "'Could not find %s address for %r'", "%", "(", "network", ",", "host", ")", ")", ")", "info", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "None", ",", "address_family", ")", "host", "=", "socket", ".", "inet_pton", "(", "address_family", ",", "ip", ")", "sockaddr", "=", "p16", "(", "address_family", ")", "sockaddr", "+=", "pack", "(", "port", ",", "word_size", "=", "16", ",", "endianness", "=", "'big'", ")", "length", "=", "0", "if", "(", "network", "==", "'ipv4'", ")", ":", "sockaddr", "+=", "host", "length", "=", "16", "else", ":", "sockaddr", "+=", "p32", "(", "4294967295", ")", "sockaddr", "+=", "host", "length", "=", "(", "len", "(", "sockaddr", ")", "+", "4", ")", "return", "(", "sockaddr", ",", "length", ",", "address_family", ")" ]
sockaddr -> creates a sockaddr_in or sockaddr_in6 memory buffer for use in shellcode .
train
false
22,966
def text_to_bin(ip): if (':' not in ip): return ipv4_to_bin(ip) else: return ipv6_to_bin(ip)
[ "def", "text_to_bin", "(", "ip", ")", ":", "if", "(", "':'", "not", "in", "ip", ")", ":", "return", "ipv4_to_bin", "(", "ip", ")", "else", ":", "return", "ipv6_to_bin", "(", "ip", ")" ]
converts human readable ipv4 or ipv6 string to binary representation .
train
false
22,968
def str2dn(dn, flags=0): if (not dn): return [] return ldap.functions._ldap_function_call(None, _ldap.str2dn, dn, flags)
[ "def", "str2dn", "(", "dn", ",", "flags", "=", "0", ")", ":", "if", "(", "not", "dn", ")", ":", "return", "[", "]", "return", "ldap", ".", "functions", ".", "_ldap_function_call", "(", "None", ",", "_ldap", ".", "str2dn", ",", "dn", ",", "flags", ")" ]
this function takes a dn as string as parameter and returns a decomposed dn .
train
false
22,969
def assert_clean(data): def _ensure_clean(value): if (value != bleach.clean(value)): raise ValueError return escape_html(data)
[ "def", "assert_clean", "(", "data", ")", ":", "def", "_ensure_clean", "(", "value", ")", ":", "if", "(", "value", "!=", "bleach", ".", "clean", "(", "value", ")", ")", ":", "raise", "ValueError", "return", "escape_html", "(", "data", ")" ]
ensure that data is cleaned :raise: assertionerror .
train
false
22,971
def ListBuckets(): return ObjectStore.ListInstances()
[ "def", "ListBuckets", "(", ")", ":", "return", "ObjectStore", ".", "ListInstances", "(", ")" ]
return the list of bucket names initialized in the object store .
train
false
22,972
def _check_rule(rule, _rule): if (_rule.get('from_port') is None): _rule['from_port'] = (-1) if (_rule.get('to_port') is None): _rule['to_port'] = (-1) if ((rule['ip_protocol'] == _rule['ip_protocol']) and (str(rule['from_port']) == str(_rule['from_port'])) and (str(rule['to_port']) == str(_rule['to_port']))): _cidr_ip = _rule.get('cidr_ip') if (_cidr_ip and (_cidr_ip == rule.get('cidr_ip'))): return True _owner_id = _rule.get('source_group_owner_id') if (_owner_id and (_owner_id == rule.get('source_group_owner_id'))): return True _group_id = _rule.get('source_group_group_id') if (_group_id and (_group_id == rule.get('source_group_group_id'))): return True _group_name = _rule.get('source_group_name') if (_group_name and (_group_id == rule.get('source_group_name'))): return True return False
[ "def", "_check_rule", "(", "rule", ",", "_rule", ")", ":", "if", "(", "_rule", ".", "get", "(", "'from_port'", ")", "is", "None", ")", ":", "_rule", "[", "'from_port'", "]", "=", "(", "-", "1", ")", "if", "(", "_rule", ".", "get", "(", "'to_port'", ")", "is", "None", ")", ":", "_rule", "[", "'to_port'", "]", "=", "(", "-", "1", ")", "if", "(", "(", "rule", "[", "'ip_protocol'", "]", "==", "_rule", "[", "'ip_protocol'", "]", ")", "and", "(", "str", "(", "rule", "[", "'from_port'", "]", ")", "==", "str", "(", "_rule", "[", "'from_port'", "]", ")", ")", "and", "(", "str", "(", "rule", "[", "'to_port'", "]", ")", "==", "str", "(", "_rule", "[", "'to_port'", "]", ")", ")", ")", ":", "_cidr_ip", "=", "_rule", ".", "get", "(", "'cidr_ip'", ")", "if", "(", "_cidr_ip", "and", "(", "_cidr_ip", "==", "rule", ".", "get", "(", "'cidr_ip'", ")", ")", ")", ":", "return", "True", "_owner_id", "=", "_rule", ".", "get", "(", "'source_group_owner_id'", ")", "if", "(", "_owner_id", "and", "(", "_owner_id", "==", "rule", ".", "get", "(", "'source_group_owner_id'", ")", ")", ")", ":", "return", "True", "_group_id", "=", "_rule", ".", "get", "(", "'source_group_group_id'", ")", "if", "(", "_group_id", "and", "(", "_group_id", "==", "rule", ".", "get", "(", "'source_group_group_id'", ")", ")", ")", ":", "return", "True", "_group_name", "=", "_rule", ".", "get", "(", "'source_group_name'", ")", "if", "(", "_group_name", "and", "(", "_group_id", "==", "rule", ".", "get", "(", "'source_group_name'", ")", ")", ")", ":", "return", "True", "return", "False" ]
check to see if two rules are the same .
train
false
22,973
def maybe_timedelta(delta): if isinstance(delta, numbers.Real): return timedelta(seconds=delta) return delta
[ "def", "maybe_timedelta", "(", "delta", ")", ":", "if", "isinstance", "(", "delta", ",", "numbers", ".", "Real", ")", ":", "return", "timedelta", "(", "seconds", "=", "delta", ")", "return", "delta" ]
convert integer to timedelta .
train
false
22,974
def scrub_dt_dn(dt, dn): (ndt, ndn) = (dt, dn) if (dt in lower_case_files_for): (ndt, ndn) = (scrub(dt), scrub(dn)) return (ndt, ndn)
[ "def", "scrub_dt_dn", "(", "dt", ",", "dn", ")", ":", "(", "ndt", ",", "ndn", ")", "=", "(", "dt", ",", "dn", ")", "if", "(", "dt", "in", "lower_case_files_for", ")", ":", "(", "ndt", ",", "ndn", ")", "=", "(", "scrub", "(", "dt", ")", ",", "scrub", "(", "dn", ")", ")", "return", "(", "ndt", ",", "ndn", ")" ]
returns in lowercase and code friendly names of doctype and name for certain types .
train
false
22,975
def prepare_earth_position_vel(time): from ..solar_system import get_body_barycentric, get_body_barycentric_posvel earth_pv = get_body_barycentric_posvel(u'earth', time) sun = get_body_barycentric(u'sun', time) earth_heliocentric = (earth_pv[0] - sun).get_xyz(xyz_axis=(-1)).to(u.au).value earth_pv = np.concatenate((earth_pv[0].get_xyz(xyz_axis=(-1)).to(u.au)[..., np.newaxis, :].value, earth_pv[1].get_xyz(xyz_axis=(-1)).to((u.au / u.d))[..., np.newaxis, :].value), axis=(-2)) return (earth_pv, earth_heliocentric)
[ "def", "prepare_earth_position_vel", "(", "time", ")", ":", "from", ".", ".", "solar_system", "import", "get_body_barycentric", ",", "get_body_barycentric_posvel", "earth_pv", "=", "get_body_barycentric_posvel", "(", "u'earth'", ",", "time", ")", "sun", "=", "get_body_barycentric", "(", "u'sun'", ",", "time", ")", "earth_heliocentric", "=", "(", "earth_pv", "[", "0", "]", "-", "sun", ")", ".", "get_xyz", "(", "xyz_axis", "=", "(", "-", "1", ")", ")", ".", "to", "(", "u", ".", "au", ")", ".", "value", "earth_pv", "=", "np", ".", "concatenate", "(", "(", "earth_pv", "[", "0", "]", ".", "get_xyz", "(", "xyz_axis", "=", "(", "-", "1", ")", ")", ".", "to", "(", "u", ".", "au", ")", "[", "...", ",", "np", ".", "newaxis", ",", ":", "]", ".", "value", ",", "earth_pv", "[", "1", "]", ".", "get_xyz", "(", "xyz_axis", "=", "(", "-", "1", ")", ")", ".", "to", "(", "(", "u", ".", "au", "/", "u", ".", "d", ")", ")", "[", "...", ",", "np", ".", "newaxis", ",", ":", "]", ".", "value", ")", ",", "axis", "=", "(", "-", "2", ")", ")", "return", "(", "earth_pv", ",", "earth_heliocentric", ")" ]
get barycentric position and velocity .
train
false
22,976
def get_browser(user_agent): match = re.search('(?i)(firefox|msie|chrome|safari|trident)', user_agent, re.IGNORECASE) if match: browser = match.group(1) else: browser = None return browser
[ "def", "get_browser", "(", "user_agent", ")", ":", "match", "=", "re", ".", "search", "(", "'(?i)(firefox|msie|chrome|safari|trident)'", ",", "user_agent", ",", "re", ".", "IGNORECASE", ")", "if", "match", ":", "browser", "=", "match", ".", "group", "(", "1", ")", "else", ":", "browser", "=", "None", "return", "browser" ]
get browser name from user agent .
train
false
22,977
def commit(): connection._commit() set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
commit messages to transifex .
train
false
22,978
def set_owner_permissions(resource): if resource.polymorphic_ctype: if (resource.polymorphic_ctype.name == 'layer'): for perm in LAYER_ADMIN_PERMISSIONS: assign_perm(perm, resource.owner, resource.layer) set_geofence_owner(resource, str(resource.owner)) for perm in ADMIN_PERMISSIONS: assign_perm(perm, resource.owner, resource.get_self_resource())
[ "def", "set_owner_permissions", "(", "resource", ")", ":", "if", "resource", ".", "polymorphic_ctype", ":", "if", "(", "resource", ".", "polymorphic_ctype", ".", "name", "==", "'layer'", ")", ":", "for", "perm", "in", "LAYER_ADMIN_PERMISSIONS", ":", "assign_perm", "(", "perm", ",", "resource", ".", "owner", ",", "resource", ".", "layer", ")", "set_geofence_owner", "(", "resource", ",", "str", "(", "resource", ".", "owner", ")", ")", "for", "perm", "in", "ADMIN_PERMISSIONS", ":", "assign_perm", "(", "perm", ",", "resource", ".", "owner", ",", "resource", ".", "get_self_resource", "(", ")", ")" ]
assign all admin permissions to the owner .
train
false
22,979
def decorate_logger_methods(logger): logger.findCaller = find_caller for key in LOGGER_KEYS: log_method = getattr(logger, key) log_method = decorate_log_method(log_method) setattr(logger, key, log_method) return logger
[ "def", "decorate_logger_methods", "(", "logger", ")", ":", "logger", ".", "findCaller", "=", "find_caller", "for", "key", "in", "LOGGER_KEYS", ":", "log_method", "=", "getattr", "(", "logger", ",", "key", ")", "log_method", "=", "decorate_log_method", "(", "log_method", ")", "setattr", "(", "logger", ",", "key", ",", "log_method", ")", "return", "logger" ]
decorate all the logger methods so all the keys in the extra dictionary are automatically prefixed with an underscore to avoid clashes with standard log record attributes .
train
false
22,980
@contextmanager def _temp_alembic_ini(db_url): with TemporaryDirectory() as td: alembic_ini = os.path.join(td, 'alembic.ini') write_alembic_ini(alembic_ini, db_url) (yield alembic_ini)
[ "@", "contextmanager", "def", "_temp_alembic_ini", "(", "db_url", ")", ":", "with", "TemporaryDirectory", "(", ")", "as", "td", ":", "alembic_ini", "=", "os", ".", "path", ".", "join", "(", "td", ",", "'alembic.ini'", ")", "write_alembic_ini", "(", "alembic_ini", ",", "db_url", ")", "(", "yield", "alembic_ini", ")" ]
context manager for temporary jupyterhub alembic directory temporarily write an alembic .
train
false
22,981
def grab_doc(cmd, trap_error=True): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) (stdout, stderr) = proc.communicate() if (trap_error and proc.returncode): msg = (u'Attempting to run %s. Returned Error: %s' % (cmd, stderr)) raise IOError(msg) if stderr: return stderr return stdout
[ "def", "grab_doc", "(", "cmd", ",", "trap_error", "=", "True", ")", ":", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ")", "(", "stdout", ",", "stderr", ")", "=", "proc", ".", "communicate", "(", ")", "if", "(", "trap_error", "and", "proc", ".", "returncode", ")", ":", "msg", "=", "(", "u'Attempting to run %s. Returned Error: %s'", "%", "(", "cmd", ",", "stderr", ")", ")", "raise", "IOError", "(", "msg", ")", "if", "stderr", ":", "return", "stderr", "return", "stdout" ]
run cmd without args and grab documentation .
train
false
22,983
def check_doi(doi): if (not doi): return None doi_check = re.search('10\\.\\d{4}/\\S+', doi) if (doi_check is not None): return doi_check.group() return None
[ "def", "check_doi", "(", "doi", ")", ":", "if", "(", "not", "doi", ")", ":", "return", "None", "doi_check", "=", "re", ".", "search", "(", "'10\\\\.\\\\d{4}/\\\\S+'", ",", "doi", ")", "if", "(", "doi_check", "is", "not", "None", ")", ":", "return", "doi_check", ".", "group", "(", ")", "return", "None" ]
check if something that looks like a doi is present anywhere in the string .
train
false
22,984
def unix_path(path, is_win32=utils.is_win32): unix_path = path if is_win32(): first = path[0] second = path[1] if (second == u':'): unix_path = ((u'/' + first) + path[2:].replace(u'\\', u'/')) return unix_path
[ "def", "unix_path", "(", "path", ",", "is_win32", "=", "utils", ".", "is_win32", ")", ":", "unix_path", "=", "path", "if", "is_win32", "(", ")", ":", "first", "=", "path", "[", "0", "]", "second", "=", "path", "[", "1", "]", "if", "(", "second", "==", "u':'", ")", ":", "unix_path", "=", "(", "(", "u'/'", "+", "first", ")", "+", "path", "[", "2", ":", "]", ".", "replace", "(", "u'\\\\'", ",", "u'/'", ")", ")", "return", "unix_path" ]
git for windows requires unix paths .
train
false
22,985
def make_field_value_list(headers, field, mdata): return sorted(set(mdata[:, headers.index(field)]))
[ "def", "make_field_value_list", "(", "headers", ",", "field", ",", "mdata", ")", ":", "return", "sorted", "(", "set", "(", "mdata", "[", ":", ",", "headers", ".", "index", "(", "field", ")", "]", ")", ")" ]
return sorted list of unique values field takes in mdata .
train
false
22,987
def _get_image_infos(image): status = base_status.copy() client = _get_client() try: infos = client.inspect_image(image) if infos: _valid(status, id_=infos['Id'], out=infos, comment='found') except Exception: pass if (not status['id']): _invalid(status) raise CommandExecutionError("ImageID '{0}' could not be resolved to an existing Image".format(image)) return status['out']
[ "def", "_get_image_infos", "(", "image", ")", ":", "status", "=", "base_status", ".", "copy", "(", ")", "client", "=", "_get_client", "(", ")", "try", ":", "infos", "=", "client", ".", "inspect_image", "(", "image", ")", "if", "infos", ":", "_valid", "(", "status", ",", "id_", "=", "infos", "[", "'Id'", "]", ",", "out", "=", "infos", ",", "comment", "=", "'found'", ")", "except", "Exception", ":", "pass", "if", "(", "not", "status", "[", "'id'", "]", ")", ":", "_invalid", "(", "status", ")", "raise", "CommandExecutionError", "(", "\"ImageID '{0}' could not be resolved to an existing Image\"", ".", "format", "(", "image", ")", ")", "return", "status", "[", "'out'", "]" ]
verify that the image exists we will try to resolve either by: - name - image_id - tag image image name / image id / image tag returns the image id .
train
false
22,988
def create_floatingip(floating_network, port=None, profile=None): conn = _auth(profile) return conn.create_floatingip(floating_network, port)
[ "def", "create_floatingip", "(", "floating_network", ",", "port", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "create_floatingip", "(", "floating_network", ",", "port", ")" ]
creates a new floatingip cli example: .
train
true
22,989
def minmax_scale(X, feature_range=(0, 1), axis=0, copy=True): X = check_array(X, copy=False, ensure_2d=False, warn_on_dtype=True, dtype=FLOAT_DTYPES) original_ndim = X.ndim if (original_ndim == 1): X = X.reshape(X.shape[0], 1) s = MinMaxScaler(feature_range=feature_range, copy=copy) if (axis == 0): X = s.fit_transform(X) else: X = s.fit_transform(X.T).T if (original_ndim == 1): X = X.ravel() return X
[ "def", "minmax_scale", "(", "X", ",", "feature_range", "=", "(", "0", ",", "1", ")", ",", "axis", "=", "0", ",", "copy", "=", "True", ")", ":", "X", "=", "check_array", "(", "X", ",", "copy", "=", "False", ",", "ensure_2d", "=", "False", ",", "warn_on_dtype", "=", "True", ",", "dtype", "=", "FLOAT_DTYPES", ")", "original_ndim", "=", "X", ".", "ndim", "if", "(", "original_ndim", "==", "1", ")", ":", "X", "=", "X", ".", "reshape", "(", "X", ".", "shape", "[", "0", "]", ",", "1", ")", "s", "=", "MinMaxScaler", "(", "feature_range", "=", "feature_range", ",", "copy", "=", "copy", ")", "if", "(", "axis", "==", "0", ")", ":", "X", "=", "s", ".", "fit_transform", "(", "X", ")", "else", ":", "X", "=", "s", ".", "fit_transform", "(", "X", ".", "T", ")", ".", "T", "if", "(", "original_ndim", "==", "1", ")", ":", "X", "=", "X", ".", "ravel", "(", ")", "return", "X" ]
transforms features by scaling each feature to a given range .
train
false
22,990
@login_required def reset_api_key(request): request.user.auth_token.delete() Token.objects.create(user=request.user, key=get_random_string(40)) return redirect_profile(u'#api')
[ "@", "login_required", "def", "reset_api_key", "(", "request", ")", ":", "request", ".", "user", ".", "auth_token", ".", "delete", "(", ")", "Token", ".", "objects", ".", "create", "(", "user", "=", "request", ".", "user", ",", "key", "=", "get_random_string", "(", "40", ")", ")", "return", "redirect_profile", "(", "u'#api'", ")" ]
resets user api key .
train
false
22,992
def get_project_type_handler(project_type): global PROJECT_TYPES return PROJECT_TYPES.get(project_type)
[ "def", "get_project_type_handler", "(", "project_type", ")", ":", "global", "PROJECT_TYPES", "return", "PROJECT_TYPES", ".", "get", "(", "project_type", ")" ]
returns the handler for the given project_type .
train
false
22,993
def delete_file_system(filesystemid, keyid=None, key=None, profile=None, region=None, **kwargs): client = _get_conn(key=key, keyid=keyid, profile=profile, region=region) client.delete_file_system(FileSystemId=filesystemid)
[ "def", "delete_file_system", "(", "filesystemid", ",", "keyid", "=", "None", ",", "key", "=", "None", ",", "profile", "=", "None", ",", "region", "=", "None", ",", "**", "kwargs", ")", ":", "client", "=", "_get_conn", "(", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ",", "region", "=", "region", ")", "client", ".", "delete_file_system", "(", "FileSystemId", "=", "filesystemid", ")" ]
deletes a file system .
train
true
22,994
def list_contexts(): return _context_reg.keys()
[ "def", "list_contexts", "(", ")", ":", "return", "_context_reg", ".", "keys", "(", ")" ]
return an iterable of all the registered context names .
train
false
22,996
def list_groups(refresh=False): if (('group.list_groups' in __context__) and (not refresh)): return __context__['group.getent'] ret = [] pythoncom.CoInitialize() nt = win32com.client.Dispatch('AdsNameSpaces') results = nt.GetObject('', 'WinNT://.') results.Filter = ['group'] for result in results: ret.append(result.name) __context__['group.list_groups'] = ret return ret
[ "def", "list_groups", "(", "refresh", "=", "False", ")", ":", "if", "(", "(", "'group.list_groups'", "in", "__context__", ")", "and", "(", "not", "refresh", ")", ")", ":", "return", "__context__", "[", "'group.getent'", "]", "ret", "=", "[", "]", "pythoncom", ".", "CoInitialize", "(", ")", "nt", "=", "win32com", ".", "client", ".", "Dispatch", "(", "'AdsNameSpaces'", ")", "results", "=", "nt", ".", "GetObject", "(", "''", ",", "'WinNT://.'", ")", "results", ".", "Filter", "=", "[", "'group'", "]", "for", "result", "in", "results", ":", "ret", ".", "append", "(", "result", ".", "name", ")", "__context__", "[", "'group.list_groups'", "]", "=", "ret", "return", "ret" ]
return a list of groups the named user belongs to .
train
false
22,997
def _node_func(G): if G.is_multigraph(): def sorted_node(u, v, key): return ((u, v, key) if (u <= v) else (v, u, key)) else: def sorted_node(u, v): return ((u, v) if (u <= v) else (v, u)) return sorted_node
[ "def", "_node_func", "(", "G", ")", ":", "if", "G", ".", "is_multigraph", "(", ")", ":", "def", "sorted_node", "(", "u", ",", "v", ",", "key", ")", ":", "return", "(", "(", "u", ",", "v", ",", "key", ")", "if", "(", "u", "<=", "v", ")", "else", "(", "v", ",", "u", ",", "key", ")", ")", "else", ":", "def", "sorted_node", "(", "u", ",", "v", ")", ":", "return", "(", "(", "u", ",", "v", ")", "if", "(", "u", "<=", "v", ")", "else", "(", "v", ",", "u", ")", ")", "return", "sorted_node" ]
returns a function which returns a sorted node for line graphs .
train
false
22,998
@lru_cache(maxsize=1024) def s_esc(s): return s.replace('/', '\\/')
[ "@", "lru_cache", "(", "maxsize", "=", "1024", ")", "def", "s_esc", "(", "s", ")", ":", "return", "s", ".", "replace", "(", "'/'", ",", "'\\\\/'", ")" ]
equivalent to s .
train
false
22,999
def compat_input(prompt): sys.stdout.write(prompt) sys.stdout.flush() return raw_input()
[ "def", "compat_input", "(", "prompt", ")", ":", "sys", ".", "stdout", ".", "write", "(", "prompt", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "return", "raw_input", "(", ")" ]
cygwins ptys are based on pipes .
train
false
23,001
@with_setup(setup, teardown) def test_show_samples(): rows = 10 cols = 10 m = (rows * cols) model = show_samples.load_model('dbm.pkl', m) dataset = yaml_parse.load(model.dataset_yaml_src) samples_viewer = show_samples.init_viewer(dataset, rows, cols) vis_batch = dataset.get_batch_topo(m) show_samples.update_viewer(dataset, samples_viewer, vis_batch, rows, cols)
[ "@", "with_setup", "(", "setup", ",", "teardown", ")", "def", "test_show_samples", "(", ")", ":", "rows", "=", "10", "cols", "=", "10", "m", "=", "(", "rows", "*", "cols", ")", "model", "=", "show_samples", ".", "load_model", "(", "'dbm.pkl'", ",", "m", ")", "dataset", "=", "yaml_parse", ".", "load", "(", "model", ".", "dataset_yaml_src", ")", "samples_viewer", "=", "show_samples", ".", "init_viewer", "(", "dataset", ",", "rows", ",", "cols", ")", "vis_batch", "=", "dataset", ".", "get_batch_topo", "(", "m", ")", "show_samples", ".", "update_viewer", "(", "dataset", ",", "samples_viewer", ",", "vis_batch", ",", "rows", ",", "cols", ")" ]
test the samples update_viewer function .
train
false
23,003
def _smart_truncate(content, length=100, suffix='...'): if (len(content) <= length): return content return (content[:length].rsplit(' ', 1)[0] + suffix)
[ "def", "_smart_truncate", "(", "content", ",", "length", "=", "100", ",", "suffix", "=", "'...'", ")", ":", "if", "(", "len", "(", "content", ")", "<=", "length", ")", ":", "return", "content", "return", "(", "content", "[", ":", "length", "]", ".", "rsplit", "(", "' '", ",", "1", ")", "[", "0", "]", "+", "suffix", ")" ]
smart truncate .
train
false
23,004
def deltify_pack_objects(objects, window_size=None): if (window_size is None): window_size = DEFAULT_PACK_DELTA_WINDOW_SIZE magic = [] for (obj, path) in objects: magic.append((obj.type_num, path, (- obj.raw_length()), obj)) magic.sort() possible_bases = deque() for (type_num, path, neg_length, o) in magic: raw = o.as_raw_string() winner = raw winner_base = None for base in possible_bases: if (base.type_num != type_num): continue delta = create_delta(base.as_raw_string(), raw) if (len(delta) < len(winner)): winner_base = base.sha().digest() winner = delta (yield (type_num, o.sha().digest(), winner_base, winner)) possible_bases.appendleft(o) while (len(possible_bases) > window_size): possible_bases.pop()
[ "def", "deltify_pack_objects", "(", "objects", ",", "window_size", "=", "None", ")", ":", "if", "(", "window_size", "is", "None", ")", ":", "window_size", "=", "DEFAULT_PACK_DELTA_WINDOW_SIZE", "magic", "=", "[", "]", "for", "(", "obj", ",", "path", ")", "in", "objects", ":", "magic", ".", "append", "(", "(", "obj", ".", "type_num", ",", "path", ",", "(", "-", "obj", ".", "raw_length", "(", ")", ")", ",", "obj", ")", ")", "magic", ".", "sort", "(", ")", "possible_bases", "=", "deque", "(", ")", "for", "(", "type_num", ",", "path", ",", "neg_length", ",", "o", ")", "in", "magic", ":", "raw", "=", "o", ".", "as_raw_string", "(", ")", "winner", "=", "raw", "winner_base", "=", "None", "for", "base", "in", "possible_bases", ":", "if", "(", "base", ".", "type_num", "!=", "type_num", ")", ":", "continue", "delta", "=", "create_delta", "(", "base", ".", "as_raw_string", "(", ")", ",", "raw", ")", "if", "(", "len", "(", "delta", ")", "<", "len", "(", "winner", ")", ")", ":", "winner_base", "=", "base", ".", "sha", "(", ")", ".", "digest", "(", ")", "winner", "=", "delta", "(", "yield", "(", "type_num", ",", "o", ".", "sha", "(", ")", ".", "digest", "(", ")", ",", "winner_base", ",", "winner", ")", ")", "possible_bases", ".", "appendleft", "(", "o", ")", "while", "(", "len", "(", "possible_bases", ")", ">", "window_size", ")", ":", "possible_bases", ".", "pop", "(", ")" ]
generate deltas for pack objects .
train
false
23,005
@hook.command(autohelp=False) def beats(text): if (text.lower() == 'wut'): return 'Instead of hours and minutes, the mean solar day is divided up into 1000 parts called ".beats". Each .beat lasts 1 minute and 26.4 seconds. Times are notated as a 3-digit number out of 1000 after midnight. So, @248 would indicate a time 248 .beats after midnight representing 248/1000 of a day, just over 5 hours and 57 minutes. There are no timezones.' elif (text.lower() == 'guide'): return '1 day = 1000 .beats, 1 hour = 41.666 .beats, 1 min = 0.6944 .beats, 1 second = 0.01157 .beats' t = time.gmtime() (h, m, s) = (t.tm_hour, t.tm_min, t.tm_sec) utc = (((3600 * h) + (60 * m)) + s) bmt = (utc + 3600) beat = (bmt / 86.4) if (beat > 1000): beat -= 1000 return ('Swatch Internet Time: @%06.2f' % beat)
[ "@", "hook", ".", "command", "(", "autohelp", "=", "False", ")", "def", "beats", "(", "text", ")", ":", "if", "(", "text", ".", "lower", "(", ")", "==", "'wut'", ")", ":", "return", "'Instead of hours and minutes, the mean solar day is divided up into 1000 parts called \".beats\". Each .beat lasts 1 minute and 26.4 seconds. Times are notated as a 3-digit number out of 1000 after midnight. So, @248 would indicate a time 248 .beats after midnight representing 248/1000 of a day, just over 5 hours and 57 minutes. There are no timezones.'", "elif", "(", "text", ".", "lower", "(", ")", "==", "'guide'", ")", ":", "return", "'1 day = 1000 .beats, 1 hour = 41.666 .beats, 1 min = 0.6944 .beats, 1 second = 0.01157 .beats'", "t", "=", "time", ".", "gmtime", "(", ")", "(", "h", ",", "m", ",", "s", ")", "=", "(", "t", ".", "tm_hour", ",", "t", ".", "tm_min", ",", "t", ".", "tm_sec", ")", "utc", "=", "(", "(", "(", "3600", "*", "h", ")", "+", "(", "60", "*", "m", ")", ")", "+", "s", ")", "bmt", "=", "(", "utc", "+", "3600", ")", "beat", "=", "(", "bmt", "/", "86.4", ")", "if", "(", "beat", ">", "1000", ")", ":", "beat", "-=", "1000", "return", "(", "'Swatch Internet Time: @%06.2f'", "%", "beat", ")" ]
shows the internet time in swatch beats .
train
false
23,006
def _get_all_files_in_directory(dir_path, excluded_glob_patterns): files_in_directory = [] for (_dir, _, files) in os.walk(dir_path): for file_name in files: filename = os.path.relpath(os.path.join(_dir, file_name), os.getcwd()) if (not any([fnmatch.fnmatch(filename, gp) for gp in excluded_glob_patterns])): files_in_directory.append(filename) return files_in_directory
[ "def", "_get_all_files_in_directory", "(", "dir_path", ",", "excluded_glob_patterns", ")", ":", "files_in_directory", "=", "[", "]", "for", "(", "_dir", ",", "_", ",", "files", ")", "in", "os", ".", "walk", "(", "dir_path", ")", ":", "for", "file_name", "in", "files", ":", "filename", "=", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "_dir", ",", "file_name", ")", ",", "os", ".", "getcwd", "(", ")", ")", "if", "(", "not", "any", "(", "[", "fnmatch", ".", "fnmatch", "(", "filename", ",", "gp", ")", "for", "gp", "in", "excluded_glob_patterns", "]", ")", ")", ":", "files_in_directory", ".", "append", "(", "filename", ")", "return", "files_in_directory" ]
recursively collects all files in directory and subdirectories of specified path .
train
false
23,007
def actor_url(parser, token): bits = token.split_contents() if (len(bits) != 2): raise TemplateSyntaxError('Accepted format {% actor_url [actor_instance] %}') else: return DisplayActivityActorUrl(*bits[1:])
[ "def", "actor_url", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "2", ")", ":", "raise", "TemplateSyntaxError", "(", "'Accepted format {% actor_url [actor_instance] %}'", ")", "else", ":", "return", "DisplayActivityActorUrl", "(", "*", "bits", "[", "1", ":", "]", ")" ]
renders the url for a particular actor instance <a href="{% actor_url request .
train
true
23,008
def log_query_count(name_point): log.info((u'At point named `%s` total of %s queries were ran' % (name_point, query_count)))
[ "def", "log_query_count", "(", "name_point", ")", ":", "log", ".", "info", "(", "(", "u'At point named `%s` total of %s queries were ran'", "%", "(", "name_point", ",", "query_count", ")", ")", ")" ]
debugging purposes .
train
false
23,009
def list_runners(*args): run_ = salt.runner.Runner(__opts__) runners = set() if (not args): for func in run_.functions: runners.add(func.split('.')[0]) return sorted(runners) for module in args: if ('*' in module): for func in fnmatch.filter(run_.functions, module): runners.add(func.split('.')[0]) else: for func in run_.functions: mod_test = func.split('.')[0] if (mod_test == module): runners.add(mod_test) return sorted(runners)
[ "def", "list_runners", "(", "*", "args", ")", ":", "run_", "=", "salt", ".", "runner", ".", "Runner", "(", "__opts__", ")", "runners", "=", "set", "(", ")", "if", "(", "not", "args", ")", ":", "for", "func", "in", "run_", ".", "functions", ":", "runners", ".", "add", "(", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "return", "sorted", "(", "runners", ")", "for", "module", "in", "args", ":", "if", "(", "'*'", "in", "module", ")", ":", "for", "func", "in", "fnmatch", ".", "filter", "(", "run_", ".", "functions", ",", "module", ")", ":", "runners", ".", "add", "(", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "else", ":", "for", "func", "in", "run_", ".", "functions", ":", "mod_test", "=", "func", ".", "split", "(", "'.'", ")", "[", "0", "]", "if", "(", "mod_test", "==", "module", ")", ":", "runners", ".", "add", "(", "mod_test", ")", "return", "sorted", "(", "runners", ")" ]
list the runners loaded on the minion .
train
true
23,010
@task def test_admin(ctx): module = 'admin_tests/' module_fmt = (' '.join(module) if isinstance(module, list) else module) admin_tasks.manage(ctx, 'test {}'.format(module_fmt))
[ "@", "task", "def", "test_admin", "(", "ctx", ")", ":", "module", "=", "'admin_tests/'", "module_fmt", "=", "(", "' '", ".", "join", "(", "module", ")", "if", "isinstance", "(", "module", ",", "list", ")", "else", "module", ")", "admin_tasks", ".", "manage", "(", "ctx", ",", "'test {}'", ".", "format", "(", "module_fmt", ")", ")" ]
run the admin test suite .
train
false
23,011
def get_os_ca_bundle_path(): for path in POSSIBLE_CA_BUNDLE_PATHS: if os.path.exists(path): return path return None
[ "def", "get_os_ca_bundle_path", "(", ")", ":", "for", "path", "in", "POSSIBLE_CA_BUNDLE_PATHS", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "path", "return", "None" ]
try to pick an available ca certificate bundle provided by the os .
train
false
23,013
def qos_specs_get(context, qos_specs_id): return IMPL.qos_specs_get(context, qos_specs_id)
[ "def", "qos_specs_get", "(", "context", ",", "qos_specs_id", ")", ":", "return", "IMPL", ".", "qos_specs_get", "(", "context", ",", "qos_specs_id", ")" ]
get all specification for a given qos_specs .
train
false
23,014
def inch(value): return dpi2px(value, 'in')
[ "def", "inch", "(", "value", ")", ":", "return", "dpi2px", "(", "value", ",", "'in'", ")" ]
convert from inches to pixels .
train
false
23,015
@register.inclusion_tag('horizon/common/_progress_bar.html') def horizon_progress_bar(current_val, max_val): return {'current_val': current_val, 'max_val': max_val}
[ "@", "register", ".", "inclusion_tag", "(", "'horizon/common/_progress_bar.html'", ")", "def", "horizon_progress_bar", "(", "current_val", ",", "max_val", ")", ":", "return", "{", "'current_val'", ":", "current_val", ",", "'max_val'", ":", "max_val", "}" ]
renders a progress bar based on parameters passed to the tag .
train
false
23,016
def list_tasks(location='\\'): pythoncom.CoInitialize() task_service = win32com.client.Dispatch('Schedule.Service') task_service.Connect() task_folder = task_service.GetFolder(location) tasks = task_folder.GetTasks(0) ret = [] for task in tasks: ret.append(task.Name) return ret
[ "def", "list_tasks", "(", "location", "=", "'\\\\'", ")", ":", "pythoncom", ".", "CoInitialize", "(", ")", "task_service", "=", "win32com", ".", "client", ".", "Dispatch", "(", "'Schedule.Service'", ")", "task_service", ".", "Connect", "(", ")", "task_folder", "=", "task_service", ".", "GetFolder", "(", "location", ")", "tasks", "=", "task_folder", ".", "GetTasks", "(", "0", ")", "ret", "=", "[", "]", "for", "task", "in", "tasks", ":", "ret", ".", "append", "(", "task", ".", "Name", ")", "return", "ret" ]
list all tasks located in a specific location in the task scheduler .
train
false
23,017
def tag_for_tool(tool): return slugify(tool.name, delim='-')
[ "def", "tag_for_tool", "(", "tool", ")", ":", "return", "slugify", "(", "tool", ".", "name", ",", "delim", "=", "'-'", ")" ]
generate a reasonable biostar tag for a tool .
train
false
23,018
def update_api_model_schema(restApiId, modelName, schema, region=None, key=None, keyid=None, profile=None): try: schema_json = (json.dumps(schema) if isinstance(schema, dict) else schema) conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) response = _api_model_patch_replace(conn, restApiId, modelName, '/schema', schema_json) return {'updated': True, 'model': _convert_datetime_str(response)} except ClientError as e: return {'updated': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "update_api_model_schema", "(", "restApiId", ",", "modelName", ",", "schema", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "schema_json", "=", "(", "json", ".", "dumps", "(", "schema", ")", "if", "isinstance", "(", "schema", ",", "dict", ")", "else", "schema", ")", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "response", "=", "_api_model_patch_replace", "(", "conn", ",", "restApiId", ",", "modelName", ",", "'/schema'", ",", "schema_json", ")", "return", "{", "'updated'", ":", "True", ",", "'model'", ":", "_convert_datetime_str", "(", "response", ")", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'updated'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
update the schema for the given model in the given restapiid cli example: .
train
true
23,019
def find_header(hdr): for dir in ['/usr/include', '/usr/local/include']: file = os.path.join(dir, hdr) if os.path.exists(file): return file raise ValueError(('Missing header: %s' % hdr))
[ "def", "find_header", "(", "hdr", ")", ":", "for", "dir", "in", "[", "'/usr/include'", ",", "'/usr/local/include'", "]", ":", "file", "=", "os", ".", "path", ".", "join", "(", "dir", ",", "hdr", ")", "if", "os", ".", "path", ".", "exists", "(", "file", ")", ":", "return", "file", "raise", "ValueError", "(", "(", "'Missing header: %s'", "%", "hdr", ")", ")" ]
find a given header in the system .
train
false
23,020
def is_nanpa_country(region_code): return (region_code in _NANPA_REGIONS)
[ "def", "is_nanpa_country", "(", "region_code", ")", ":", "return", "(", "region_code", "in", "_NANPA_REGIONS", ")" ]
checks if this region is a nanpa region .
train
false
23,021
def init_evolutions(app, created_models, **kwargs): if (FileDiff in created_models): return try: latest_version = django_evolution.Version.objects.latest(u'when') except django_evolution.Version.DoesNotExist: latest_version = None if latest_version: try: FileDiff.objects.filter(parent_diff64=u'') return except: django_evolution.Version.objects.all().delete() django_evolution.Evolution.objects.all().delete() call_command(u'loaddata', u'admin/fixtures/initial_evolution_schema.json', verbosity=0)
[ "def", "init_evolutions", "(", "app", ",", "created_models", ",", "**", "kwargs", ")", ":", "if", "(", "FileDiff", "in", "created_models", ")", ":", "return", "try", ":", "latest_version", "=", "django_evolution", ".", "Version", ".", "objects", ".", "latest", "(", "u'when'", ")", "except", "django_evolution", ".", "Version", ".", "DoesNotExist", ":", "latest_version", "=", "None", "if", "latest_version", ":", "try", ":", "FileDiff", ".", "objects", ".", "filter", "(", "parent_diff64", "=", "u''", ")", "return", "except", ":", "django_evolution", ".", "Version", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")", "django_evolution", ".", "Evolution", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")", "call_command", "(", "u'loaddata'", ",", "u'admin/fixtures/initial_evolution_schema.json'", ",", "verbosity", "=", "0", ")" ]
attempt to initialize the django evolution schema signatures .
train
false
23,022
def clean_txt(txt): if isbytestring(txt): txt = txt.decode('utf-8', 'replace') txt = '\n'.join([line.rstrip() for line in txt.splitlines()]) txt = re.sub('(?m)(?<=^)([ ]{2,}| DCTB +)(?=.)', ('&nbsp;' * 4), txt) txt = re.sub('[ ]{2,}', ' ', txt) txt = re.sub('^\\s+(?=.)', '', txt) txt = re.sub('(?<=.)\\s+$', '', txt) txt = re.sub('\n{5,}', '\n\n\n\n', txt) txt = clean_ascii_chars(txt) return txt
[ "def", "clean_txt", "(", "txt", ")", ":", "if", "isbytestring", "(", "txt", ")", ":", "txt", "=", "txt", ".", "decode", "(", "'utf-8'", ",", "'replace'", ")", "txt", "=", "'\\n'", ".", "join", "(", "[", "line", ".", "rstrip", "(", ")", "for", "line", "in", "txt", ".", "splitlines", "(", ")", "]", ")", "txt", "=", "re", ".", "sub", "(", "'(?m)(?<=^)([ ]{2,}| DCTB +)(?=.)'", ",", "(", "'&nbsp;'", "*", "4", ")", ",", "txt", ")", "txt", "=", "re", ".", "sub", "(", "'[ ]{2,}'", ",", "' '", ",", "txt", ")", "txt", "=", "re", ".", "sub", "(", "'^\\\\s+(?=.)'", ",", "''", ",", "txt", ")", "txt", "=", "re", ".", "sub", "(", "'(?<=.)\\\\s+$'", ",", "''", ",", "txt", ")", "txt", "=", "re", ".", "sub", "(", "'\\n{5,}'", ",", "'\\n\\n\\n\\n'", ",", "txt", ")", "txt", "=", "clean_ascii_chars", "(", "txt", ")", "return", "txt" ]
run transformations on the text to put it into consistent state .
train
false
23,026
def check_bgp(net_connect, cmd='show run | inc router bgp'): output = net_connect.send_command_expect(cmd) return ('bgp' in output)
[ "def", "check_bgp", "(", "net_connect", ",", "cmd", "=", "'show run | inc router bgp'", ")", ":", "output", "=", "net_connect", ".", "send_command_expect", "(", "cmd", ")", "return", "(", "'bgp'", "in", "output", ")" ]
check whether bgp is currently configured on device .
train
false
23,028
def read_str(fid, count=1): dtype = np.dtype(('>S%i' % count)) string = fid.read(dtype.itemsize) data = np.fromstring(string, dtype=dtype)[0] bytestr = b('').join([data[0:(data.index(b('\x00')) if (b('\x00') in data) else count)]]) return str(bytestr.decode('ascii'))
[ "def", "read_str", "(", "fid", ",", "count", "=", "1", ")", ":", "dtype", "=", "np", ".", "dtype", "(", "(", "'>S%i'", "%", "count", ")", ")", "string", "=", "fid", ".", "read", "(", "dtype", ".", "itemsize", ")", "data", "=", "np", ".", "fromstring", "(", "string", ",", "dtype", "=", "dtype", ")", "[", "0", "]", "bytestr", "=", "b", "(", "''", ")", ".", "join", "(", "[", "data", "[", "0", ":", "(", "data", ".", "index", "(", "b", "(", "'\\x00'", ")", ")", "if", "(", "b", "(", "'\\x00'", ")", "in", "data", ")", "else", "count", ")", "]", "]", ")", "return", "str", "(", "bytestr", ".", "decode", "(", "'ascii'", ")", ")" ]
if a python string literal begins at the specified position in the given string .
train
false
23,029
def referer_str(request): referrer = request.headers.get('Referer') if (referrer is None): return referrer return to_native_str(referrer, errors='replace')
[ "def", "referer_str", "(", "request", ")", ":", "referrer", "=", "request", ".", "headers", ".", "get", "(", "'Referer'", ")", "if", "(", "referrer", "is", "None", ")", ":", "return", "referrer", "return", "to_native_str", "(", "referrer", ",", "errors", "=", "'replace'", ")" ]
return referer http header suitable for logging .
train
false
23,030
def config_set(key, value=None, multivar=None, cwd=None, user=None, password=None, ignore_retcode=False, **kwargs): kwargs = salt.utils.clean_kwargs(**kwargs) add_ = kwargs.pop('add', False) global_ = kwargs.pop('global', False) if kwargs: salt.utils.invalid_kwargs(kwargs) if (cwd is None): if (not global_): raise SaltInvocationError("'cwd' argument required unless global=True") else: cwd = _expand_path(cwd, user) if all(((x is not None) for x in (value, multivar))): raise SaltInvocationError("Only one of 'value' and 'multivar' is permitted") if (value is not None): if (not isinstance(value, six.string_types)): value = str(value) if (multivar is not None): if (not isinstance(multivar, list)): try: multivar = multivar.split(',') except AttributeError: multivar = str(multivar).split(',') else: new_multivar = [] for item in multivar: if isinstance(item, six.string_types): new_multivar.append(item) else: new_multivar.append(str(item)) multivar = new_multivar command_prefix = ['git', 'config'] if global_: command_prefix.append('--global') if (value is not None): command = copy.copy(command_prefix) if add_: command.append('--add') else: command.append('--replace-all') command.extend([key, value]) _git_run(command, cwd=cwd, user=user, password=password, ignore_retcode=ignore_retcode) else: for (idx, target) in enumerate(multivar): command = copy.copy(command_prefix) if (idx == 0): command.append('--replace-all') else: command.append('--add') command.extend([key, target]) _git_run(command, cwd=cwd, user=user, password=password, ignore_retcode=ignore_retcode) return config_get(key, user=user, password=password, cwd=cwd, ignore_retcode=ignore_retcode, **{'all': True, 'global': global_})
[ "def", "config_set", "(", "key", ",", "value", "=", "None", ",", "multivar", "=", "None", ",", "cwd", "=", "None", ",", "user", "=", "None", ",", "password", "=", "None", ",", "ignore_retcode", "=", "False", ",", "**", "kwargs", ")", ":", "kwargs", "=", "salt", ".", "utils", ".", "clean_kwargs", "(", "**", "kwargs", ")", "add_", "=", "kwargs", ".", "pop", "(", "'add'", ",", "False", ")", "global_", "=", "kwargs", ".", "pop", "(", "'global'", ",", "False", ")", "if", "kwargs", ":", "salt", ".", "utils", ".", "invalid_kwargs", "(", "kwargs", ")", "if", "(", "cwd", "is", "None", ")", ":", "if", "(", "not", "global_", ")", ":", "raise", "SaltInvocationError", "(", "\"'cwd' argument required unless global=True\"", ")", "else", ":", "cwd", "=", "_expand_path", "(", "cwd", ",", "user", ")", "if", "all", "(", "(", "(", "x", "is", "not", "None", ")", "for", "x", "in", "(", "value", ",", "multivar", ")", ")", ")", ":", "raise", "SaltInvocationError", "(", "\"Only one of 'value' and 'multivar' is permitted\"", ")", "if", "(", "value", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ")", ":", "value", "=", "str", "(", "value", ")", "if", "(", "multivar", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "multivar", ",", "list", ")", ")", ":", "try", ":", "multivar", "=", "multivar", ".", "split", "(", "','", ")", "except", "AttributeError", ":", "multivar", "=", "str", "(", "multivar", ")", ".", "split", "(", "','", ")", "else", ":", "new_multivar", "=", "[", "]", "for", "item", "in", "multivar", ":", "if", "isinstance", "(", "item", ",", "six", ".", "string_types", ")", ":", "new_multivar", ".", "append", "(", "item", ")", "else", ":", "new_multivar", ".", "append", "(", "str", "(", "item", ")", ")", "multivar", "=", "new_multivar", "command_prefix", "=", "[", "'git'", ",", "'config'", "]", "if", "global_", ":", "command_prefix", ".", "append", "(", "'--global'", ")", "if", "(", "value", "is", "not", "None", ")", ":", "command", "=", "copy", ".", "copy", "(", "command_prefix", ")", "if", "add_", ":", "command", ".", "append", "(", "'--add'", ")", "else", ":", "command", ".", "append", "(", "'--replace-all'", ")", "command", ".", "extend", "(", "[", "key", ",", "value", "]", ")", "_git_run", "(", "command", ",", "cwd", "=", "cwd", ",", "user", "=", "user", ",", "password", "=", "password", ",", "ignore_retcode", "=", "ignore_retcode", ")", "else", ":", "for", "(", "idx", ",", "target", ")", "in", "enumerate", "(", "multivar", ")", ":", "command", "=", "copy", ".", "copy", "(", "command_prefix", ")", "if", "(", "idx", "==", "0", ")", ":", "command", ".", "append", "(", "'--replace-all'", ")", "else", ":", "command", ".", "append", "(", "'--add'", ")", "command", ".", "extend", "(", "[", "key", ",", "target", "]", ")", "_git_run", "(", "command", ",", "cwd", "=", "cwd", ",", "user", "=", "user", ",", "password", "=", "password", ",", "ignore_retcode", "=", "ignore_retcode", ")", "return", "config_get", "(", "key", ",", "user", "=", "user", ",", "password", "=", "password", ",", "cwd", "=", "cwd", ",", "ignore_retcode", "=", "ignore_retcode", ",", "**", "{", "'all'", ":", "True", ",", "'global'", ":", "global_", "}", ")" ]
set redis server configuration values cli example: .
train
true
23,032
def _read_complex_double(fid, tag, shape, rlims): if (shape is not None): shape = (shape[0], (shape[1] * 2)) d = _fromstring_rows(fid, tag.size, dtype='>f8', shape=shape, rlims=rlims) d = (d[::2] + (1j * d[1::2])) return d
[ "def", "_read_complex_double", "(", "fid", ",", "tag", ",", "shape", ",", "rlims", ")", ":", "if", "(", "shape", "is", "not", "None", ")", ":", "shape", "=", "(", "shape", "[", "0", "]", ",", "(", "shape", "[", "1", "]", "*", "2", ")", ")", "d", "=", "_fromstring_rows", "(", "fid", ",", "tag", ".", "size", ",", "dtype", "=", "'>f8'", ",", "shape", "=", "shape", ",", "rlims", "=", "rlims", ")", "d", "=", "(", "d", "[", ":", ":", "2", "]", "+", "(", "1j", "*", "d", "[", "1", ":", ":", "2", "]", ")", ")", "return", "d" ]
read complex double tag .
train
false
23,033
def new_local_dict(): d = {} _local_dict_stack.append(d) return d
[ "def", "new_local_dict", "(", ")", ":", "d", "=", "{", "}", "_local_dict_stack", ".", "append", "(", "d", ")", "return", "d" ]
initialize a new local dictionary & push it onto the stack .
train
false
23,034
def survey_answerlist_dataTable_post(r): current.response.s3.actions = [{'label': str(current.messages.UPDATE), '_class': 'action-btn edit', 'url': URL(c='survey', f='series', args=[r.id, 'complete', '[id]', 'update'])}]
[ "def", "survey_answerlist_dataTable_post", "(", "r", ")", ":", "current", ".", "response", ".", "s3", ".", "actions", "=", "[", "{", "'label'", ":", "str", "(", "current", ".", "messages", ".", "UPDATE", ")", ",", "'_class'", ":", "'action-btn edit'", ",", "'url'", ":", "URL", "(", "c", "=", "'survey'", ",", "f", "=", "'series'", ",", "args", "=", "[", "r", ".", "id", ",", "'complete'", ",", "'[id]'", ",", "'update'", "]", ")", "}", "]" ]
replace action buttons .
train
false
23,035
def dup_sqf_p(f, K): if (not f): return True else: return (not dup_degree(dup_gcd(f, dup_diff(f, 1, K), K)))
[ "def", "dup_sqf_p", "(", "f", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "return", "True", "else", ":", "return", "(", "not", "dup_degree", "(", "dup_gcd", "(", "f", ",", "dup_diff", "(", "f", ",", "1", ",", "K", ")", ",", "K", ")", ")", ")" ]
return true if f is a square-free polynomial in k[x] .
train
false
23,036
def dump_file(filename, head=None): if (head is None): log.info(('%s' % filename)) else: log.info(head) file = open(filename) try: log.info(file.read()) finally: file.close()
[ "def", "dump_file", "(", "filename", ",", "head", "=", "None", ")", ":", "if", "(", "head", "is", "None", ")", ":", "log", ".", "info", "(", "(", "'%s'", "%", "filename", ")", ")", "else", ":", "log", ".", "info", "(", "head", ")", "file", "=", "open", "(", "filename", ")", "try", ":", "log", ".", "info", "(", "file", ".", "read", "(", ")", ")", "finally", ":", "file", ".", "close", "(", ")" ]
dumps a file content into log .
train
false
23,037
def ip_bracket(addr): if (addr and (':' in addr) and (not addr.startswith('['))): return '[{0}]'.format(addr) return addr
[ "def", "ip_bracket", "(", "addr", ")", ":", "if", "(", "addr", "and", "(", "':'", "in", "addr", ")", "and", "(", "not", "addr", ".", "startswith", "(", "'['", ")", ")", ")", ":", "return", "'[{0}]'", ".", "format", "(", "addr", ")", "return", "addr" ]
convert ip address representation to zmq format .
train
false
23,039
def stubout_lookup_image(stubs): def f(_1, _2, _3, _4): raise Exception('Test Exception raised by fake lookup_image') stubs.Set(vm_utils, 'lookup_image', f)
[ "def", "stubout_lookup_image", "(", "stubs", ")", ":", "def", "f", "(", "_1", ",", "_2", ",", "_3", ",", "_4", ")", ":", "raise", "Exception", "(", "'Test Exception raised by fake lookup_image'", ")", "stubs", ".", "Set", "(", "vm_utils", ",", "'lookup_image'", ",", "f", ")" ]
simulates a failure in lookup image .
train
false
23,041
def should_write(write_opt=None): return _bool_fallback(write_opt, config['import']['write'].get(bool))
[ "def", "should_write", "(", "write_opt", "=", "None", ")", ":", "return", "_bool_fallback", "(", "write_opt", ",", "config", "[", "'import'", "]", "[", "'write'", "]", ".", "get", "(", "bool", ")", ")" ]
decide whether a command that updates metadata should also write tags .
train
false
23,042
def iterator(dictionary): if (PY_MAJOR_VERSION > 2): return dictionary.items() else: return dictionary.iteritems()
[ "def", "iterator", "(", "dictionary", ")", ":", "if", "(", "PY_MAJOR_VERSION", ">", "2", ")", ":", "return", "dictionary", ".", "items", "(", ")", "else", ":", "return", "dictionary", ".", "iteritems", "(", ")" ]
for cross compatibility between python 2 and python 3 dictionaries .
train
false
23,043
def _get_incdec_value(match, incdec, url, count): (pre, zeroes, number, post) = match.groups() val = int(number) if (incdec == 'decrement'): if (val <= 0): raise IncDecError("Can't decrement {}!".format(val), url) val -= count elif (incdec == 'increment'): val += count else: raise ValueError('Invalid value {} for indec!'.format(incdec)) if zeroes: if (len(number) < len(str(val))): zeroes = zeroes[1:] elif (len(number) > len(str(val))): zeroes += '0' return ''.join([pre, zeroes, str(val), post])
[ "def", "_get_incdec_value", "(", "match", ",", "incdec", ",", "url", ",", "count", ")", ":", "(", "pre", ",", "zeroes", ",", "number", ",", "post", ")", "=", "match", ".", "groups", "(", ")", "val", "=", "int", "(", "number", ")", "if", "(", "incdec", "==", "'decrement'", ")", ":", "if", "(", "val", "<=", "0", ")", ":", "raise", "IncDecError", "(", "\"Can't decrement {}!\"", ".", "format", "(", "val", ")", ",", "url", ")", "val", "-=", "count", "elif", "(", "incdec", "==", "'increment'", ")", ":", "val", "+=", "count", "else", ":", "raise", "ValueError", "(", "'Invalid value {} for indec!'", ".", "format", "(", "incdec", ")", ")", "if", "zeroes", ":", "if", "(", "len", "(", "number", ")", "<", "len", "(", "str", "(", "val", ")", ")", ")", ":", "zeroes", "=", "zeroes", "[", "1", ":", "]", "elif", "(", "len", "(", "number", ")", ">", "len", "(", "str", "(", "val", ")", ")", ")", ":", "zeroes", "+=", "'0'", "return", "''", ".", "join", "(", "[", "pre", ",", "zeroes", ",", "str", "(", "val", ")", ",", "post", "]", ")" ]
get an incremented/decremented url based on a url match .
train
false
23,044
def _empty_bucket(bucket): for blob in bucket.list_blobs(): try: blob.delete() except exceptions.NotFound: pass
[ "def", "_empty_bucket", "(", "bucket", ")", ":", "for", "blob", "in", "bucket", ".", "list_blobs", "(", ")", ":", "try", ":", "blob", ".", "delete", "(", ")", "except", "exceptions", ".", "NotFound", ":", "pass" ]
empty a bucket of all existing blobs .
train
false
23,045
def _insert_axis_in_shape(context, builder, orig_shape, ndim, axis): assert (len(orig_shape) == (ndim - 1)) ll_shty = ir.ArrayType(cgutils.intp_t, ndim) shapes = cgutils.alloca_once(builder, ll_shty) one = cgutils.intp_t(1) for dim in range((ndim - 1)): ll_dim = cgutils.intp_t(dim) after_axis = builder.icmp_signed('>=', ll_dim, axis) sh = orig_shape[dim] idx = builder.select(after_axis, builder.add(ll_dim, one), ll_dim) builder.store(sh, cgutils.gep_inbounds(builder, shapes, 0, idx)) builder.store(one, cgutils.gep_inbounds(builder, shapes, 0, axis)) return cgutils.unpack_tuple(builder, builder.load(shapes))
[ "def", "_insert_axis_in_shape", "(", "context", ",", "builder", ",", "orig_shape", ",", "ndim", ",", "axis", ")", ":", "assert", "(", "len", "(", "orig_shape", ")", "==", "(", "ndim", "-", "1", ")", ")", "ll_shty", "=", "ir", ".", "ArrayType", "(", "cgutils", ".", "intp_t", ",", "ndim", ")", "shapes", "=", "cgutils", ".", "alloca_once", "(", "builder", ",", "ll_shty", ")", "one", "=", "cgutils", ".", "intp_t", "(", "1", ")", "for", "dim", "in", "range", "(", "(", "ndim", "-", "1", ")", ")", ":", "ll_dim", "=", "cgutils", ".", "intp_t", "(", "dim", ")", "after_axis", "=", "builder", ".", "icmp_signed", "(", "'>='", ",", "ll_dim", ",", "axis", ")", "sh", "=", "orig_shape", "[", "dim", "]", "idx", "=", "builder", ".", "select", "(", "after_axis", ",", "builder", ".", "add", "(", "ll_dim", ",", "one", ")", ",", "ll_dim", ")", "builder", ".", "store", "(", "sh", ",", "cgutils", ".", "gep_inbounds", "(", "builder", ",", "shapes", ",", "0", ",", "idx", ")", ")", "builder", ".", "store", "(", "one", ",", "cgutils", ".", "gep_inbounds", "(", "builder", ",", "shapes", ",", "0", ",", "axis", ")", ")", "return", "cgutils", ".", "unpack_tuple", "(", "builder", ",", "builder", ".", "load", "(", "shapes", ")", ")" ]
compute shape with the new axis inserted e .
train
false
23,046
def key_type_n(index): def transform_function(key): id_or_name = _key_id_or_name_n(key, index) if (id_or_name is None): return '' if isinstance(id_or_name, basestring): return KEY_TYPE_NAME return KEY_TYPE_ID return transform_function
[ "def", "key_type_n", "(", "index", ")", ":", "def", "transform_function", "(", "key", ")", ":", "id_or_name", "=", "_key_id_or_name_n", "(", "key", ",", "index", ")", "if", "(", "id_or_name", "is", "None", ")", ":", "return", "''", "if", "isinstance", "(", "id_or_name", ",", "basestring", ")", ":", "return", "KEY_TYPE_NAME", "return", "KEY_TYPE_ID", "return", "transform_function" ]
pull out the nth key type from a key which has parents .
train
false
23,047
def getipaddrinfo(host): try: return [addrinfo for addrinfo in socket.getaddrinfo(host, None) if (((addrinfo[0] == socket.AF_INET) or (addrinfo[0] == socket.AF_INET6)) and isinstance(addrinfo[4][0], basestring))] except socket.error: return []
[ "def", "getipaddrinfo", "(", "host", ")", ":", "try", ":", "return", "[", "addrinfo", "for", "addrinfo", "in", "socket", ".", "getaddrinfo", "(", "host", ",", "None", ")", "if", "(", "(", "(", "addrinfo", "[", "0", "]", "==", "socket", ".", "AF_INET", ")", "or", "(", "addrinfo", "[", "0", "]", "==", "socket", ".", "AF_INET6", ")", ")", "and", "isinstance", "(", "addrinfo", "[", "4", "]", "[", "0", "]", ",", "basestring", ")", ")", "]", "except", "socket", ".", "error", ":", "return", "[", "]" ]
filter out non-ip and bad ip addresses from getaddrinfo .
train
false
23,048
@cache_permission def can_lock_translation(user, project): return check_permission(user, project, 'trans.lock_translation')
[ "@", "cache_permission", "def", "can_lock_translation", "(", "user", ",", "project", ")", ":", "return", "check_permission", "(", "user", ",", "project", ",", "'trans.lock_translation'", ")" ]
checks whether user can lock translation .
train
false
23,049
def strip_bom(data): for bom in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE, codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE, codecs.BOM_UTF8): if data.startswith(bom): data = data[len(bom):] break return data
[ "def", "strip_bom", "(", "data", ")", ":", "for", "bom", "in", "(", "codecs", ".", "BOM_UTF32_BE", ",", "codecs", ".", "BOM_UTF32_LE", ",", "codecs", ".", "BOM_UTF16_BE", ",", "codecs", ".", "BOM_UTF16_LE", ",", "codecs", ".", "BOM_UTF8", ")", ":", "if", "data", ".", "startswith", "(", "bom", ")", ":", "data", "=", "data", "[", "len", "(", "bom", ")", ":", "]", "break", "return", "data" ]
strip the bom from byte string data .
train
false
23,050
def remove_section(file_name, section, separator='='): inifile = _Ini.get_ini_file(file_name, separator=separator) section = inifile.pop(section, {}) inifile.flush() ret = {} for (key, value) in six.iteritems(section): if (key[0] != '#'): ret.update({key: value}) return ret
[ "def", "remove_section", "(", "file_name", ",", "section", ",", "separator", "=", "'='", ")", ":", "inifile", "=", "_Ini", ".", "get_ini_file", "(", "file_name", ",", "separator", "=", "separator", ")", "section", "=", "inifile", ".", "pop", "(", "section", ",", "{", "}", ")", "inifile", ".", "flush", "(", ")", "ret", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "section", ")", ":", "if", "(", "key", "[", "0", "]", "!=", "'#'", ")", ":", "ret", ".", "update", "(", "{", "key", ":", "value", "}", ")", "return", "ret" ]
remove a section in an ini file .
train
true
23,051
def _lookup_style(element, names): return (_STYLES.get(('_' + element), '') + ''.join([_STYLES.get(name, '') for name in names]))
[ "def", "_lookup_style", "(", "element", ",", "names", ")", ":", "return", "(", "_STYLES", ".", "get", "(", "(", "'_'", "+", "element", ")", ",", "''", ")", "+", "''", ".", "join", "(", "[", "_STYLES", ".", "get", "(", "name", ",", "''", ")", "for", "name", "in", "names", "]", ")", ")" ]
lookup style by either element name or the list of classes .
train
true
23,052
def gender(word, pos=NOUN): w = word.lower() if (pos == NOUN): if w.endswith(gender_masculine): return MASCULINE if w.endswith(gender_feminine): return FEMININE if w.endswith(gender_neuter): return NEUTER for g in gender_majority_vote: if w.endswith(gender_majority_vote[g]): return g
[ "def", "gender", "(", "word", ",", "pos", "=", "NOUN", ")", ":", "w", "=", "word", ".", "lower", "(", ")", "if", "(", "pos", "==", "NOUN", ")", ":", "if", "w", ".", "endswith", "(", "gender_masculine", ")", ":", "return", "MASCULINE", "if", "w", ".", "endswith", "(", "gender_feminine", ")", ":", "return", "FEMININE", "if", "w", ".", "endswith", "(", "gender_neuter", ")", ":", "return", "NEUTER", "for", "g", "in", "gender_majority_vote", ":", "if", "w", ".", "endswith", "(", "gender_majority_vote", "[", "g", "]", ")", ":", "return", "g" ]
returns the gender for nouns .
train
true
23,053
def _to_int(byte_string): return int(byte_string[::(-1)].encode('hex'), 16)
[ "def", "_to_int", "(", "byte_string", ")", ":", "return", "int", "(", "byte_string", "[", ":", ":", "(", "-", "1", ")", "]", ".", "encode", "(", "'hex'", ")", ",", "16", ")" ]
convert a string of bytes to int .
train
false
23,055
def HashPassword(password, salt): prf = (lambda p, s: HMAC.new(p, s, SHA512).digest()) return base64.b64encode(PBKDF2(escape.utf8(password), base64.b64decode(salt), count=1000, prf=prf))
[ "def", "HashPassword", "(", "password", ",", "salt", ")", ":", "prf", "=", "(", "lambda", "p", ",", "s", ":", "HMAC", ".", "new", "(", "p", ",", "s", ",", "SHA512", ")", ".", "digest", "(", ")", ")", "return", "base64", ".", "b64encode", "(", "PBKDF2", "(", "escape", ".", "utf8", "(", "password", ")", ",", "base64", ".", "b64decode", "(", "salt", ")", ",", "count", "=", "1000", ",", "prf", "=", "prf", ")", ")" ]
computes the hash of the given password using 1000 sha512 iterations .
train
false
23,056
def publish_over_ssh(registry, xml_parent, data): ssh(registry, xml_parent, data)
[ "def", "publish_over_ssh", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "ssh", "(", "registry", ",", "xml_parent", ",", "data", ")" ]
yaml: publish-over-ssh send files or execute commands over ssh .
train
false
23,057
def get_systemd_os_info(filepath='/etc/os-release'): os_name = _get_systemd_os_release_var('ID', filepath=filepath) os_version = _get_systemd_os_release_var('VERSION_ID', filepath=filepath) return (os_name, os_version)
[ "def", "get_systemd_os_info", "(", "filepath", "=", "'/etc/os-release'", ")", ":", "os_name", "=", "_get_systemd_os_release_var", "(", "'ID'", ",", "filepath", "=", "filepath", ")", "os_version", "=", "_get_systemd_os_release_var", "(", "'VERSION_ID'", ",", "filepath", "=", "filepath", ")", "return", "(", "os_name", ",", "os_version", ")" ]
parse systemd /etc/os-release for distribution information .
train
false
23,058
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
23,061
def server_exists(s_name, ip=None, s_state=None, **connection_args): server = _server_get(s_name, **connection_args) if (server is None): return False if ((ip is not None) and (ip != server.get_ipaddress())): return False if ((s_state is not None) and (s_state.upper() != server.get_state())): return False return True
[ "def", "server_exists", "(", "s_name", ",", "ip", "=", "None", ",", "s_state", "=", "None", ",", "**", "connection_args", ")", ":", "server", "=", "_server_get", "(", "s_name", ",", "**", "connection_args", ")", "if", "(", "server", "is", "None", ")", ":", "return", "False", "if", "(", "(", "ip", "is", "not", "None", ")", "and", "(", "ip", "!=", "server", ".", "get_ipaddress", "(", ")", ")", ")", ":", "return", "False", "if", "(", "(", "s_state", "is", "not", "None", ")", "and", "(", "s_state", ".", "upper", "(", ")", "!=", "server", ".", "get_state", "(", ")", ")", ")", ":", "return", "False", "return", "True" ]
checks if a server exists cli example: .
train
true
23,062
def expand_power_exp(expr, deep=True): return sympify(expr).expand(deep=deep, complex=False, basic=False, log=False, mul=False, power_exp=True, power_base=False, multinomial=False)
[ "def", "expand_power_exp", "(", "expr", ",", "deep", "=", "True", ")", ":", "return", "sympify", "(", "expr", ")", ".", "expand", "(", "deep", "=", "deep", ",", "complex", "=", "False", ",", "basic", "=", "False", ",", "log", "=", "False", ",", "mul", "=", "False", ",", "power_exp", "=", "True", ",", "power_base", "=", "False", ",", "multinomial", "=", "False", ")" ]
wrapper around expand that only uses the power_exp hint .
train
false
23,064
def removeH1(document): h1 = domhelpers.findNodesNamed(document, 'h1') empty = dom.Element('span') for node in h1: node.parentNode.replaceChild(empty, node)
[ "def", "removeH1", "(", "document", ")", ":", "h1", "=", "domhelpers", ".", "findNodesNamed", "(", "document", ",", "'h1'", ")", "empty", "=", "dom", ".", "Element", "(", "'span'", ")", "for", "node", "in", "h1", ":", "node", ".", "parentNode", ".", "replaceChild", "(", "empty", ",", "node", ")" ]
replace all c{h1} nodes in the given document with empty c{span} nodes .
train
false
23,065
def zdt3(individual): g = (1.0 + ((9.0 * sum(individual[1:])) / (len(individual) - 1))) f1 = individual[0] f2 = (g * ((1 - sqrt((f1 / g))) - ((f1 / g) * sin(((10 * pi) * f1))))) return (f1, f2)
[ "def", "zdt3", "(", "individual", ")", ":", "g", "=", "(", "1.0", "+", "(", "(", "9.0", "*", "sum", "(", "individual", "[", "1", ":", "]", ")", ")", "/", "(", "len", "(", "individual", ")", "-", "1", ")", ")", ")", "f1", "=", "individual", "[", "0", "]", "f2", "=", "(", "g", "*", "(", "(", "1", "-", "sqrt", "(", "(", "f1", "/", "g", ")", ")", ")", "-", "(", "(", "f1", "/", "g", ")", "*", "sin", "(", "(", "(", "10", "*", "pi", ")", "*", "f1", ")", ")", ")", ")", ")", "return", "(", "f1", ",", "f2", ")" ]
zdt3 multiobjective function .
train
false
23,066
@image_comparison(baseline_images=[u'EventCollection_plot__set_linewidth']) def test__EventCollection__set_linewidth(): (splt, coll, _) = generate_EventCollection_plot() new_linewidth = 5 coll.set_linewidth(new_linewidth) assert_equal(coll.get_linewidth(), new_linewidth) splt.set_title(u'EventCollection: set_linewidth')
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'EventCollection_plot__set_linewidth'", "]", ")", "def", "test__EventCollection__set_linewidth", "(", ")", ":", "(", "splt", ",", "coll", ",", "_", ")", "=", "generate_EventCollection_plot", "(", ")", "new_linewidth", "=", "5", "coll", ".", "set_linewidth", "(", "new_linewidth", ")", "assert_equal", "(", "coll", ".", "get_linewidth", "(", ")", ",", "new_linewidth", ")", "splt", ".", "set_title", "(", "u'EventCollection: set_linewidth'", ")" ]
check to make sure set_linestyle works properly .
train
false
23,067
@contextmanager def target_cell(context, cell_mapping): original_db_connection = context.db_connection from nova import db db_connection_string = cell_mapping.database_connection context.db_connection = db.create_context_manager(db_connection_string) try: (yield context) finally: context.db_connection = original_db_connection
[ "@", "contextmanager", "def", "target_cell", "(", "context", ",", "cell_mapping", ")", ":", "original_db_connection", "=", "context", ".", "db_connection", "from", "nova", "import", "db", "db_connection_string", "=", "cell_mapping", ".", "database_connection", "context", ".", "db_connection", "=", "db", ".", "create_context_manager", "(", "db_connection_string", ")", "try", ":", "(", "yield", "context", ")", "finally", ":", "context", ".", "db_connection", "=", "original_db_connection" ]
adds database connection information to the context for communicating with the given target cell .
train
false
23,068
def convert_column_args(method): def column_wrapper(self, *args, **kwargs): try: if len(args): int(args[0]) except ValueError: (cell_1, cell_2) = [(col + '1') for col in args[0].split(':')] (_, col_1) = xl_cell_to_rowcol(cell_1) (_, col_2) = xl_cell_to_rowcol(cell_2) new_args = [col_1, col_2] new_args.extend(args[1:]) args = new_args return method(self, *args, **kwargs) return column_wrapper
[ "def", "convert_column_args", "(", "method", ")", ":", "def", "column_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "if", "len", "(", "args", ")", ":", "int", "(", "args", "[", "0", "]", ")", "except", "ValueError", ":", "(", "cell_1", ",", "cell_2", ")", "=", "[", "(", "col", "+", "'1'", ")", "for", "col", "in", "args", "[", "0", "]", ".", "split", "(", "':'", ")", "]", "(", "_", ",", "col_1", ")", "=", "xl_cell_to_rowcol", "(", "cell_1", ")", "(", "_", ",", "col_2", ")", "=", "xl_cell_to_rowcol", "(", "cell_2", ")", "new_args", "=", "[", "col_1", ",", "col_2", "]", "new_args", ".", "extend", "(", "args", "[", "1", ":", "]", ")", "args", "=", "new_args", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "column_wrapper" ]
decorator function to convert a1 notation in columns method calls to the default row/col notation .
train
false
23,069
def has_vary_header(response, header_query): if (not response.has_header(u'Vary')): return False vary_headers = cc_delim_re.split(response[u'Vary']) existing_headers = set([header.lower() for header in vary_headers]) return (header_query.lower() in existing_headers)
[ "def", "has_vary_header", "(", "response", ",", "header_query", ")", ":", "if", "(", "not", "response", ".", "has_header", "(", "u'Vary'", ")", ")", ":", "return", "False", "vary_headers", "=", "cc_delim_re", ".", "split", "(", "response", "[", "u'Vary'", "]", ")", "existing_headers", "=", "set", "(", "[", "header", ".", "lower", "(", ")", "for", "header", "in", "vary_headers", "]", ")", "return", "(", "header_query", ".", "lower", "(", ")", "in", "existing_headers", ")" ]
checks to see if the response has a given header name in its vary header .
train
true
23,070
def is_program_installed(basename): for path in os.environ['PATH'].split(os.pathsep): abspath = osp.join(path, basename) if osp.isfile(abspath): return abspath
[ "def", "is_program_installed", "(", "basename", ")", ":", "for", "path", "in", "os", ".", "environ", "[", "'PATH'", "]", ".", "split", "(", "os", ".", "pathsep", ")", ":", "abspath", "=", "osp", ".", "join", "(", "path", ",", "basename", ")", "if", "osp", ".", "isfile", "(", "abspath", ")", ":", "return", "abspath" ]
return program absolute path if installed in path .
train
true
23,071
def malletmodel2ldamodel(mallet_model, gamma_threshold=0.001, iterations=50): model_gensim = LdaModel(id2word=mallet_model.id2word, num_topics=mallet_model.num_topics, alpha=mallet_model.alpha, iterations=iterations, gamma_threshold=gamma_threshold) model_gensim.expElogbeta[:] = mallet_model.wordtopics return model_gensim
[ "def", "malletmodel2ldamodel", "(", "mallet_model", ",", "gamma_threshold", "=", "0.001", ",", "iterations", "=", "50", ")", ":", "model_gensim", "=", "LdaModel", "(", "id2word", "=", "mallet_model", ".", "id2word", ",", "num_topics", "=", "mallet_model", ".", "num_topics", ",", "alpha", "=", "mallet_model", ".", "alpha", ",", "iterations", "=", "iterations", ",", "gamma_threshold", "=", "gamma_threshold", ")", "model_gensim", ".", "expElogbeta", "[", ":", "]", "=", "mallet_model", ".", "wordtopics", "return", "model_gensim" ]
function to convert mallet model to gensim ldamodel .
train
false
23,075
def defers(func): @wraps(func) def wrapped(*a, **kw): return defer.maybeDeferred(func, *a, **kw) return wrapped
[ "def", "defers", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "a", ",", "**", "kw", ")", ":", "return", "defer", ".", "maybeDeferred", "(", "func", ",", "*", "a", ",", "**", "kw", ")", "return", "wrapped" ]
decorator to make sure a function always returns a deferred .
train
false
23,076
def compute_edge_measures(ntwk): iflogger.info(u'Computing edge measures:') measures = {} return measures
[ "def", "compute_edge_measures", "(", "ntwk", ")", ":", "iflogger", ".", "info", "(", "u'Computing edge measures:'", ")", "measures", "=", "{", "}", "return", "measures" ]
these return edge-based measures .
train
false
23,077
def install_overlay(module, name, list_url=None): layman_conf = BareConfig(read_configfile=True) layman = init_layman(layman_conf) if layman.is_installed(name): return False if module.check_mode: mymsg = (("Would add layman repo '" + name) + "'") module.exit_json(changed=True, msg=mymsg) if (not layman.is_repo(name)): if (not list_url): raise ModuleError(("Overlay '%s' is not on the list of known overlays and URL of the remote list was not provided." % name)) overlay_defs = layman_conf.get_option('overlay_defs') dest = path.join(overlay_defs, (name + '.xml')) download_url(module, list_url, dest) layman = init_layman() if (not layman.add_repos(name)): raise ModuleError(layman.get_errors()) return True
[ "def", "install_overlay", "(", "module", ",", "name", ",", "list_url", "=", "None", ")", ":", "layman_conf", "=", "BareConfig", "(", "read_configfile", "=", "True", ")", "layman", "=", "init_layman", "(", "layman_conf", ")", "if", "layman", ".", "is_installed", "(", "name", ")", ":", "return", "False", "if", "module", ".", "check_mode", ":", "mymsg", "=", "(", "(", "\"Would add layman repo '\"", "+", "name", ")", "+", "\"'\"", ")", "module", ".", "exit_json", "(", "changed", "=", "True", ",", "msg", "=", "mymsg", ")", "if", "(", "not", "layman", ".", "is_repo", "(", "name", ")", ")", ":", "if", "(", "not", "list_url", ")", ":", "raise", "ModuleError", "(", "(", "\"Overlay '%s' is not on the list of known overlays and URL of the remote list was not provided.\"", "%", "name", ")", ")", "overlay_defs", "=", "layman_conf", ".", "get_option", "(", "'overlay_defs'", ")", "dest", "=", "path", ".", "join", "(", "overlay_defs", ",", "(", "name", "+", "'.xml'", ")", ")", "download_url", "(", "module", ",", "list_url", ",", "dest", ")", "layman", "=", "init_layman", "(", ")", "if", "(", "not", "layman", ".", "add_repos", "(", "name", ")", ")", ":", "raise", "ModuleError", "(", "layman", ".", "get_errors", "(", ")", ")", "return", "True" ]
installs the overlay repository .
train
false
23,081
def errcheck(ret, func, args): if (ret == (-1)): try: ec = ctypes.get_errno() raise LabJackException(ec, ('Exodriver returned error number %s' % ec)) except AttributeError: raise LabJackException((-1), 'Exodriver returned an error, but LabJackPython is unable to read the error code. Upgrade to Python 2.6 for this functionality.') else: return ret
[ "def", "errcheck", "(", "ret", ",", "func", ",", "args", ")", ":", "if", "(", "ret", "==", "(", "-", "1", ")", ")", ":", "try", ":", "ec", "=", "ctypes", ".", "get_errno", "(", ")", "raise", "LabJackException", "(", "ec", ",", "(", "'Exodriver returned error number %s'", "%", "ec", ")", ")", "except", "AttributeError", ":", "raise", "LabJackException", "(", "(", "-", "1", ")", ",", "'Exodriver returned an error, but LabJackPython is unable to read the error code. Upgrade to Python 2.6 for this functionality.'", ")", "else", ":", "return", "ret" ]
whenever a function is called through ctypes .
train
false
23,082
def _ipset_cmd(): return salt.utils.which('ipset')
[ "def", "_ipset_cmd", "(", ")", ":", "return", "salt", ".", "utils", ".", "which", "(", "'ipset'", ")" ]
return correct command .
train
false
23,083
def visit_hcb_html(self, node): global HCB_COUNTER HCB_COUNTER += 1 try: self.visit_literal_block(node) except nodes.SkipNode: pass code_block = self.body[(-1)] fill_header = {'divname': 'hiddencodeblock{0}'.format(HCB_COUNTER), 'startdisplay': ('none' if node['starthidden'] else 'block'), 'label': node.get('label')} divheader = '<p><strong><a href="javascript:showhide(document.getElementById(\'{divname}\'))">{label}</a></strong></p><div id="{divname}" style="display: {startdisplay}">'.format(**fill_header) code_block = (((js_showhide + divheader) + code_block) + '</div>') self.body[(-1)] = code_block raise nodes.SkipNode
[ "def", "visit_hcb_html", "(", "self", ",", "node", ")", ":", "global", "HCB_COUNTER", "HCB_COUNTER", "+=", "1", "try", ":", "self", ".", "visit_literal_block", "(", "node", ")", "except", "nodes", ".", "SkipNode", ":", "pass", "code_block", "=", "self", ".", "body", "[", "(", "-", "1", ")", "]", "fill_header", "=", "{", "'divname'", ":", "'hiddencodeblock{0}'", ".", "format", "(", "HCB_COUNTER", ")", ",", "'startdisplay'", ":", "(", "'none'", "if", "node", "[", "'starthidden'", "]", "else", "'block'", ")", ",", "'label'", ":", "node", ".", "get", "(", "'label'", ")", "}", "divheader", "=", "'<p><strong><a href=\"javascript:showhide(document.getElementById(\\'{divname}\\'))\">{label}</a></strong></p><div id=\"{divname}\" style=\"display: {startdisplay}\">'", ".", "format", "(", "**", "fill_header", ")", "code_block", "=", "(", "(", "(", "js_showhide", "+", "divheader", ")", "+", "code_block", ")", "+", "'</div>'", ")", "self", ".", "body", "[", "(", "-", "1", ")", "]", "=", "code_block", "raise", "nodes", ".", "SkipNode" ]
visit hidden code block .
train
false
23,084
def feature_hidden(feature_name): return ((feature_name is not None) and (feature_name in settings.OSCAR_HIDDEN_FEATURES))
[ "def", "feature_hidden", "(", "feature_name", ")", ":", "return", "(", "(", "feature_name", "is", "not", "None", ")", "and", "(", "feature_name", "in", "settings", ".", "OSCAR_HIDDEN_FEATURES", ")", ")" ]
test if a certain oscar feature is disabled .
train
false
23,085
def is_process_64_from_handle(hProcess): iswow64 = c_bool(False) if (IsWow64Process is None): return False if (not IsWow64Process(hProcess, byref(iswow64))): raise WinError() return (not iswow64.value)
[ "def", "is_process_64_from_handle", "(", "hProcess", ")", ":", "iswow64", "=", "c_bool", "(", "False", ")", "if", "(", "IsWow64Process", "is", "None", ")", ":", "return", "False", "if", "(", "not", "IsWow64Process", "(", "hProcess", ",", "byref", "(", "iswow64", ")", ")", ")", ":", "raise", "WinError", "(", ")", "return", "(", "not", "iswow64", ".", "value", ")" ]
take a process handle .
train
false
23,086
def with_worker_threads(n_threads, dbname='foo', n_jobs=sys.maxsize, timeout=10.0): def newth(ii): return threading.Thread(target=_worker_thread_fn, args=(('hostname', ii), n_jobs, timeout, dbname)) def deco(f): def wrapper(*args, **kwargs): threads = list(map(newth, list(range(n_threads)))) [th.start() for th in threads] try: return f(*args, **kwargs) finally: [th.join() for th in threads] wrapper.__name__ = f.__name__ return wrapper return deco
[ "def", "with_worker_threads", "(", "n_threads", ",", "dbname", "=", "'foo'", ",", "n_jobs", "=", "sys", ".", "maxsize", ",", "timeout", "=", "10.0", ")", ":", "def", "newth", "(", "ii", ")", ":", "return", "threading", ".", "Thread", "(", "target", "=", "_worker_thread_fn", ",", "args", "=", "(", "(", "'hostname'", ",", "ii", ")", ",", "n_jobs", ",", "timeout", ",", "dbname", ")", ")", "def", "deco", "(", "f", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "threads", "=", "list", "(", "map", "(", "newth", ",", "list", "(", "range", "(", "n_threads", ")", ")", ")", ")", "[", "th", ".", "start", "(", ")", "for", "th", "in", "threads", "]", "try", ":", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "[", "th", ".", "join", "(", ")", "for", "th", "in", "threads", "]", "wrapper", ".", "__name__", "=", "f", ".", "__name__", "return", "wrapper", "return", "deco" ]
decorator that will run a test with some mongoworker threads in flight .
train
false
23,087
def pretty_iban(iban): return ' '.join([iban[i:(i + 4)] for i in range(0, len(iban), 4)])
[ "def", "pretty_iban", "(", "iban", ")", ":", "return", "' '", ".", "join", "(", "[", "iban", "[", "i", ":", "(", "i", "+", "4", ")", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "iban", ")", ",", "4", ")", "]", ")" ]
return iban in groups of four characters separated by a single space .
train
false
23,088
def read_serial(console): data_bytes = console.inWaiting() if data_bytes: return console.read(data_bytes) else: return ''
[ "def", "read_serial", "(", "console", ")", ":", "data_bytes", "=", "console", ".", "inWaiting", "(", ")", "if", "data_bytes", ":", "return", "console", ".", "read", "(", "data_bytes", ")", "else", ":", "return", "''" ]
check if there is data waiting to be read read and return it .
train
false