Schema (column, dtype, observed range):
    id_within_dataset     int64     values 1 to 55.5k
    snippet               string    lengths 19 to 14.2k
    tokens                list      lengths 6 to 1.63k
    nl                    string    lengths 6 to 352
    split_within_dataset  string    1 value
    is_duplicated         bool      2 classes
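Each row below is one record with these six fields. As a minimal sketch of the record layout, assuming the rows are exposed as plain Python dicts (a hypothetical representation; the field values are abbreviated from row 54,676 below for illustration):

# Hypothetical record layout; values abbreviated from row 54,676.
record = {
    'id_within_dataset': 54676,       # int64 row id
    'snippet': 'def stub_out_registry_image_update(stubs): ...',  # raw source code (abbreviated)
    'tokens': ['def', 'stub_out_registry_image_update', '(', 'stubs', ')', ':'],  # lexed tokens (abbreviated)
    'nl': 'stubs an image update on the registry .',              # natural-language summary
    'split_within_dataset': 'train',  # only one split appears in this dump
    'is_duplicated': False,           # near-duplicate flag
}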
54,676
def stub_out_registry_image_update(stubs):
    test_stubs.stub_out_registry_server(stubs)

    def fake_image_update(ctx, image_id, values, purge_props=False):
        return {'properties': {}}
    stubs.Set(db_api, 'image_update', fake_image_update)
[ "def", "stub_out_registry_image_update", "(", "stubs", ")", ":", "test_stubs", ".", "stub_out_registry_server", "(", "stubs", ")", "def", "fake_image_update", "(", "ctx", ",", "image_id", ",", "values", ",", "purge_props", "=", "False", ")", ":", "return", "{", "'properties'", ":", "{", "}", "}", "stubs", ".", "Set", "(", "db_api", ",", "'image_update'", ",", "fake_image_update", ")" ]
stubs an image update on the registry .
train
false
54,677
def XmlToString(content, encoding='utf-8', pretty=False):
    xml_parts = [('<?xml version="1.0" encoding="%s"?>' % encoding)]
    if pretty:
        xml_parts.append('\n')
    _ConstructContentList(xml_parts, content, pretty)
    return ''.join(xml_parts)
[ "def", "XmlToString", "(", "content", ",", "encoding", "=", "'utf-8'", ",", "pretty", "=", "False", ")", ":", "xml_parts", "=", "[", "(", "'<?xml version=\"1.0\" encoding=\"%s\"?>'", "%", "encoding", ")", "]", "if", "pretty", ":", "xml_parts", ".", "append", "(", "'\\n'", ")", "_ConstructContentList", "(", "xml_parts", ",", "content", ",", "pretty", ")", "return", "''", ".", "join", "(", "xml_parts", ")" ]
writes the xml content to disk .
train
false
54,678
def add_email_to_campaign(survey, email):
    token = settings.SURVEYGIZMO_API_TOKEN
    secret = settings.SURVEYGIZMO_API_TOKEN_SECRET
    if ((token is None) or (secret is None)):
        return
    survey_id = SURVEYS[survey]['exit_survey_id']
    campaign_id = SURVEYS[survey]['exit_survey_campaign_id']
    try:
        requests.put('https://restapi.surveygizmo.com/v2/survey/{survey}/surveycampaign/{campaign}/contact?semailaddress={email}&api_token={token}&api_token_secret={secret}'.format(survey=survey_id, campaign=campaign_id, email=email, token=token, secret=secret), timeout=30)
    except requests.exceptions.Timeout:
        print ('Timedout adding: %s' % email)
[ "def", "add_email_to_campaign", "(", "survey", ",", "email", ")", ":", "token", "=", "settings", ".", "SURVEYGIZMO_API_TOKEN", "secret", "=", "settings", ".", "SURVEYGIZMO_API_TOKEN_SECRET", "if", "(", "(", "token", "is", "None", ")", "or", "(", "secret", "is", "None", ")", ")", ":", "return", "survey_id", "=", "SURVEYS", "[", "survey", "]", "[", "'exit_survey_id'", "]", "campaign_id", "=", "SURVEYS", "[", "survey", "]", "[", "'exit_survey_campaign_id'", "]", "try", ":", "requests", ".", "put", "(", "'https://restapi.surveygizmo.com/v2/survey/{survey}/surveycampaign/{campaign}/contact?semailaddress={email}&api_token={token}&api_token_secret={secret}'", ".", "format", "(", "survey", "=", "survey_id", ",", "campaign", "=", "campaign_id", ",", "email", "=", "email", ",", "token", "=", "token", ",", "secret", "=", "secret", ")", ",", "timeout", "=", "30", ")", "except", "requests", ".", "exceptions", ".", "Timeout", ":", "print", "(", "'Timedout adding: %s'", "%", "email", ")" ]
add email to the exit survey campaign .
train
false
54,680
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
def do_resize_confirm(cs, args):
    _find_server(cs, args.server).confirm_resize()
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_resize_confirm", "(", "cs", ",", "args", ")", ":", "_find_server", "(", "cs", ",", "args", ".", "server", ")", ".", "confirm_resize", "(", ")" ]
confirm a previous resize .
train
false
54,684
def p_test(p):
    p[0] = p[1]
[ "def", "p_test", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
test : comparison .
train
false
54,685
def _get_forward_relationships(opts):
    forward_relations = OrderedDict()
    for field in [field for field in opts.fields if (field.serialize and get_remote_field(field))]:
        forward_relations[field.name] = RelationInfo(model_field=field, related_model=get_related_model(field), to_many=False, to_field=_get_to_field(field), has_through_model=False, reverse=False)
    for field in [field for field in opts.many_to_many if field.serialize]:
        forward_relations[field.name] = RelationInfo(model_field=field, related_model=get_related_model(field), to_many=True, to_field=None, has_through_model=(not get_remote_field(field).through._meta.auto_created), reverse=False)
    return forward_relations
[ "def", "_get_forward_relationships", "(", "opts", ")", ":", "forward_relations", "=", "OrderedDict", "(", ")", "for", "field", "in", "[", "field", "for", "field", "in", "opts", ".", "fields", "if", "(", "field", ".", "serialize", "and", "get_remote_field", "(", "field", ")", ")", "]", ":", "forward_relations", "[", "field", ".", "name", "]", "=", "RelationInfo", "(", "model_field", "=", "field", ",", "related_model", "=", "get_related_model", "(", "field", ")", ",", "to_many", "=", "False", ",", "to_field", "=", "_get_to_field", "(", "field", ")", ",", "has_through_model", "=", "False", ",", "reverse", "=", "False", ")", "for", "field", "in", "[", "field", "for", "field", "in", "opts", ".", "many_to_many", "if", "field", ".", "serialize", "]", ":", "forward_relations", "[", "field", ".", "name", "]", "=", "RelationInfo", "(", "model_field", "=", "field", ",", "related_model", "=", "get_related_model", "(", "field", ")", ",", "to_many", "=", "True", ",", "to_field", "=", "None", ",", "has_through_model", "=", "(", "not", "get_remote_field", "(", "field", ")", ".", "through", ".", "_meta", ".", "auto_created", ")", ",", "reverse", "=", "False", ")", "return", "forward_relations" ]
returns an ordereddict of field names to relationinfo .
train
false
54,686
def cap_alert_is_template(alert_id):
    if (not alert_id):
        return False
    table = current.s3db.cap_alert
    query = (table.id == alert_id)
    r = current.db(query).select(table.is_template, limitby=(0, 1)).first()
    return (r and r.is_template)
[ "def", "cap_alert_is_template", "(", "alert_id", ")", ":", "if", "(", "not", "alert_id", ")", ":", "return", "False", "table", "=", "current", ".", "s3db", ".", "cap_alert", "query", "=", "(", "table", ".", "id", "==", "alert_id", ")", "r", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "table", ".", "is_template", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "return", "(", "r", "and", "r", ".", "is_template", ")" ]
tell whether an alert entry is a template .
train
false
54,687
@task
@timed
def check_memcache():
    if (not is_memcache_running()):
        msg = colorize('red', 'Memcache is not running locally.')
        print msg
        sys.exit(1)
[ "@", "task", "@", "timed", "def", "check_memcache", "(", ")", ":", "if", "(", "not", "is_memcache_running", "(", ")", ")", ":", "msg", "=", "colorize", "(", "'red'", ",", "'Memcache is not running locally.'", ")", "print", "msg", "sys", ".", "exit", "(", "1", ")" ]
check that memcache is running .
train
false
54,690
def pr_api_url_from_web_url(url):
    path = '/'.join(map(partial(replace, 'pull', 'pulls'), url_path_parts(url)))
    return ((API_BASE_URL + REPOS_API_PATH) + path)
[ "def", "pr_api_url_from_web_url", "(", "url", ")", ":", "path", "=", "'/'", ".", "join", "(", "map", "(", "partial", "(", "replace", ",", "'pull'", ",", "'pulls'", ")", ",", "url_path_parts", "(", "url", ")", ")", ")", "return", "(", "(", "API_BASE_URL", "+", "REPOS_API_PATH", ")", "+", "path", ")" ]
get the api url for a pull request from the web one .
train
false
54,691
def NullController(*_args, **_kwargs):
    return None
[ "def", "NullController", "(", "*", "_args", ",", "**", "_kwargs", ")", ":", "return", "None" ]
nonexistent controller - simply returns none .
train
false
54,692
def parse_backend_conf(backend, **kwargs):
    conf = settings.CACHES.get(backend, None)
    if (conf is not None):
        args = conf.copy()
        args.update(kwargs)
        backend = args.pop('BACKEND')
        location = args.pop('LOCATION', '')
        return (backend, location, args)
    else:
        try:
            (mod_path, cls_name) = backend.rsplit('.', 1)
            mod = importlib.import_module(mod_path)
            backend_cls = getattr(mod, cls_name)
        except (AttributeError, ImportError, ValueError):
            raise InvalidCacheBackendError(("Could not find backend '%s'" % backend))
        location = kwargs.pop('LOCATION', '')
        return (backend, location, kwargs)
    raise InvalidCacheBackendError(("Couldn't find a cache backend named '%s'" % backend))
[ "def", "parse_backend_conf", "(", "backend", ",", "**", "kwargs", ")", ":", "conf", "=", "settings", ".", "CACHES", ".", "get", "(", "backend", ",", "None", ")", "if", "(", "conf", "is", "not", "None", ")", ":", "args", "=", "conf", ".", "copy", "(", ")", "args", ".", "update", "(", "kwargs", ")", "backend", "=", "args", ".", "pop", "(", "'BACKEND'", ")", "location", "=", "args", ".", "pop", "(", "'LOCATION'", ",", "''", ")", "return", "(", "backend", ",", "location", ",", "args", ")", "else", ":", "try", ":", "(", "mod_path", ",", "cls_name", ")", "=", "backend", ".", "rsplit", "(", "'.'", ",", "1", ")", "mod", "=", "importlib", ".", "import_module", "(", "mod_path", ")", "backend_cls", "=", "getattr", "(", "mod", ",", "cls_name", ")", "except", "(", "AttributeError", ",", "ImportError", ",", "ValueError", ")", ":", "raise", "InvalidCacheBackendError", "(", "(", "\"Could not find backend '%s'\"", "%", "backend", ")", ")", "location", "=", "kwargs", ".", "pop", "(", "'LOCATION'", ",", "''", ")", "return", "(", "backend", ",", "location", ",", "kwargs", ")", "raise", "InvalidCacheBackendError", "(", "(", "\"Couldn't find a cache backend named '%s'\"", "%", "backend", ")", ")" ]
helper function to parse the backend configuration that doesnt use the uri notation .
train
false
54,693
def normalize_timestamp(timestamp):
    return Timestamp(timestamp).normal
[ "def", "normalize_timestamp", "(", "timestamp", ")", ":", "return", "Timestamp", "(", "timestamp", ")", ".", "normal" ]
format a timestamp into a standardized xxxxxxxxxx .
train
false
54,694
def _write_proj(fid, projs):
    if (len(projs) == 0):
        return
    start_block(fid, FIFF.FIFFB_PROJ)
    for proj in projs:
        start_block(fid, FIFF.FIFFB_PROJ_ITEM)
        write_int(fid, FIFF.FIFF_NCHAN, proj['data']['ncol'])
        write_name_list(fid, FIFF.FIFF_PROJ_ITEM_CH_NAME_LIST, proj['data']['col_names'])
        write_string(fid, FIFF.FIFF_NAME, proj['desc'])
        write_int(fid, FIFF.FIFF_PROJ_ITEM_KIND, proj['kind'])
        if (proj['kind'] == FIFF.FIFFV_PROJ_ITEM_FIELD):
            write_float(fid, FIFF.FIFF_PROJ_ITEM_TIME, 0.0)
        write_int(fid, FIFF.FIFF_PROJ_ITEM_NVEC, proj['data']['nrow'])
        write_int(fid, FIFF.FIFF_MNE_PROJ_ITEM_ACTIVE, proj['active'])
        write_float_matrix(fid, FIFF.FIFF_PROJ_ITEM_VECTORS, proj['data']['data'])
        if (proj['explained_var'] is not None):
            write_float(fid, FIFF.FIFF_MNE_ICA_PCA_EXPLAINED_VAR, proj['explained_var'])
        end_block(fid, FIFF.FIFFB_PROJ_ITEM)
    end_block(fid, FIFF.FIFFB_PROJ)
[ "def", "_write_proj", "(", "fid", ",", "projs", ")", ":", "if", "(", "len", "(", "projs", ")", "==", "0", ")", ":", "return", "start_block", "(", "fid", ",", "FIFF", ".", "FIFFB_PROJ", ")", "for", "proj", "in", "projs", ":", "start_block", "(", "fid", ",", "FIFF", ".", "FIFFB_PROJ_ITEM", ")", "write_int", "(", "fid", ",", "FIFF", ".", "FIFF_NCHAN", ",", "proj", "[", "'data'", "]", "[", "'ncol'", "]", ")", "write_name_list", "(", "fid", ",", "FIFF", ".", "FIFF_PROJ_ITEM_CH_NAME_LIST", ",", "proj", "[", "'data'", "]", "[", "'col_names'", "]", ")", "write_string", "(", "fid", ",", "FIFF", ".", "FIFF_NAME", ",", "proj", "[", "'desc'", "]", ")", "write_int", "(", "fid", ",", "FIFF", ".", "FIFF_PROJ_ITEM_KIND", ",", "proj", "[", "'kind'", "]", ")", "if", "(", "proj", "[", "'kind'", "]", "==", "FIFF", ".", "FIFFV_PROJ_ITEM_FIELD", ")", ":", "write_float", "(", "fid", ",", "FIFF", ".", "FIFF_PROJ_ITEM_TIME", ",", "0.0", ")", "write_int", "(", "fid", ",", "FIFF", ".", "FIFF_PROJ_ITEM_NVEC", ",", "proj", "[", "'data'", "]", "[", "'nrow'", "]", ")", "write_int", "(", "fid", ",", "FIFF", ".", "FIFF_MNE_PROJ_ITEM_ACTIVE", ",", "proj", "[", "'active'", "]", ")", "write_float_matrix", "(", "fid", ",", "FIFF", ".", "FIFF_PROJ_ITEM_VECTORS", ",", "proj", "[", "'data'", "]", "[", "'data'", "]", ")", "if", "(", "proj", "[", "'explained_var'", "]", "is", "not", "None", ")", ":", "write_float", "(", "fid", ",", "FIFF", ".", "FIFF_MNE_ICA_PCA_EXPLAINED_VAR", ",", "proj", "[", "'explained_var'", "]", ")", "end_block", "(", "fid", ",", "FIFF", ".", "FIFFB_PROJ_ITEM", ")", "end_block", "(", "fid", ",", "FIFF", ".", "FIFFB_PROJ", ")" ]
write a projection operator to a file .
train
false
54,695
def clean_up_tables(db, tmp_prefix):
    db.execute('BEGIN')
    for table in ('point', 'line', 'roads', 'polygon'):
        db.execute(('DROP TABLE %(tmp_prefix)s_%(table)s' % locals()))
        db.execute(("DELETE FROM geometry_columns WHERE f_table_name = '%(tmp_prefix)s_%(table)s'" % locals()))
    db.execute('COMMIT')
[ "def", "clean_up_tables", "(", "db", ",", "tmp_prefix", ")", ":", "db", ".", "execute", "(", "'BEGIN'", ")", "for", "table", "in", "(", "'point'", ",", "'line'", ",", "'roads'", ",", "'polygon'", ")", ":", "db", ".", "execute", "(", "(", "'DROP TABLE %(tmp_prefix)s_%(table)s'", "%", "locals", "(", ")", ")", ")", "db", ".", "execute", "(", "(", "\"DELETE FROM geometry_columns WHERE f_table_name = '%(tmp_prefix)s_%(table)s'\"", "%", "locals", "(", ")", ")", ")", "db", ".", "execute", "(", "'COMMIT'", ")" ]
drop all temporary tables created by prepare_data() .
train
false
54,696
def custom_url(generator, metadata):
    global global_siteurl
    global_siteurl = generator.settings['SITEURL']
[ "def", "custom_url", "(", "generator", ",", "metadata", ")", ":", "global", "global_siteurl", "global_siteurl", "=", "generator", ".", "settings", "[", "'SITEURL'", "]" ]
saves globally the value of siteurl configuration parameter .
train
false
54,697
def find_sockfiles():
    paths = []
    for dir in SEARCH_DIRS:
        if ((not os.path.isdir(dir)) or (not os.access(dir, os.R_OK))):
            continue
        for name in os.listdir(dir):
            subdir = os.path.join(dir, name)
            if ((not os.path.isdir(subdir)) or (not os.access(subdir, os.R_OK))):
                continue
            for subname in os.listdir(subdir):
                path = os.path.join(subdir, subname)
                if utils.is_sockfile(path):
                    paths.append(path)
                    break
    for sockfile in DEFAULT_SOCKFILES:
        if (not utils.is_sockfile(sockfile)):
            continue
        paths.append(sockfile)
    return paths
[ "def", "find_sockfiles", "(", ")", ":", "paths", "=", "[", "]", "for", "dir", "in", "SEARCH_DIRS", ":", "if", "(", "(", "not", "os", ".", "path", ".", "isdir", "(", "dir", ")", ")", "or", "(", "not", "os", ".", "access", "(", "dir", ",", "os", ".", "R_OK", ")", ")", ")", ":", "continue", "for", "name", "in", "os", ".", "listdir", "(", "dir", ")", ":", "subdir", "=", "os", ".", "path", ".", "join", "(", "dir", ",", "name", ")", "if", "(", "(", "not", "os", ".", "path", ".", "isdir", "(", "subdir", ")", ")", "or", "(", "not", "os", ".", "access", "(", "subdir", ",", "os", ".", "R_OK", ")", ")", ")", ":", "continue", "for", "subname", "in", "os", ".", "listdir", "(", "subdir", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "subdir", ",", "subname", ")", "if", "utils", ".", "is_sockfile", "(", "path", ")", ":", "paths", ".", "append", "(", "path", ")", "break", "for", "sockfile", "in", "DEFAULT_SOCKFILES", ":", "if", "(", "not", "utils", ".", "is_sockfile", "(", "sockfile", ")", ")", ":", "continue", "paths", ".", "append", "(", "sockfile", ")", "return", "paths" ]
returns a list of paths to socket files to monitor .
train
false
54,699
def has_no_time(at):
    if isinstance(at, datetime):
        return False
    return ((at.hour is None) and (at.minute is None) and (at.second is None) and (at.microsecond is None))
[ "def", "has_no_time", "(", "at", ")", ":", "if", "isinstance", "(", "at", ",", "datetime", ")", ":", "return", "False", "return", "(", "(", "at", ".", "hour", "is", "None", ")", "and", "(", "at", ".", "minute", "is", "None", ")", "and", "(", "at", ".", "second", "is", "None", ")", "and", "(", "at", ".", "microsecond", "is", "None", ")", ")" ]
returns true if the given object is an adatetime where hour .
train
false
54,700
def primes(n):
    if (n == 2):
        return [2]
    elif (n < 2):
        return []
    s = list(range(3, (n + 1), 2))
    mroot = (n ** 0.5)
    half = (((n + 1) // 2) - 1)
    i = 0
    m = 3
    while (m <= mroot):
        if s[i]:
            j = (((m * m) - 3) // 2)
            s[j] = 0
            while (j < half):
                s[j] = 0
                j += m
        i = (i + 1)
        m = ((2 * i) + 3)
    return ([2] + [x for x in s if x])
[ "def", "primes", "(", "n", ")", ":", "if", "(", "n", "==", "2", ")", ":", "return", "[", "2", "]", "elif", "(", "n", "<", "2", ")", ":", "return", "[", "]", "s", "=", "list", "(", "range", "(", "3", ",", "(", "n", "+", "1", ")", ",", "2", ")", ")", "mroot", "=", "(", "n", "**", "0.5", ")", "half", "=", "(", "(", "(", "n", "+", "1", ")", "//", "2", ")", "-", "1", ")", "i", "=", "0", "m", "=", "3", "while", "(", "m", "<=", "mroot", ")", ":", "if", "s", "[", "i", "]", ":", "j", "=", "(", "(", "(", "m", "*", "m", ")", "-", "3", ")", "//", "2", ")", "s", "[", "j", "]", "=", "0", "while", "(", "j", "<", "half", ")", ":", "s", "[", "j", "]", "=", "0", "j", "+=", "m", "i", "=", "(", "i", "+", "1", ")", "m", "=", "(", "(", "2", "*", "i", ")", "+", "3", ")", "return", "(", "[", "2", "]", "+", "[", "x", "for", "x", "in", "s", "if", "x", "]", ")" ]
simple test function taken from URL .
train
true
54,701
def test_MultipleLocator_set_params():
    mult = mticker.MultipleLocator(base=0.7)
    mult.set_params(base=1.7)
    assert (mult._base == 1.7)
[ "def", "test_MultipleLocator_set_params", "(", ")", ":", "mult", "=", "mticker", ".", "MultipleLocator", "(", "base", "=", "0.7", ")", "mult", ".", "set_params", "(", "base", "=", "1.7", ")", "assert", "(", "mult", ".", "_base", "==", "1.7", ")" ]
create multiple locator with 0 .
train
false
54,703
def stack(xs, axis=0):
    xs = [expand_dims.expand_dims(x, axis=axis) for x in xs]
    return concat.concat(xs, axis=axis)
[ "def", "stack", "(", "xs", ",", "axis", "=", "0", ")", ":", "xs", "=", "[", "expand_dims", ".", "expand_dims", "(", "x", ",", "axis", "=", "axis", ")", "for", "x", "in", "xs", "]", "return", "concat", ".", "concat", "(", "xs", ",", "axis", "=", "axis", ")" ]
concatenate variables along a new axis .
train
false
54,705
def which_files(file, mode=(F_OK | X_OK), path=None, pathext=None):
    (filepath, file) = split(file)
    if filepath:
        path = (filepath,)
    elif (path is None):
        path = defpath
    elif isinstance(path, str):
        path = path.split(pathsep)
    if (pathext is None):
        pathext = defpathext
    elif isinstance(pathext, str):
        pathext = pathext.split(pathsep)
    if (not ('' in pathext)):
        pathext.insert(0, '')
    for dir in path:
        basepath = join(dir, file)
        for ext in pathext:
            fullpath = (basepath + ext)
            if (exists(fullpath) and access(fullpath, mode)):
                (yield fullpath)
[ "def", "which_files", "(", "file", ",", "mode", "=", "(", "F_OK", "|", "X_OK", ")", ",", "path", "=", "None", ",", "pathext", "=", "None", ")", ":", "(", "filepath", ",", "file", ")", "=", "split", "(", "file", ")", "if", "filepath", ":", "path", "=", "(", "filepath", ",", ")", "elif", "(", "path", "is", "None", ")", ":", "path", "=", "defpath", "elif", "isinstance", "(", "path", ",", "str", ")", ":", "path", "=", "path", ".", "split", "(", "pathsep", ")", "if", "(", "pathext", "is", "None", ")", ":", "pathext", "=", "defpathext", "elif", "isinstance", "(", "pathext", ",", "str", ")", ":", "pathext", "=", "pathext", ".", "split", "(", "pathsep", ")", "if", "(", "not", "(", "''", "in", "pathext", ")", ")", ":", "pathext", ".", "insert", "(", "0", ",", "''", ")", "for", "dir", "in", "path", ":", "basepath", "=", "join", "(", "dir", ",", "file", ")", "for", "ext", "in", "pathext", ":", "fullpath", "=", "(", "basepath", "+", "ext", ")", "if", "(", "exists", "(", "fullpath", ")", "and", "access", "(", "fullpath", ",", "mode", ")", ")", ":", "(", "yield", "fullpath", ")" ]
locate a file in a path supplied as a part of the file name .
train
false
54,707
def valid_android_zip(app_dir):
    try:
        print '[INFO] Checking for ZIP Validity and Mode'
        man = os.path.isfile(os.path.join(app_dir, 'AndroidManifest.xml'))
        src = os.path.exists(os.path.join(app_dir, 'src/'))
        if (man and src):
            return ('eclipse', True)
        man = os.path.isfile(os.path.join(app_dir, 'app/src/main/AndroidManifest.xml'))
        src = os.path.exists(os.path.join(app_dir, 'app/src/main/java/'))
        if (man and src):
            return ('studio', True)
        xcode = [f for f in os.listdir(app_dir) if f.endswith('.xcodeproj')]
        if xcode:
            return ('ios', True)
        return ('', False)
    except:
        PrintException('[ERROR] Determining Upload type')
[ "def", "valid_android_zip", "(", "app_dir", ")", ":", "try", ":", "print", "'[INFO] Checking for ZIP Validity and Mode'", "man", "=", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'AndroidManifest.xml'", ")", ")", "src", "=", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'src/'", ")", ")", "if", "(", "man", "and", "src", ")", ":", "return", "(", "'eclipse'", ",", "True", ")", "man", "=", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'app/src/main/AndroidManifest.xml'", ")", ")", "src", "=", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'app/src/main/java/'", ")", ")", "if", "(", "man", "and", "src", ")", ":", "return", "(", "'studio'", ",", "True", ")", "xcode", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "app_dir", ")", "if", "f", ".", "endswith", "(", "'.xcodeproj'", ")", "]", "if", "xcode", ":", "return", "(", "'ios'", ",", "True", ")", "return", "(", "''", ",", "False", ")", "except", ":", "PrintException", "(", "'[ERROR] Determining Upload type'", ")" ]
test if this is an valid android zip .
train
false
54,708
def _trim_text(text, max_width):
    width = get_cwidth(text)
    if (width > max_width):
        if (len(text) == width):
            trimmed_text = (text[:max(1, (max_width - 3))] + u'...')[:max_width]
            return (trimmed_text, len(trimmed_text))
        else:
            trimmed_text = u''
            for c in text:
                if (get_cwidth((trimmed_text + c)) <= (max_width - 3)):
                    trimmed_text += c
            trimmed_text += u'...'
            return (trimmed_text, get_cwidth(trimmed_text))
    else:
        return (text, width)
[ "def", "_trim_text", "(", "text", ",", "max_width", ")", ":", "width", "=", "get_cwidth", "(", "text", ")", "if", "(", "width", ">", "max_width", ")", ":", "if", "(", "len", "(", "text", ")", "==", "width", ")", ":", "trimmed_text", "=", "(", "text", "[", ":", "max", "(", "1", ",", "(", "max_width", "-", "3", ")", ")", "]", "+", "u'...'", ")", "[", ":", "max_width", "]", "return", "(", "trimmed_text", ",", "len", "(", "trimmed_text", ")", ")", "else", ":", "trimmed_text", "=", "u''", "for", "c", "in", "text", ":", "if", "(", "get_cwidth", "(", "(", "trimmed_text", "+", "c", ")", ")", "<=", "(", "max_width", "-", "3", ")", ")", ":", "trimmed_text", "+=", "c", "trimmed_text", "+=", "u'...'", "return", "(", "trimmed_text", ",", "get_cwidth", "(", "trimmed_text", ")", ")", "else", ":", "return", "(", "text", ",", "width", ")" ]
trim the text to max_width .
train
true
54,709
def compute_node_statistics(context):
    return IMPL.compute_node_statistics(context)
[ "def", "compute_node_statistics", "(", "context", ")", ":", "return", "IMPL", ".", "compute_node_statistics", "(", "context", ")" ]
get aggregate statistics over all compute nodes .
train
false
54,710
def common_texification(text):
    text = re_mathdefault.sub(repl_mathdefault, text)
    parts = re_mathsep.split(text)
    for (i, s) in enumerate(parts):
        if (not (i % 2)):
            s = re_escapetext.sub(repl_escapetext, s)
        else:
            s = (u'\\(\\displaystyle %s\\)' % s)
        parts[i] = s
    return u''.join(parts)
[ "def", "common_texification", "(", "text", ")", ":", "text", "=", "re_mathdefault", ".", "sub", "(", "repl_mathdefault", ",", "text", ")", "parts", "=", "re_mathsep", ".", "split", "(", "text", ")", "for", "(", "i", ",", "s", ")", "in", "enumerate", "(", "parts", ")", ":", "if", "(", "not", "(", "i", "%", "2", ")", ")", ":", "s", "=", "re_escapetext", ".", "sub", "(", "repl_escapetext", ",", "s", ")", "else", ":", "s", "=", "(", "u'\\\\(\\\\displaystyle %s\\\\)'", "%", "s", ")", "parts", "[", "i", "]", "=", "s", "return", "u''", ".", "join", "(", "parts", ")" ]
do some necessary and/or useful substitutions for texts to be included in latex documents .
train
false
54,712
def test_vector_to_conv_c01b_invertible():
    rng = np.random.RandomState([2013, 5, 1])
    batch_size = 3
    rows = 4
    cols = 5
    channels = 2
    conv = Conv2DSpace([rows, cols], channels=channels, axes=('c', 0, 1, 'b'))
    vec = VectorSpace(conv.get_total_dimension())
    X = conv.make_batch_theano()
    Y = conv.format_as(X, vec)
    Z = vec.format_as(Y, conv)
    A = vec.make_batch_theano()
    B = vec.format_as(A, conv)
    C = conv.format_as(B, vec)
    f = function([X, A], [Z, C])
    X = rng.randn(*conv.get_origin_batch(batch_size).shape).astype(X.dtype)
    A = rng.randn(*vec.get_origin_batch(batch_size).shape).astype(A.dtype)
    (Z, C) = f(X, A)
    np.testing.assert_allclose(Z, X)
    np.testing.assert_allclose(C, A)
[ "def", "test_vector_to_conv_c01b_invertible", "(", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "[", "2013", ",", "5", ",", "1", "]", ")", "batch_size", "=", "3", "rows", "=", "4", "cols", "=", "5", "channels", "=", "2", "conv", "=", "Conv2DSpace", "(", "[", "rows", ",", "cols", "]", ",", "channels", "=", "channels", ",", "axes", "=", "(", "'c'", ",", "0", ",", "1", ",", "'b'", ")", ")", "vec", "=", "VectorSpace", "(", "conv", ".", "get_total_dimension", "(", ")", ")", "X", "=", "conv", ".", "make_batch_theano", "(", ")", "Y", "=", "conv", ".", "format_as", "(", "X", ",", "vec", ")", "Z", "=", "vec", ".", "format_as", "(", "Y", ",", "conv", ")", "A", "=", "vec", ".", "make_batch_theano", "(", ")", "B", "=", "vec", ".", "format_as", "(", "A", ",", "conv", ")", "C", "=", "conv", ".", "format_as", "(", "B", ",", "vec", ")", "f", "=", "function", "(", "[", "X", ",", "A", "]", ",", "[", "Z", ",", "C", "]", ")", "X", "=", "rng", ".", "randn", "(", "*", "conv", ".", "get_origin_batch", "(", "batch_size", ")", ".", "shape", ")", ".", "astype", "(", "X", ".", "dtype", ")", "A", "=", "rng", ".", "randn", "(", "*", "vec", ".", "get_origin_batch", "(", "batch_size", ")", ".", "shape", ")", ".", "astype", "(", "A", ".", "dtype", ")", "(", "Z", ",", "C", ")", "=", "f", "(", "X", ",", "A", ")", "np", ".", "testing", ".", "assert_allclose", "(", "Z", ",", "X", ")", "np", ".", "testing", ".", "assert_allclose", "(", "C", ",", "A", ")" ]
tests that the format_as methods between conv2dspace and vectorspace are invertible for the axis format .
train
false
54,713
@when(u'we connect to test database')
def step_db_connect_test(context):
    db_name = context.conf[u'dbname']
    context.cli.sendline(u'\\connect {0}'.format(db_name))
[ "@", "when", "(", "u'we connect to test database'", ")", "def", "step_db_connect_test", "(", "context", ")", ":", "db_name", "=", "context", ".", "conf", "[", "u'dbname'", "]", "context", ".", "cli", ".", "sendline", "(", "u'\\\\connect {0}'", ".", "format", "(", "db_name", ")", ")" ]
send connect to database .
train
false
54,714
def mul_elemwise(lh_op, rh_op):
    return lo.LinOp(lo.MUL_ELEM, lh_op.size, [rh_op], lh_op)
[ "def", "mul_elemwise", "(", "lh_op", ",", "rh_op", ")", ":", "return", "lo", ".", "LinOp", "(", "lo", ".", "MUL_ELEM", ",", "lh_op", ".", "size", ",", "[", "rh_op", "]", ",", "lh_op", ")" ]
multiply two linear operators elementwise .
train
false
54,715
def list_headers(general=None, request=None, response=None, entity=None):
    if (not (general or request or response or entity)):
        general = request = response = entity = True
    search = []
    for (bool, strval) in ((general, 'general'), (request, 'request'), (response, 'response'), (entity, 'entity')):
        if bool:
            search.append(strval)
    return [head for head in _headers.values() if (head.category in search)]
[ "def", "list_headers", "(", "general", "=", "None", ",", "request", "=", "None", ",", "response", "=", "None", ",", "entity", "=", "None", ")", ":", "if", "(", "not", "(", "general", "or", "request", "or", "response", "or", "entity", ")", ")", ":", "general", "=", "request", "=", "response", "=", "entity", "=", "True", "search", "=", "[", "]", "for", "(", "bool", ",", "strval", ")", "in", "(", "(", "general", ",", "'general'", ")", ",", "(", "request", ",", "'request'", ")", ",", "(", "response", ",", "'response'", ")", ",", "(", "entity", ",", "'entity'", ")", ")", ":", "if", "bool", ":", "search", ".", "append", "(", "strval", ")", "return", "[", "head", "for", "head", "in", "_headers", ".", "values", "(", ")", "if", "(", "head", ".", "category", "in", "search", ")", "]" ]
list all headers for a given category .
train
false
54,716
def map_vera_device(vera_device, remap):
    import pyvera as veraApi
    if isinstance(vera_device, veraApi.VeraDimmer):
        return 'light'
    if isinstance(vera_device, veraApi.VeraBinarySensor):
        return 'binary_sensor'
    if isinstance(vera_device, veraApi.VeraSensor):
        return 'sensor'
    if isinstance(vera_device, veraApi.VeraArmableDevice):
        return 'switch'
    if isinstance(vera_device, veraApi.VeraLock):
        return 'lock'
    if isinstance(vera_device, veraApi.VeraThermostat):
        return 'climate'
    if isinstance(vera_device, veraApi.VeraCurtain):
        return 'cover'
    if isinstance(vera_device, veraApi.VeraSwitch):
        if (vera_device.device_id in remap):
            return 'light'
        else:
            return 'switch'
    return None
[ "def", "map_vera_device", "(", "vera_device", ",", "remap", ")", ":", "import", "pyvera", "as", "veraApi", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraDimmer", ")", ":", "return", "'light'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraBinarySensor", ")", ":", "return", "'binary_sensor'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraSensor", ")", ":", "return", "'sensor'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraArmableDevice", ")", ":", "return", "'switch'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraLock", ")", ":", "return", "'lock'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraThermostat", ")", ":", "return", "'climate'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraCurtain", ")", ":", "return", "'cover'", "if", "isinstance", "(", "vera_device", ",", "veraApi", ".", "VeraSwitch", ")", ":", "if", "(", "vera_device", ".", "device_id", "in", "remap", ")", ":", "return", "'light'", "else", ":", "return", "'switch'", "return", "None" ]
map vera classes to ha types .
train
false
54,719
def oo_ami_selector(data, image_name):
    if (not isinstance(data, list)):
        raise errors.AnsibleFilterError('|failed expects first param is a list')
    if (not data):
        return None
    elif ((image_name is None) or (not image_name.endswith('_*'))):
        ami = sorted(data, key=itemgetter('name'), reverse=True)[0]
        return ami['ami_id']
    else:
        ami_info = [(ami, ami['name'].split('_')[(-1)]) for ami in data]
        ami = sorted(ami_info, key=itemgetter(1), reverse=True)[0][0]
        return ami['ami_id']
[ "def", "oo_ami_selector", "(", "data", ",", "image_name", ")", ":", "if", "(", "not", "isinstance", "(", "data", ",", "list", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects first param is a list'", ")", "if", "(", "not", "data", ")", ":", "return", "None", "elif", "(", "(", "image_name", "is", "None", ")", "or", "(", "not", "image_name", ".", "endswith", "(", "'_*'", ")", ")", ")", ":", "ami", "=", "sorted", "(", "data", ",", "key", "=", "itemgetter", "(", "'name'", ")", ",", "reverse", "=", "True", ")", "[", "0", "]", "return", "ami", "[", "'ami_id'", "]", "else", ":", "ami_info", "=", "[", "(", "ami", ",", "ami", "[", "'name'", "]", ".", "split", "(", "'_'", ")", "[", "(", "-", "1", ")", "]", ")", "for", "ami", "in", "data", "]", "ami", "=", "sorted", "(", "ami_info", ",", "key", "=", "itemgetter", "(", "1", ")", ",", "reverse", "=", "True", ")", "[", "0", "]", "[", "0", "]", "return", "ami", "[", "'ami_id'", "]" ]
this takes a list of amis and an image name and attempts to return the latest ami .
train
false
54,720
def get_lib_extension():
    if (sys.platform in ['win32', 'cygwin']):
        return 'pyd'
    else:
        return 'so'
[ "def", "get_lib_extension", "(", ")", ":", "if", "(", "sys", ".", "platform", "in", "[", "'win32'", ",", "'cygwin'", "]", ")", ":", "return", "'pyd'", "else", ":", "return", "'so'" ]
return the platform-dependent extension for compiled modules .
train
false
54,721
def _sort_names(FQDNs):
    return sorted(FQDNs, key=(lambda fqdn: fqdn.split('.')[::(-1)][1:]))
[ "def", "_sort_names", "(", "FQDNs", ")", ":", "return", "sorted", "(", "FQDNs", ",", "key", "=", "(", "lambda", "fqdn", ":", "fqdn", ".", "split", "(", "'.'", ")", "[", ":", ":", "(", "-", "1", ")", "]", "[", "1", ":", "]", ")", ")" ]
sort fqdns by sld .
train
false
54,724
def delete_device(name, safety_on=True):
    config = _get_vistara_configuration()
    if (not config):
        return False
    access_token = _get_oath2_access_token(config['client_key'], config['client_secret'])
    if (not access_token):
        return 'Vistara access token not available'
    query_string = 'dnsName:{0}'.format(name)
    devices = _search_devices(query_string, config['client_id'], access_token)
    if (not devices):
        return 'No devices found'
    device_count = len(devices)
    if (safety_on and (device_count != 1)):
        return 'Expected to delete 1 device and found {0}. Set safety_on=False to override.'.format(device_count)
    delete_responses = []
    for device in devices:
        device_id = device['id']
        log.debug(device_id)
        delete_response = _delete_resource(device_id, config['client_id'], access_token)
        if (not delete_response):
            return False
        delete_responses.append(delete_response)
    return delete_responses
[ "def", "delete_device", "(", "name", ",", "safety_on", "=", "True", ")", ":", "config", "=", "_get_vistara_configuration", "(", ")", "if", "(", "not", "config", ")", ":", "return", "False", "access_token", "=", "_get_oath2_access_token", "(", "config", "[", "'client_key'", "]", ",", "config", "[", "'client_secret'", "]", ")", "if", "(", "not", "access_token", ")", ":", "return", "'Vistara access token not available'", "query_string", "=", "'dnsName:{0}'", ".", "format", "(", "name", ")", "devices", "=", "_search_devices", "(", "query_string", ",", "config", "[", "'client_id'", "]", ",", "access_token", ")", "if", "(", "not", "devices", ")", ":", "return", "'No devices found'", "device_count", "=", "len", "(", "devices", ")", "if", "(", "safety_on", "and", "(", "device_count", "!=", "1", ")", ")", ":", "return", "'Expected to delete 1 device and found {0}. Set safety_on=False to override.'", ".", "format", "(", "device_count", ")", "delete_responses", "=", "[", "]", "for", "device", "in", "devices", ":", "device_id", "=", "device", "[", "'id'", "]", "log", ".", "debug", "(", "device_id", ")", "delete_response", "=", "_delete_resource", "(", "device_id", ",", "config", "[", "'client_id'", "]", ",", "access_token", ")", "if", "(", "not", "delete_response", ")", ":", "return", "False", "delete_responses", ".", "append", "(", "delete_response", ")", "return", "delete_responses" ]
deletes a device from vistara based on dns name or partial name .
train
true
54,725
def make_avpr_object(json_data):
    if (hasattr(json_data, 'get') and callable(json_data.get)):
        name = json_data.get('protocol')
        namespace = json_data.get('namespace')
        types = json_data.get('types')
        messages = json_data.get('messages')
        return Protocol(name, namespace, types, messages)
    else:
        raise ProtocolParseException(('Not a JSON object: %s' % json_data))
[ "def", "make_avpr_object", "(", "json_data", ")", ":", "if", "(", "hasattr", "(", "json_data", ",", "'get'", ")", "and", "callable", "(", "json_data", ".", "get", ")", ")", ":", "name", "=", "json_data", ".", "get", "(", "'protocol'", ")", "namespace", "=", "json_data", ".", "get", "(", "'namespace'", ")", "types", "=", "json_data", ".", "get", "(", "'types'", ")", "messages", "=", "json_data", ".", "get", "(", "'messages'", ")", "return", "Protocol", "(", "name", ",", "namespace", ",", "types", ",", "messages", ")", "else", ":", "raise", "ProtocolParseException", "(", "(", "'Not a JSON object: %s'", "%", "json_data", ")", ")" ]
build avro protocol from data parsed out of json string .
train
false
54,726
@constructor
def shape_padaxis(t, axis):
    _t = as_tensor_variable(t)
    ndim = (_t.ndim + 1)
    if (not ((- ndim) <= axis < ndim)):
        msg = 'axis {0} is out of bounds [-{1}, {1})'.format(axis, ndim)
        raise IndexError(msg)
    if (axis < 0):
        axis += ndim
    pattern = [i for i in xrange(_t.type.ndim)]
    pattern.insert(axis, 'x')
    return DimShuffle(_t.broadcastable, pattern)(_t)
[ "@", "constructor", "def", "shape_padaxis", "(", "t", ",", "axis", ")", ":", "_t", "=", "as_tensor_variable", "(", "t", ")", "ndim", "=", "(", "_t", ".", "ndim", "+", "1", ")", "if", "(", "not", "(", "(", "-", "ndim", ")", "<=", "axis", "<", "ndim", ")", ")", ":", "msg", "=", "'axis {0} is out of bounds [-{1}, {1})'", ".", "format", "(", "axis", ",", "ndim", ")", "raise", "IndexError", "(", "msg", ")", "if", "(", "axis", "<", "0", ")", ":", "axis", "+=", "ndim", "pattern", "=", "[", "i", "for", "i", "in", "xrange", "(", "_t", ".", "type", ".", "ndim", ")", "]", "pattern", ".", "insert", "(", "axis", ",", "'x'", ")", "return", "DimShuffle", "(", "_t", ".", "broadcastable", ",", "pattern", ")", "(", "_t", ")" ]
reshape t by inserting 1 at the dimension axis .
train
false
54,727
def dumps_with_persistent_ids(obj, protocol=None):
    file = BytesIO()
    pickler = pickle.Pickler(file, protocol)
    pickler.persistent_id = _persistent_id
    pickler.dump(obj)
    return file.getvalue()
[ "def", "dumps_with_persistent_ids", "(", "obj", ",", "protocol", "=", "None", ")", ":", "file", "=", "BytesIO", "(", ")", "pickler", "=", "pickle", ".", "Pickler", "(", "file", ",", "protocol", ")", "pickler", ".", "persistent_id", "=", "_persistent_id", "pickler", ".", "dump", "(", "obj", ")", "return", "file", ".", "getvalue", "(", ")" ]
performs a pickle dumps on the given object .
train
false
54,729
def expanding_quantile(arg, quantile, min_periods=1, freq=None):
    return ensure_compat('expanding', 'quantile', arg, freq=freq, min_periods=min_periods, func_kw=['quantile'], quantile=quantile)
[ "def", "expanding_quantile", "(", "arg", ",", "quantile", ",", "min_periods", "=", "1", ",", "freq", "=", "None", ")", ":", "return", "ensure_compat", "(", "'expanding'", ",", "'quantile'", ",", "arg", ",", "freq", "=", "freq", ",", "min_periods", "=", "min_periods", ",", "func_kw", "=", "[", "'quantile'", "]", ",", "quantile", "=", "quantile", ")" ]
expanding quantile .
train
false
54,730
def is_funcvar(expr):
    assert isinstance(expr, string_types), (u'%s is not a string' % expr)
    return (re.match(u'^[A-Z]\\d*$', expr) is not None)
[ "def", "is_funcvar", "(", "expr", ")", ":", "assert", "isinstance", "(", "expr", ",", "string_types", ")", ",", "(", "u'%s is not a string'", "%", "expr", ")", "return", "(", "re", ".", "match", "(", "u'^[A-Z]\\\\d*$'", ",", "expr", ")", "is", "not", "None", ")" ]
a function variable must be a single uppercase character followed by zero or more digits .
train
false
54,731
def install_readline(hook):
    global readline_hook, readline_ref
    readline_hook = hook
    PyOS_RFP = c_int.from_address(Console.GetProcAddress(sys.dllhandle, 'PyOS_ReadlineFunctionPointer'))
    if (sys.version < '2.3'):
        readline_ref = HOOKFUNC22(hook_wrapper)
    else:
        readline_ref = HOOKFUNC23(hook_wrapper_23)
    func_start = c_int.from_address(addressof(readline_ref)).value
    PyOS_RFP.value = func_start
[ "def", "install_readline", "(", "hook", ")", ":", "global", "readline_hook", ",", "readline_ref", "readline_hook", "=", "hook", "PyOS_RFP", "=", "c_int", ".", "from_address", "(", "Console", ".", "GetProcAddress", "(", "sys", ".", "dllhandle", ",", "'PyOS_ReadlineFunctionPointer'", ")", ")", "if", "(", "sys", ".", "version", "<", "'2.3'", ")", ":", "readline_ref", "=", "HOOKFUNC22", "(", "hook_wrapper", ")", "else", ":", "readline_ref", "=", "HOOKFUNC23", "(", "hook_wrapper_23", ")", "func_start", "=", "c_int", ".", "from_address", "(", "addressof", "(", "readline_ref", ")", ")", ".", "value", "PyOS_RFP", ".", "value", "=", "func_start" ]
set up things for the interpreter to call our function like gnu readline .
train
true
54,733
def _filter_configured_avoids(module):
    run_app = False
    if (hasattr(settings, 'LETTUCE_AVOID_APPS') and isinstance(settings.LETTUCE_AVOID_APPS, (list, tuple))):
        for appname in settings.LETTUCE_AVOID_APPS:
            if module.__name__.startswith(appname):
                run_app = True
    return (not run_app)
[ "def", "_filter_configured_avoids", "(", "module", ")", ":", "run_app", "=", "False", "if", "(", "hasattr", "(", "settings", ",", "'LETTUCE_AVOID_APPS'", ")", "and", "isinstance", "(", "settings", ".", "LETTUCE_AVOID_APPS", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "for", "appname", "in", "settings", ".", "LETTUCE_AVOID_APPS", ":", "if", "module", ".", "__name__", ".", "startswith", "(", "appname", ")", ":", "run_app", "=", "True", "return", "(", "not", "run_app", ")" ]
returns apps that are not within django .
train
false
54,734
def get_num_instances(server=None, version=None):
    req = servers_service_pb.GetNumInstancesRequest()
    if server:
        req.set_server(server)
    if version:
        req.set_version(version)
    resp = servers_service_pb.GetNumInstancesResponse()
    try:
        apiproxy_stub_map.MakeSyncCall('servers', 'GetNumInstances', req, resp)
    except apiproxy_errors.ApplicationError as e:
        if (e.application_error == servers_service_pb.ServersServiceError.INVALID_VERSION):
            raise InvalidVersionError()
        else:
            raise Error()
    return resp.instances()
[ "def", "get_num_instances", "(", "server", "=", "None", ",", "version", "=", "None", ")", ":", "req", "=", "servers_service_pb", ".", "GetNumInstancesRequest", "(", ")", "if", "server", ":", "req", ".", "set_server", "(", "server", ")", "if", "version", ":", "req", ".", "set_version", "(", "version", ")", "resp", "=", "servers_service_pb", ".", "GetNumInstancesResponse", "(", ")", "try", ":", "apiproxy_stub_map", ".", "MakeSyncCall", "(", "'servers'", ",", "'GetNumInstances'", ",", "req", ",", "resp", ")", "except", "apiproxy_errors", ".", "ApplicationError", "as", "e", ":", "if", "(", "e", ".", "application_error", "==", "servers_service_pb", ".", "ServersServiceError", ".", "INVALID_VERSION", ")", ":", "raise", "InvalidVersionError", "(", ")", "else", ":", "raise", "Error", "(", ")", "return", "resp", ".", "instances", "(", ")" ]
return the number of instances that are set for the given server version .
train
false
54,737
@requires_application()
def test_functionality_desktop():
    _test_functionality('gl2')
[ "@", "requires_application", "(", ")", "def", "test_functionality_desktop", "(", ")", ":", "_test_functionality", "(", "'gl2'", ")" ]
test desktop gl backend for full functionality .
train
false
54,740
def has_open_quotes(s):
    if (s.count('"') % 2):
        return '"'
    elif (s.count("'") % 2):
        return "'"
    else:
        return False
[ "def", "has_open_quotes", "(", "s", ")", ":", "if", "(", "s", ".", "count", "(", "'\"'", ")", "%", "2", ")", ":", "return", "'\"'", "elif", "(", "s", ".", "count", "(", "\"'\"", ")", "%", "2", ")", ":", "return", "\"'\"", "else", ":", "return", "False" ]
return whether a string has open quotes .
train
false
54,741
def internJID(jidstring):
    if (jidstring in __internJIDs):
        return __internJIDs[jidstring]
    else:
        j = JID(jidstring)
        __internJIDs[jidstring] = j
        return j
[ "def", "internJID", "(", "jidstring", ")", ":", "if", "(", "jidstring", "in", "__internJIDs", ")", ":", "return", "__internJIDs", "[", "jidstring", "]", "else", ":", "j", "=", "JID", "(", "jidstring", ")", "__internJIDs", "[", "jidstring", "]", "=", "j", "return", "j" ]
return interned jid .
train
false
54,743
def get_user_hash(request):
    ip = request.META.get('REMOTE_ADDR', '')
    ua = request.META.get('User-Agent', '')
    session_key = (request.session.session_key or '')
    return hashlib.sha1('-'.join(map(str, (ip, ua, session_key)))).hexdigest()
[ "def", "get_user_hash", "(", "request", ")", ":", "ip", "=", "request", ".", "META", ".", "get", "(", "'REMOTE_ADDR'", ",", "''", ")", "ua", "=", "request", ".", "META", ".", "get", "(", "'User-Agent'", ",", "''", ")", "session_key", "=", "(", "request", ".", "session", ".", "session_key", "or", "''", ")", "return", "hashlib", ".", "sha1", "(", "'-'", ".", "join", "(", "map", "(", "str", ",", "(", "ip", ",", "ua", ",", "session_key", ")", ")", ")", ")", ".", "hexdigest", "(", ")" ]
get a hash identifying an user .
train
false
54,744
def test_max_pool():
    X_sym = tensor.tensor4('X')
    pool_it = max_pool(X_sym, pool_shape=(2, 2), pool_stride=(2, 2), image_shape=(6, 4))
    f = theano.function(inputs=[X_sym], outputs=pool_it)
    X = np.array([[2, 1, 3, 4],
                  [1, 1, 3, 3],
                  [5, 5, 7, 7],
                  [5, 6, 8, 7],
                  [9, 10, 11, 12],
                  [9, 10, 12, 12]], dtype=theano.config.floatX)[np.newaxis, np.newaxis, ...]
    expected = np.array([[2, 4],
                         [6, 8],
                         [10, 12]], dtype=theano.config.floatX)[np.newaxis, np.newaxis, ...]
    actual = f(X)
    assert np.allclose(expected, actual)
[ "def", "test_max_pool", "(", ")", ":", "X_sym", "=", "tensor", ".", "tensor4", "(", "'X'", ")", "pool_it", "=", "max_pool", "(", "X_sym", ",", "pool_shape", "=", "(", "2", ",", "2", ")", ",", "pool_stride", "=", "(", "2", ",", "2", ")", ",", "image_shape", "=", "(", "6", ",", "4", ")", ")", "f", "=", "theano", ".", "function", "(", "inputs", "=", "[", "X_sym", "]", ",", "outputs", "=", "pool_it", ")", "X", "=", "np", ".", "array", "(", "[", "[", "2", ",", "1", ",", "3", ",", "4", "]", ",", "[", "1", ",", "1", ",", "3", ",", "3", "]", ",", "[", "5", ",", "5", ",", "7", ",", "7", "]", ",", "[", "5", ",", "6", ",", "8", ",", "7", "]", ",", "[", "9", ",", "10", ",", "11", ",", "12", "]", ",", "[", "9", ",", "10", ",", "12", ",", "12", "]", "]", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", "[", "np", ".", "newaxis", ",", "np", ".", "newaxis", ",", "...", "]", "expected", "=", "np", ".", "array", "(", "[", "[", "2", ",", "4", "]", ",", "[", "6", ",", "8", "]", ",", "[", "10", ",", "12", "]", "]", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", "[", "np", ".", "newaxis", ",", "np", ".", "newaxis", ",", "...", "]", "actual", "=", "f", "(", "X", ")", "assert", "np", ".", "allclose", "(", "expected", ",", "actual", ")" ]
test max pooling for known result .
train
false
54,745
def test_alknn_not_good_object():
    nn = 'rnd'
    allknn = AllKNN(n_neighbors=nn, random_state=RND_SEED, kind_sel='mode')
    assert_raises(ValueError, allknn.fit_sample, X, Y)
[ "def", "test_alknn_not_good_object", "(", ")", ":", "nn", "=", "'rnd'", "allknn", "=", "AllKNN", "(", "n_neighbors", "=", "nn", ",", "random_state", "=", "RND_SEED", ",", "kind_sel", "=", "'mode'", ")", "assert_raises", "(", "ValueError", ",", "allknn", ".", "fit_sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised while a wrong type of nn is given .
train
false
54,746
def required_estimates_fields(columns):
    return metadata_columns.union(viewvalues(columns))
[ "def", "required_estimates_fields", "(", "columns", ")", ":", "return", "metadata_columns", ".", "union", "(", "viewvalues", "(", "columns", ")", ")" ]
compute the set of resource columns required to serve columns .
train
false
54,749
def _create_base_cipher(dict_parameters):
    use_aesni = dict_parameters.pop('use_aesni', True)
    try:
        key = dict_parameters.pop('key')
    except KeyError:
        raise TypeError("Missing 'key' parameter")
    expect_byte_string(key)
    if (len(key) not in key_size):
        raise ValueError(('Incorrect AES key length (%d bytes)' % len(key)))
    if (use_aesni and _raw_aesni_lib):
        start_operation = _raw_aesni_lib.AESNI_start_operation
        stop_operation = _raw_aesni_lib.AESNI_stop_operation
    else:
        start_operation = _raw_aes_lib.AES_start_operation
        stop_operation = _raw_aes_lib.AES_stop_operation
    cipher = VoidPointer()
    result = start_operation(key, c_size_t(len(key)), cipher.address_of())
    if result:
        raise ValueError(('Error %X while instantiating the AES cipher' % result))
    return SmartPointer(cipher.get(), stop_operation)
[ "def", "_create_base_cipher", "(", "dict_parameters", ")", ":", "use_aesni", "=", "dict_parameters", ".", "pop", "(", "'use_aesni'", ",", "True", ")", "try", ":", "key", "=", "dict_parameters", ".", "pop", "(", "'key'", ")", "except", "KeyError", ":", "raise", "TypeError", "(", "\"Missing 'key' parameter\"", ")", "expect_byte_string", "(", "key", ")", "if", "(", "len", "(", "key", ")", "not", "in", "key_size", ")", ":", "raise", "ValueError", "(", "(", "'Incorrect AES key length (%d bytes)'", "%", "len", "(", "key", ")", ")", ")", "if", "(", "use_aesni", "and", "_raw_aesni_lib", ")", ":", "start_operation", "=", "_raw_aesni_lib", ".", "AESNI_start_operation", "stop_operation", "=", "_raw_aesni_lib", ".", "AESNI_stop_operation", "else", ":", "start_operation", "=", "_raw_aes_lib", ".", "AES_start_operation", "stop_operation", "=", "_raw_aes_lib", ".", "AES_stop_operation", "cipher", "=", "VoidPointer", "(", ")", "result", "=", "start_operation", "(", "key", ",", "c_size_t", "(", "len", "(", "key", ")", ")", ",", "cipher", ".", "address_of", "(", ")", ")", "if", "result", ":", "raise", "ValueError", "(", "(", "'Error %X while instantiating the AES cipher'", "%", "result", ")", ")", "return", "SmartPointer", "(", "cipher", ".", "get", "(", ")", ",", "stop_operation", ")" ]
this method instantiates and returns a handle to a low-level base cipher .
train
false
54,750
def load_check(agentConfig, hostname, checkname):
    from jmxfetch import JMX_CHECKS
    agentConfig['checksd_hostname'] = hostname
    osname = get_os()
    checks_places = get_checks_places(osname, agentConfig)
    for config_path in _file_configs_paths(osname, agentConfig):
        check_name = _conf_path_to_check_name(config_path)
        if ((check_name == checkname) and (check_name not in JMX_CHECKS)):
            (conf_is_valid, check_config, invalid_check) = _load_file_config(config_path, check_name, agentConfig)
            if (invalid_check and (not conf_is_valid)):
                return invalid_check
            (load_success, load_failure) = load_check_from_places(check_config, check_name, checks_places, agentConfig)
            return (load_success.values()[0] or load_failure)
    for (check_name, service_disco_check_config) in _service_disco_configs(agentConfig).iteritems():
        if (check_name == checkname):
            (sd_init_config, sd_instances) = service_disco_check_config[1]
            check_config = {'init_config': sd_init_config, 'instances': sd_instances}
            (load_success, load_failure) = load_check_from_places(check_config, check_name, checks_places, agentConfig)
            return (load_success.values()[0] or load_failure)
    return None
[ "def", "load_check", "(", "agentConfig", ",", "hostname", ",", "checkname", ")", ":", "from", "jmxfetch", "import", "JMX_CHECKS", "agentConfig", "[", "'checksd_hostname'", "]", "=", "hostname", "osname", "=", "get_os", "(", ")", "checks_places", "=", "get_checks_places", "(", "osname", ",", "agentConfig", ")", "for", "config_path", "in", "_file_configs_paths", "(", "osname", ",", "agentConfig", ")", ":", "check_name", "=", "_conf_path_to_check_name", "(", "config_path", ")", "if", "(", "(", "check_name", "==", "checkname", ")", "and", "(", "check_name", "not", "in", "JMX_CHECKS", ")", ")", ":", "(", "conf_is_valid", ",", "check_config", ",", "invalid_check", ")", "=", "_load_file_config", "(", "config_path", ",", "check_name", ",", "agentConfig", ")", "if", "(", "invalid_check", "and", "(", "not", "conf_is_valid", ")", ")", ":", "return", "invalid_check", "(", "load_success", ",", "load_failure", ")", "=", "load_check_from_places", "(", "check_config", ",", "check_name", ",", "checks_places", ",", "agentConfig", ")", "return", "(", "load_success", ".", "values", "(", ")", "[", "0", "]", "or", "load_failure", ")", "for", "(", "check_name", ",", "service_disco_check_config", ")", "in", "_service_disco_configs", "(", "agentConfig", ")", ".", "iteritems", "(", ")", ":", "if", "(", "check_name", "==", "checkname", ")", ":", "(", "sd_init_config", ",", "sd_instances", ")", "=", "service_disco_check_config", "[", "1", "]", "check_config", "=", "{", "'init_config'", ":", "sd_init_config", ",", "'instances'", ":", "sd_instances", "}", "(", "load_success", ",", "load_failure", ")", "=", "load_check_from_places", "(", "check_config", ",", "check_name", ",", "checks_places", ",", "agentConfig", ")", "return", "(", "load_success", ".", "values", "(", ")", "[", "0", "]", "or", "load_failure", ")", "return", "None" ]
same logic as load_check_directory except it loads one specific check .
train
false
54,753
def get_visibility(name):
    if SPECIAL.match(name):
        visibility = 'special'
    elif PRIVATE.match(name):
        visibility = 'private'
    elif PROTECTED.match(name):
        visibility = 'protected'
    else:
        visibility = 'public'
    return visibility
[ "def", "get_visibility", "(", "name", ")", ":", "if", "SPECIAL", ".", "match", "(", "name", ")", ":", "visibility", "=", "'special'", "elif", "PRIVATE", ".", "match", "(", "name", ")", ":", "visibility", "=", "'private'", "elif", "PROTECTED", ".", "match", "(", "name", ")", ":", "visibility", "=", "'protected'", "else", ":", "visibility", "=", "'public'", "return", "visibility" ]
return the visibility from a name: public .
train
true
54,754
def test_np_rng():
    rngs = [make_np_rng(rng_or_seed=42, which_method='uniform'),
            make_np_rng(rng_or_seed=numpy.random.RandomState(42), which_method='uniform'),
            make_np_rng(default_seed=42),
            make_np_rng()]
    random_numbers = rngs[0].uniform(size=(100,))
    equals = numpy.ones((100,))
    for rng in rngs[1:]:
        equal = (random_numbers == rng.uniform(size=(100,)))
        equals *= equal
    assert equals.all()
[ "def", "test_np_rng", "(", ")", ":", "rngs", "=", "[", "make_np_rng", "(", "rng_or_seed", "=", "42", ",", "which_method", "=", "'uniform'", ")", ",", "make_np_rng", "(", "rng_or_seed", "=", "numpy", ".", "random", ".", "RandomState", "(", "42", ")", ",", "which_method", "=", "'uniform'", ")", ",", "make_np_rng", "(", "default_seed", "=", "42", ")", ",", "make_np_rng", "(", ")", "]", "random_numbers", "=", "rngs", "[", "0", "]", ".", "uniform", "(", "size", "=", "(", "100", ",", ")", ")", "equals", "=", "numpy", ".", "ones", "(", "(", "100", ",", ")", ")", "for", "rng", "in", "rngs", "[", "1", ":", "]", ":", "equal", "=", "(", "random_numbers", "==", "rng", ".", "uniform", "(", "size", "=", "(", "100", ",", ")", ")", ")", "equals", "*=", "equal", "assert", "equals", ".", "all", "(", ")" ]
tests that the four possible ways of creating a numpy rng give the same results with the same seed .
train
false
54,755
def track_time_change(year=None, month=None, day=None, hour=None, minute=None, second=None):
    def track_time_change_decorator(action):
        'Decorator to track time changes.'
        event.track_time_change(HASS, functools.partial(action, HASS), year, month, day, hour, minute, second)
        return action
    return track_time_change_decorator
[ "def", "track_time_change", "(", "year", "=", "None", ",", "month", "=", "None", ",", "day", "=", "None", ",", "hour", "=", "None", ",", "minute", "=", "None", ",", "second", "=", "None", ")", ":", "def", "track_time_change_decorator", "(", "action", ")", ":", "event", ".", "track_time_change", "(", "HASS", ",", "functools", ".", "partial", "(", "action", ",", "HASS", ")", ",", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", "return", "action", "return", "track_time_change_decorator" ]
decorator factory to track time changes .
train
false
54,756
def lz4_encode(payload):
    return lz4f.compressFrame(payload)
[ "def", "lz4_encode", "(", "payload", ")", ":", "return", "lz4f", ".", "compressFrame", "(", "payload", ")" ]
encode payload using interoperable lz4 framing .
train
false
54,757
def rc(group, **kwargs):
    aliases = {u'lw': u'linewidth', u'ls': u'linestyle', u'c': u'color', u'fc': u'facecolor', u'ec': u'edgecolor', u'mew': u'markeredgewidth', u'aa': u'antialiased'}
    if is_string_like(group):
        group = (group,)
    for g in group:
        for (k, v) in six.iteritems(kwargs):
            name = (aliases.get(k) or k)
            key = (u'%s.%s' % (g, name))
            try:
                rcParams[key] = v
            except KeyError:
                raise KeyError((u'Unrecognized key "%s" for group "%s" and name "%s"' % (key, g, name)))
[ "def", "rc", "(", "group", ",", "**", "kwargs", ")", ":", "aliases", "=", "{", "u'lw'", ":", "u'linewidth'", ",", "u'ls'", ":", "u'linestyle'", ",", "u'c'", ":", "u'color'", ",", "u'fc'", ":", "u'facecolor'", ",", "u'ec'", ":", "u'edgecolor'", ",", "u'mew'", ":", "u'markeredgewidth'", ",", "u'aa'", ":", "u'antialiased'", "}", "if", "is_string_like", "(", "group", ")", ":", "group", "=", "(", "group", ",", ")", "for", "g", "in", "group", ":", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "kwargs", ")", ":", "name", "=", "(", "aliases", ".", "get", "(", "k", ")", "or", "k", ")", "key", "=", "(", "u'%s.%s'", "%", "(", "g", ",", "name", ")", ")", "try", ":", "rcParams", "[", "key", "]", "=", "v", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "u'Unrecognized key \"%s\" for group \"%s\" and name \"%s\"'", "%", "(", "key", ",", "g", ",", "name", ")", ")", ")" ]
set the current rc params .
train
false
54,758
@allow_public def contribute_view(request): return serve(request, 'contribute.json', document_root=settings.ROOT)
[ "@", "allow_public", "def", "contribute_view", "(", "request", ")", ":", "return", "serve", "(", "request", ",", "'contribute.json'", ",", "document_root", "=", "settings", ".", "ROOT", ")" ]
serve the static contribute.json file .
train
false
54,759
def _get_view_to_display_matrix(scene): from mayavi.core.ui.mayavi_scene import MayaviScene from tvtk.pyface.tvtk_scene import TVTKScene if (not isinstance(scene, (MayaviScene, TVTKScene))): raise TypeError(('scene must be an instance of TVTKScene/MayaviScene, found type %s' % type(scene))) (x, y) = tuple(scene.get_size()) view_to_disp_mat = np.array([[(x / 2.0), 0.0, 0.0, (x / 2.0)], [0.0, ((- y) / 2.0), 0.0, (y / 2.0)], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]) return view_to_disp_mat
[ "def", "_get_view_to_display_matrix", "(", "scene", ")", ":", "from", "mayavi", ".", "core", ".", "ui", ".", "mayavi_scene", "import", "MayaviScene", "from", "tvtk", ".", "pyface", ".", "tvtk_scene", "import", "TVTKScene", "if", "(", "not", "isinstance", "(", "scene", ",", "(", "MayaviScene", ",", "TVTKScene", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'scene must be an instance of TVTKScene/MayaviScene, found type %s'", "%", "type", "(", "scene", ")", ")", ")", "(", "x", ",", "y", ")", "=", "tuple", "(", "scene", ".", "get_size", "(", ")", ")", "view_to_disp_mat", "=", "np", ".", "array", "(", "[", "[", "(", "x", "/", "2.0", ")", ",", "0.0", ",", "0.0", ",", "(", "x", "/", "2.0", ")", "]", ",", "[", "0.0", ",", "(", "(", "-", "y", ")", "/", "2.0", ")", ",", "0.0", ",", "(", "y", "/", "2.0", ")", "]", ",", "[", "0.0", ",", "0.0", ",", "1.0", ",", "0.0", "]", ",", "[", "0.0", ",", "0.0", ",", "0.0", ",", "1.0", "]", "]", ")", "return", "view_to_disp_mat" ]
return the 4x4 matrix to convert view coordinates to display coordinates .
train
false
54,760
def debug_msg(text, prefix='Debug'): msg(colorize(('%s: %s' % (prefix, str(text))), 'cyan'))
[ "def", "debug_msg", "(", "text", ",", "prefix", "=", "'Debug'", ")", ":", "msg", "(", "colorize", "(", "(", "'%s: %s'", "%", "(", "prefix", ",", "str", "(", "text", ")", ")", ")", ",", "'cyan'", ")", ")" ]
colorize debug message with prefix .
train
false
54,764
@FileSystem.in_directory(current_directory, 'django', 'alfaces') def test_django_background_server_running_in_background(): import tornado.ioloop import tornado.web class MainHandler(tornado.web.RequestHandler, ): def get(self): self.write('Hello, world') raise SystemExit() def runserver(): application = tornado.web.Application([('/', MainHandler)]) application.listen(8000) tornado.ioloop.IOLoop.instance().start() server = multiprocessing.Process(target=runserver) server.start() time.sleep(1) e = 'Lettuce could not run the builtin Django server at 0.0.0.0:8000"\nmaybe you forgot a "runserver" instance running ?\n\nwell if you really do not want lettuce to run the server for you, then just run:\n\npython manage.py --no-server' try: (status, out) = commands.getstatusoutput('python manage.py harvest --verbosity=3 --no-color') assert_equals(out, e) assert_not_equals(status, 0) finally: os.kill(server.pid, 9)
[ "@", "FileSystem", ".", "in_directory", "(", "current_directory", ",", "'django'", ",", "'alfaces'", ")", "def", "test_django_background_server_running_in_background", "(", ")", ":", "import", "tornado", ".", "ioloop", "import", "tornado", ".", "web", "class", "MainHandler", "(", "tornado", ".", "web", ".", "RequestHandler", ",", ")", ":", "def", "get", "(", "self", ")", ":", "self", ".", "write", "(", "'Hello, world'", ")", "raise", "SystemExit", "(", ")", "def", "runserver", "(", ")", ":", "application", "=", "tornado", ".", "web", ".", "Application", "(", "[", "(", "'/'", ",", "MainHandler", ")", "]", ")", "application", ".", "listen", "(", "8000", ")", "tornado", ".", "ioloop", ".", "IOLoop", ".", "instance", "(", ")", ".", "start", "(", ")", "server", "=", "multiprocessing", ".", "Process", "(", "target", "=", "runserver", ")", "server", ".", "start", "(", ")", "time", ".", "sleep", "(", "1", ")", "e", "=", "'Lettuce could not run the builtin Django server at 0.0.0.0:8000\"\\nmaybe you forgot a \"runserver\" instance running ?\\n\\nwell if you really do not want lettuce to run the server for you, then just run:\\n\\npython manage.py --no-server'", "try", ":", "(", "status", ",", "out", ")", "=", "commands", ".", "getstatusoutput", "(", "'python manage.py harvest --verbosity=3 --no-color'", ")", "assert_equals", "(", "out", ",", "e", ")", "assert_not_equals", "(", "status", ",", "0", ")", "finally", ":", "os", ".", "kill", "(", "server", ".", "pid", ",", "9", ")" ]
the django builtin server fails if the http port is not available .
train
false
54,765
def human_readable_to_bytes(value): value = value.lower() if (value[(-2):] == 'ib'): suffix = value[(-3):].lower() else: suffix = value[(-2):].lower() has_size_identifier = ((len(value) >= 2) and (suffix in SIZE_SUFFIX)) if (not has_size_identifier): try: return int(value) except ValueError: raise ValueError(('Invalid size value: %s' % value)) else: multiplier = SIZE_SUFFIX[suffix] return (int(value[:(- len(suffix))]) * multiplier)
[ "def", "human_readable_to_bytes", "(", "value", ")", ":", "value", "=", "value", ".", "lower", "(", ")", "if", "(", "value", "[", "(", "-", "2", ")", ":", "]", "==", "'ib'", ")", ":", "suffix", "=", "value", "[", "(", "-", "3", ")", ":", "]", ".", "lower", "(", ")", "else", ":", "suffix", "=", "value", "[", "(", "-", "2", ")", ":", "]", ".", "lower", "(", ")", "has_size_identifier", "=", "(", "(", "len", "(", "value", ")", ">=", "2", ")", "and", "(", "suffix", "in", "SIZE_SUFFIX", ")", ")", "if", "(", "not", "has_size_identifier", ")", ":", "try", ":", "return", "int", "(", "value", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "(", "'Invalid size value: %s'", "%", "value", ")", ")", "else", ":", "multiplier", "=", "SIZE_SUFFIX", "[", "suffix", "]", "return", "(", "int", "(", "value", "[", ":", "(", "-", "len", "(", "suffix", ")", ")", "]", ")", "*", "multiplier", ")" ]
converts a human readable size to bytes .
train
false
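A minimal self-contained sketch of the parsing logic in the human_readable_to_bytes snippet above. The SIZE_SUFFIX table is not included in the record, so the mapping below is an assumption covering the usual decimal and binary suffixes; the real constant may differ.

SIZE_SUFFIX = {  # assumed mapping; the record does not show the real one
    'kb': 10**3, 'mb': 10**6, 'gb': 10**9, 'tb': 10**12,
    'kib': 2**10, 'mib': 2**20, 'gib': 2**30, 'tib': 2**40,
}

def human_readable_to_bytes(value):
    value = value.lower()
    # 'ib' suffixes are three characters long ('mib'), the rest two ('mb')
    suffix = value[-3:] if value.endswith('ib') else value[-2:]
    if len(value) >= 2 and suffix in SIZE_SUFFIX:
        return int(value[:-len(suffix)]) * SIZE_SUFFIX[suffix]
    return int(value)  # plain byte count, e.g. '1024'

assert human_readable_to_bytes('10mb') == 10 * 10**6
assert human_readable_to_bytes('10MiB') == 10 * 2**20
assert human_readable_to_bytes('1024') == 1024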
54,766
@mock_streams('stdout') def test_global_parallel_honors_runs_once(): @decorators.runs_once def mytask(): print 'yolo' with settings(hide('everything'), parallel=True): execute(mytask, hosts=['localhost', '127.0.0.1']) result = sys.stdout.getvalue() eq_(result, 'yolo\n') assert (result != 'yolo\nyolo\n')
[ "@", "mock_streams", "(", "'stdout'", ")", "def", "test_global_parallel_honors_runs_once", "(", ")", ":", "@", "decorators", ".", "runs_once", "def", "mytask", "(", ")", ":", "print", "'yolo'", "with", "settings", "(", "hide", "(", "'everything'", ")", ",", "parallel", "=", "True", ")", ":", "execute", "(", "mytask", ",", "hosts", "=", "[", "'localhost'", ",", "'127.0.0.1'", "]", ")", "result", "=", "sys", ".", "stdout", ".", "getvalue", "(", ")", "eq_", "(", "result", ",", "'yolo\\n'", ")", "assert", "(", "result", "!=", "'yolo\\nyolo\\n'", ")" ]
fab -p should honor @runs_once .
train
false
54,767
def request_latency(results, limit): scenario_results = [r['scenario'] for r in results if (r['scenario'].get('metrics') and r['scenario']['metrics'].get('call_durations'))] if (len(scenario_results) > 0): unique_metrics = [] for result in scenario_results: if (result['metrics'] not in unique_metrics): unique_metrics.append(result['metrics']) total_requests = 0 requests_under_limit = 0 for metric in unique_metrics: for (duration, num_requests) in metric['call_durations'].iteritems(): if (float(duration) <= limit): requests_under_limit += num_requests total_requests += (metric['ok_count'] + metric['err_count']) return (float(requests_under_limit) / total_requests) return None
[ "def", "request_latency", "(", "results", ",", "limit", ")", ":", "scenario_results", "=", "[", "r", "[", "'scenario'", "]", "for", "r", "in", "results", "if", "(", "r", "[", "'scenario'", "]", ".", "get", "(", "'metrics'", ")", "and", "r", "[", "'scenario'", "]", "[", "'metrics'", "]", ".", "get", "(", "'call_durations'", ")", ")", "]", "if", "(", "len", "(", "scenario_results", ")", ">", "0", ")", ":", "unique_metrics", "=", "[", "]", "for", "result", "in", "scenario_results", ":", "if", "(", "result", "[", "'metrics'", "]", "not", "in", "unique_metrics", ")", ":", "unique_metrics", ".", "append", "(", "result", "[", "'metrics'", "]", ")", "total_requests", "=", "0", "requests_under_limit", "=", "0", "for", "metric", "in", "unique_metrics", ":", "for", "(", "duration", ",", "num_requests", ")", "in", "metric", "[", "'call_durations'", "]", ".", "iteritems", "(", ")", ":", "if", "(", "float", "(", "duration", ")", "<=", "limit", ")", ":", "requests_under_limit", "+=", "num_requests", "total_requests", "+=", "(", "metric", "[", "'ok_count'", "]", "+", "metric", "[", "'err_count'", "]", ")", "return", "(", "float", "(", "requests_under_limit", ")", "/", "total_requests", ")", "return", "None" ]
calculate the percentage of scenario requests that have a latency under the specified time limit .
train
false
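A Python 3 re-sketch of the binning the request_latency snippet above performs, with made-up metrics; the dict shape mirrors the scenario results the function expects.

results = [{'scenario': {'metrics': {
    'call_durations': {'0.1': 40, '0.5': 50, '2.0': 10},
    'ok_count': 95, 'err_count': 5,
}}}]
limit = 1.0
metrics = results[0]['scenario']['metrics']
# count requests whose duration bin falls under the limit
under = sum(n for d, n in metrics['call_durations'].items() if float(d) <= limit)
total = metrics['ok_count'] + metrics['err_count']
print(under / total)  # 0.9 -> 90% of requests finished within 1.0s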
54,768
@with_setup(prepare_stdout, registry.clear) def test_jsonreport_output_with_no_steps(): with check_jsonreport(u'missing_steps'): runner = Runner(feature_name(u'missing_steps'), enable_jsonreport=True) runner.run()
[ "@", "with_setup", "(", "prepare_stdout", ",", "registry", ".", "clear", ")", "def", "test_jsonreport_output_with_no_steps", "(", ")", ":", "with", "check_jsonreport", "(", "u'missing_steps'", ")", ":", "runner", "=", "Runner", "(", "feature_name", "(", "u'missing_steps'", ")", ",", "enable_jsonreport", "=", "True", ")", "runner", ".", "run", "(", ")" ]
test jsonreport output with no steps .
train
false
54,769
def second(seq): return next(itertools.islice(seq, 1, None))
[ "def", "second", "(", "seq", ")", ":", "return", "next", "(", "itertools", ".", "islice", "(", "seq", ",", "1", ",", "None", ")", ")" ]
the second element in a sequence .
train
false
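A quick usage sketch for the second helper above; islice skips the first element and next pulls the one after it, so it works on any iterable, not just sequences that support indexing.

import itertools

def second(seq):
    return next(itertools.islice(seq, 1, None))

assert second([10, 20, 30]) == 20
assert second(iter('abc')) == 'b'  # plain iterators work too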
54,770
@contextmanager def collect_profile(file_prefix): import cProfile import uuid profiler = cProfile.Profile() profiler.enable() try: (yield) finally: profiler.disable() profiler.dump_stats('{0}_{1}_master.profile'.format(file_prefix, uuid.uuid4()))
[ "@", "contextmanager", "def", "collect_profile", "(", "file_prefix", ")", ":", "import", "cProfile", "import", "uuid", "profiler", "=", "cProfile", ".", "Profile", "(", ")", "profiler", ".", "enable", "(", ")", "try", ":", "(", "yield", ")", "finally", ":", "profiler", ".", "disable", "(", ")", "profiler", ".", "dump_stats", "(", "'{0}_{1}_master.profile'", ".", "format", "(", "file_prefix", ",", "uuid", ".", "uuid4", "(", ")", ")", ")" ]
context manager to collect profile information .
train
false
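A usage sketch of the collect_profile pattern above, with the uuid-based file naming replaced by a caller-supplied path (an editorial simplification) and a pstats read-back to show the collected stats are usable.

import cProfile
import pstats
from contextlib import contextmanager

@contextmanager
def profiled(path):
    profiler = cProfile.Profile()
    profiler.enable()
    try:
        yield
    finally:
        profiler.disable()
        profiler.dump_stats(path)  # same dump step as collect_profile

with profiled('demo.profile'):
    sum(i * i for i in range(10_000))

pstats.Stats('demo.profile').sort_stats('cumulative').print_stats(5)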
54,771
def guard_null(context, builder, value, exc_tuple): with builder.if_then(is_scalar_zero(builder, value), likely=False): exc = exc_tuple[0] exc_args = (exc_tuple[1:] or None) context.call_conv.return_user_exc(builder, exc, exc_args)
[ "def", "guard_null", "(", "context", ",", "builder", ",", "value", ",", "exc_tuple", ")", ":", "with", "builder", ".", "if_then", "(", "is_scalar_zero", "(", "builder", ",", "value", ")", ",", "likely", "=", "False", ")", ":", "exc", "=", "exc_tuple", "[", "0", "]", "exc_args", "=", "(", "exc_tuple", "[", "1", ":", "]", "or", "None", ")", "context", ".", "call_conv", ".", "return_user_exc", "(", "builder", ",", "exc", ",", "exc_args", ")" ]
guard against *value* being null or zero .
train
false
54,772
def ode_separable(eq, func, order, match): x = func.args[0] f = func.func C1 = get_numbered_constants(eq, num=1) r = match u = r.get('hint', f(x)) return Eq(Integral(((r['m2']['coeff'] * r['m2'][r['y']]) / r['m1'][r['y']]), (r['y'], None, u)), (Integral((((- r['m1']['coeff']) * r['m1'][x]) / r['m2'][x]), x) + C1))
[ "def", "ode_separable", "(", "eq", ",", "func", ",", "order", ",", "match", ")", ":", "x", "=", "func", ".", "args", "[", "0", "]", "f", "=", "func", ".", "func", "C1", "=", "get_numbered_constants", "(", "eq", ",", "num", "=", "1", ")", "r", "=", "match", "u", "=", "r", ".", "get", "(", "'hint'", ",", "f", "(", "x", ")", ")", "return", "Eq", "(", "Integral", "(", "(", "(", "r", "[", "'m2'", "]", "[", "'coeff'", "]", "*", "r", "[", "'m2'", "]", "[", "r", "[", "'y'", "]", "]", ")", "/", "r", "[", "'m1'", "]", "[", "r", "[", "'y'", "]", "]", ")", ",", "(", "r", "[", "'y'", "]", ",", "None", ",", "u", ")", ")", ",", "(", "Integral", "(", "(", "(", "(", "-", "r", "[", "'m1'", "]", "[", "'coeff'", "]", ")", "*", "r", "[", "'m1'", "]", "[", "x", "]", ")", "/", "r", "[", "'m2'", "]", "[", "x", "]", ")", ",", "x", ")", "+", "C1", ")", ")" ]
solves separable 1st order differential equations .
train
false
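The integral equation returned by ode_separable above is the textbook separation-of-variables identity; restated in LaTeX with generic factor names P, Q, R, S standing in for the m1/m2 match entries (the naming is an editorial assumption, with constant coefficients folded into P and S):

% an ODE matched as separable,  P(x)\,Q(y) + R(x)\,S(y)\,\frac{dy}{dx} = 0,
% is solved by dividing through by Q(y)\,R(x) and integrating each
% side in its own variable:
\int \frac{S(y)}{Q(y)}\,dy = -\int \frac{P(x)}{R(x)}\,dx + C_1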
54,773
@pytest.fixture() def celery_app(request, celery_config, celery_parameters, celery_enable_logging, use_celery_app_trap): mark = request.node.get_marker(u'celery') config = dict(celery_config, **(mark.kwargs if mark else {})) with _create_app(request, enable_logging=celery_enable_logging, use_trap=use_celery_app_trap, parameters=celery_parameters, **config) as app: (yield app)
[ "@", "pytest", ".", "fixture", "(", ")", "def", "celery_app", "(", "request", ",", "celery_config", ",", "celery_parameters", ",", "celery_enable_logging", ",", "use_celery_app_trap", ")", ":", "mark", "=", "request", ".", "node", ".", "get_marker", "(", "u'celery'", ")", "config", "=", "dict", "(", "celery_config", ",", "**", "(", "mark", ".", "kwargs", "if", "mark", "else", "{", "}", ")", ")", "with", "_create_app", "(", "request", ",", "enable_logging", "=", "celery_enable_logging", ",", "use_trap", "=", "use_celery_app_trap", ",", "parameters", "=", "celery_parameters", ",", "**", "config", ")", "as", "app", ":", "(", "yield", "app", ")" ]
fixture creating a celery application instance .
train
false
54,774
def checkMatch(input, prediction, sparse=True, verbosity=0): if sparse: activeElementsInInput = set(input) activeElementsInPrediction = set(prediction) else: activeElementsInInput = set(input.nonzero()[0]) activeElementsInPrediction = set(prediction.nonzero()[0]) totalActiveInPrediction = len(activeElementsInPrediction) totalActiveInInput = len(activeElementsInInput) foundInInput = len(activeElementsInPrediction.intersection(activeElementsInInput)) missingFromInput = len(activeElementsInPrediction.difference(activeElementsInInput)) missingFromPrediction = len(activeElementsInInput.difference(activeElementsInPrediction)) if (verbosity >= 1): print 'preds. found in input:', foundInInput, 'out of', totalActiveInPrediction, print '; preds. missing from input:', missingFromInput, 'out of', totalActiveInPrediction, print '; unexpected active in input:', missingFromPrediction, 'out of', totalActiveInInput return (foundInInput, totalActiveInInput, missingFromInput, totalActiveInPrediction)
[ "def", "checkMatch", "(", "input", ",", "prediction", ",", "sparse", "=", "True", ",", "verbosity", "=", "0", ")", ":", "if", "sparse", ":", "activeElementsInInput", "=", "set", "(", "input", ")", "activeElementsInPrediction", "=", "set", "(", "prediction", ")", "else", ":", "activeElementsInInput", "=", "set", "(", "input", ".", "nonzero", "(", ")", "[", "0", "]", ")", "activeElementsInPrediction", "=", "set", "(", "prediction", ".", "nonzero", "(", ")", "[", "0", "]", ")", "totalActiveInPrediction", "=", "len", "(", "activeElementsInPrediction", ")", "totalActiveInInput", "=", "len", "(", "activeElementsInInput", ")", "foundInInput", "=", "len", "(", "activeElementsInPrediction", ".", "intersection", "(", "activeElementsInInput", ")", ")", "missingFromInput", "=", "len", "(", "activeElementsInPrediction", ".", "difference", "(", "activeElementsInInput", ")", ")", "missingFromPrediction", "=", "len", "(", "activeElementsInInput", ".", "difference", "(", "activeElementsInPrediction", ")", ")", "if", "(", "verbosity", ">=", "1", ")", ":", "print", "'preds. found in input:'", ",", "foundInInput", ",", "'out of'", ",", "totalActiveInPrediction", ",", "print", "'; preds. missing from input:'", ",", "missingFromInput", ",", "'out of'", ",", "totalActiveInPrediction", ",", "print", "'; unexpected active in input:'", ",", "missingFromPrediction", ",", "'out of'", ",", "totalActiveInInput", "return", "(", "foundInInput", ",", "totalActiveInInput", ",", "missingFromInput", ",", "totalActiveInPrediction", ")" ]
compares the actual input with the predicted input and returns match statistics ; prints per-category counts when verbosity >= 1 .
train
true
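A tiny illustration of the set arithmetic the checkMatch snippet above performs on its sparse path, using made-up active-bit indices; the final tuple mirrors the function's return ordering.

input_bits = {1, 4, 7, 9}       # actual active elements
predicted_bits = {1, 4, 8}      # predicted active elements

found_in_input = len(predicted_bits & input_bits)      # 2: correctly predicted
missing_from_input = len(predicted_bits - input_bits)  # 1: predicted but absent
unexpected = len(input_bits - predicted_bits)          # 2: active but unpredicted

print((found_in_input, len(input_bits), missing_from_input, len(predicted_bits)))
# (2, 4, 1, 3)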
54,775
def _user_has_module_perms(user, app_label): for backend in auth.get_backends(): if (not hasattr(backend, 'has_module_perms')): continue try: if backend.has_module_perms(user, app_label): return True except PermissionDenied: return False return False
[ "def", "_user_has_module_perms", "(", "user", ",", "app_label", ")", ":", "for", "backend", "in", "auth", ".", "get_backends", "(", ")", ":", "if", "(", "not", "hasattr", "(", "backend", ",", "'has_module_perms'", ")", ")", ":", "continue", "try", ":", "if", "backend", ".", "has_module_perms", "(", "user", ",", "app_label", ")", ":", "return", "True", "except", "PermissionDenied", ":", "return", "False", "return", "False" ]
returns whether any auth backend grants the user permissions for the given app ; a backend can raise permissiondenied to short-circuit permission checking .
train
false
54,777
@with_setup(setup, teardown) def test_show_negative_chains(): negative_chains.show_negative_chains('dbm.pkl')
[ "@", "with_setup", "(", "setup", ",", "teardown", ")", "def", "test_show_negative_chains", "(", ")", ":", "negative_chains", ".", "show_negative_chains", "(", "'dbm.pkl'", ")" ]
test the show_negative_chains script main function .
train
false
54,779
def _infer_decorator_callchain(node): if (not isinstance(node, Function)): return if (not node.parent): return try: result = next(node.infer_call_result(node.parent)) except (StopIteration, InferenceError): return if isinstance(result, Instance): result = result._proxied if isinstance(result, Class): if result.is_subtype_of(('%s.classmethod' % BUILTINS)): return 'classmethod' if result.is_subtype_of(('%s.staticmethod' % BUILTINS)): return 'staticmethod'
[ "def", "_infer_decorator_callchain", "(", "node", ")", ":", "if", "(", "not", "isinstance", "(", "node", ",", "Function", ")", ")", ":", "return", "if", "(", "not", "node", ".", "parent", ")", ":", "return", "try", ":", "result", "=", "next", "(", "node", ".", "infer_call_result", "(", "node", ".", "parent", ")", ")", "except", "(", "StopIteration", ",", "InferenceError", ")", ":", "return", "if", "isinstance", "(", "result", ",", "Instance", ")", ":", "result", "=", "result", ".", "_proxied", "if", "isinstance", "(", "result", ",", "Class", ")", ":", "if", "result", ".", "is_subtype_of", "(", "(", "'%s.classmethod'", "%", "BUILTINS", ")", ")", ":", "return", "'classmethod'", "if", "result", ".", "is_subtype_of", "(", "(", "'%s.staticmethod'", "%", "BUILTINS", ")", ")", ":", "return", "'staticmethod'" ]
detect decorator call chaining and see if the end result is a static or a classmethod .
train
false
54,780
def txt_records_for_name(name): if (not DNS_AVAILABLE): raise errors.DependencyError('{0} is required to use this function'.format(DNS_REQUIREMENT)) try: dns_response = dns.resolver.query(name, 'TXT') except dns.resolver.NXDOMAIN as error: return [] except dns.exception.DNSException as error: logger.error('Error resolving %s: %s', name, str(error)) return [] return [txt_rec.decode('utf-8') for rdata in dns_response for txt_rec in rdata.strings]
[ "def", "txt_records_for_name", "(", "name", ")", ":", "if", "(", "not", "DNS_AVAILABLE", ")", ":", "raise", "errors", ".", "DependencyError", "(", "'{0} is required to use this function'", ".", "format", "(", "DNS_REQUIREMENT", ")", ")", "try", ":", "dns_response", "=", "dns", ".", "resolver", ".", "query", "(", "name", ",", "'TXT'", ")", "except", "dns", ".", "resolver", ".", "NXDOMAIN", "as", "error", ":", "return", "[", "]", "except", "dns", ".", "exception", ".", "DNSException", "as", "error", ":", "logger", ".", "error", "(", "'Error resolving %s: %s'", ",", "name", ",", "str", "(", "error", ")", ")", "return", "[", "]", "return", "[", "txt_rec", ".", "decode", "(", "'utf-8'", ")", "for", "rdata", "in", "dns_response", "for", "txt_rec", "in", "rdata", ".", "strings", "]" ]
resolve the name and return the txt records .
train
false
54,781
def write_csv_file(path, app_messages, lang_dict): app_messages.sort((lambda x, y: cmp(x[1], y[1]))) from csv import writer with open(path, u'wb') as msgfile: w = writer(msgfile, lineterminator=u'\n') for (p, m) in app_messages: t = lang_dict.get(m, u'') t = re.sub(u'{\\s?([0-9]+)\\s?}', u'{\\g<1>}', t) w.writerow([(p.encode(u'utf-8') if p else u''), m.encode(u'utf-8'), t.encode(u'utf-8')])
[ "def", "write_csv_file", "(", "path", ",", "app_messages", ",", "lang_dict", ")", ":", "app_messages", ".", "sort", "(", "(", "lambda", "x", ",", "y", ":", "cmp", "(", "x", "[", "1", "]", ",", "y", "[", "1", "]", ")", ")", ")", "from", "csv", "import", "writer", "with", "open", "(", "path", ",", "u'wb'", ")", "as", "msgfile", ":", "w", "=", "writer", "(", "msgfile", ",", "lineterminator", "=", "u'\\n'", ")", "for", "(", "p", ",", "m", ")", "in", "app_messages", ":", "t", "=", "lang_dict", ".", "get", "(", "m", ",", "u''", ")", "t", "=", "re", ".", "sub", "(", "u'{\\\\s?([0-9]+)\\\\s?}'", ",", "u'{\\\\g<1>}'", ",", "t", ")", "w", ".", "writerow", "(", "[", "(", "p", ".", "encode", "(", "u'utf-8'", ")", "if", "p", "else", "u''", ")", ",", "m", ".", "encode", "(", "u'utf-8'", ")", ",", "t", ".", "encode", "(", "u'utf-8'", ")", "]", ")" ]
write translation csv file .
train
false
54,782
def popen_wrapper(args, os_err_exc_type=CommandError): try: p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE, close_fds=(os.name != 'nt'), universal_newlines=True) except OSError as e: six.reraise(os_err_exc_type, os_err_exc_type(('Error executing %s: %s' % (args[0], e.strerror))), sys.exc_info()[2]) (output, errors) = p.communicate() return (output, force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True), p.returncode)
[ "def", "popen_wrapper", "(", "args", ",", "os_err_exc_type", "=", "CommandError", ")", ":", "try", ":", "p", "=", "Popen", "(", "args", ",", "shell", "=", "False", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "close_fds", "=", "(", "os", ".", "name", "!=", "'nt'", ")", ",", "universal_newlines", "=", "True", ")", "except", "OSError", "as", "e", ":", "six", ".", "reraise", "(", "os_err_exc_type", ",", "os_err_exc_type", "(", "(", "'Error executing %s: %s'", "%", "(", "args", "[", "0", "]", ",", "e", ".", "strerror", ")", ")", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "(", "output", ",", "errors", ")", "=", "p", ".", "communicate", "(", ")", "return", "(", "output", ",", "force_text", "(", "errors", ",", "DEFAULT_LOCALE_ENCODING", ",", "strings_only", "=", "True", ")", ",", "p", ".", "returncode", ")" ]
friendly wrapper around popen .
train
false
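A minimal runnable sketch of the popen_wrapper pattern above, using the running interpreter as the command so it works anywhere Python does; the Django-specific error re-raising and encoding handling are omitted.

import sys
from subprocess import PIPE, Popen

p = Popen([sys.executable, '-c', 'print("hello")'],
          stdout=PIPE, stderr=PIPE, universal_newlines=True)
output, errors = p.communicate()
print(repr(output), repr(errors), p.returncode)  # 'hello\n' '' 0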
54,783
def staticfiles_urlpatterns(prefix=None): if (prefix is None): prefix = settings.STATIC_URL return static(prefix, view='django.contrib.staticfiles.views.serve')
[ "def", "staticfiles_urlpatterns", "(", "prefix", "=", "None", ")", ":", "if", "(", "prefix", "is", "None", ")", ":", "prefix", "=", "settings", ".", "STATIC_URL", "return", "static", "(", "prefix", ",", "view", "=", "'django.contrib.staticfiles.views.serve'", ")" ]
helper function to return a url pattern for serving static files .
train
false
54,784
def match_patterns(pathname, patterns): for pattern in patterns: if fnmatch(pathname, pattern): return True return False
[ "def", "match_patterns", "(", "pathname", ",", "patterns", ")", ":", "for", "pattern", "in", "patterns", ":", "if", "fnmatch", "(", "pathname", ",", "pattern", ")", ":", "return", "True", "return", "False" ]
returns true if the pathname matches any of the given patterns .
train
true
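An equivalent any()-based restatement of the match_patterns snippet above; behavior should be identical for shell-style patterns.

from fnmatch import fnmatch

def match_patterns(pathname, patterns):
    # True as soon as any shell-style pattern matches
    return any(fnmatch(pathname, pattern) for pattern in patterns)

assert match_patterns('report.csv', ['*.txt', '*.csv'])
assert not match_patterns('report.csv', ['*.txt', '*.md'])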
54,785
def itemlist(tparams): return [vv for (kk, vv) in tparams.iteritems()]
[ "def", "itemlist", "(", "tparams", ")", ":", "return", "[", "vv", "for", "(", "kk", ",", "vv", ")", "in", "tparams", ".", "iteritems", "(", ")", "]" ]
get the list of parameters .
train
false
54,786
def tty(*args, **kwargs): return 'ERROR: This function has been moved to cmd.tty'
[ "def", "tty", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "'ERROR: This function has been moved to cmd.tty'" ]
deprecated! moved to cmdmod .
train
false
54,787
def GetTokenInformation(token, information_class): data_size = ctypes.wintypes.DWORD() ctypes.windll.advapi32.GetTokenInformation(token, information_class.num, 0, 0, ctypes.byref(data_size)) data = ctypes.create_string_buffer(data_size.value) handle_nonzero_success(ctypes.windll.advapi32.GetTokenInformation(token, information_class.num, ctypes.byref(data), ctypes.sizeof(data), ctypes.byref(data_size))) return ctypes.cast(data, ctypes.POINTER(TOKEN_USER)).contents
[ "def", "GetTokenInformation", "(", "token", ",", "information_class", ")", ":", "data_size", "=", "ctypes", ".", "wintypes", ".", "DWORD", "(", ")", "ctypes", ".", "windll", ".", "advapi32", ".", "GetTokenInformation", "(", "token", ",", "information_class", ".", "num", ",", "0", ",", "0", ",", "ctypes", ".", "byref", "(", "data_size", ")", ")", "data", "=", "ctypes", ".", "create_string_buffer", "(", "data_size", ".", "value", ")", "handle_nonzero_success", "(", "ctypes", ".", "windll", ".", "advapi32", ".", "GetTokenInformation", "(", "token", ",", "information_class", ".", "num", ",", "ctypes", ".", "byref", "(", "data", ")", ",", "ctypes", ".", "sizeof", "(", "data", ")", ",", "ctypes", ".", "byref", "(", "data_size", ")", ")", ")", "return", "ctypes", ".", "cast", "(", "data", ",", "ctypes", ".", "POINTER", "(", "TOKEN_USER", ")", ")", ".", "contents" ]
given a token , return the token information for the requested information class .
train
true
54,788
def fill_diagonal(a, val, wrap=False): if (a.ndim < 2): raise ValueError('array must be at least 2-d') end = None if (a.ndim == 2): step = (a.shape[1] + 1) if (not wrap): end = (a.shape[1] * a.shape[1]) else: if (not numpy.alltrue((numpy.diff(a.shape) == 0))): raise ValueError('All dimensions of input must be of equal length') step = (1 + numpy.cumprod(a.shape[:(-1)]).sum()) a.ravel()[:end:step] = val
[ "def", "fill_diagonal", "(", "a", ",", "val", ",", "wrap", "=", "False", ")", ":", "if", "(", "a", ".", "ndim", "<", "2", ")", ":", "raise", "ValueError", "(", "'array must be at least 2-d'", ")", "end", "=", "None", "if", "(", "a", ".", "ndim", "==", "2", ")", ":", "step", "=", "(", "a", ".", "shape", "[", "1", "]", "+", "1", ")", "if", "(", "not", "wrap", ")", ":", "end", "=", "(", "a", ".", "shape", "[", "1", "]", "*", "a", ".", "shape", "[", "1", "]", ")", "else", ":", "if", "(", "not", "numpy", ".", "alltrue", "(", "(", "numpy", ".", "diff", "(", "a", ".", "shape", ")", "==", "0", ")", ")", ")", ":", "raise", "ValueError", "(", "'All dimensions of input must be of equal length'", ")", "step", "=", "(", "1", "+", "numpy", ".", "cumprod", "(", "a", ".", "shape", "[", ":", "(", "-", "1", ")", "]", ")", ".", "sum", "(", ")", ")", "a", ".", "ravel", "(", ")", "[", ":", "end", ":", "step", "]", "=", "val" ]
fill the main diagonal of the given array of any dimensionality .
train
false
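The 2-d branch of fill_diagonal above reduces to a strided assignment on the flat view; a small numpy demonstration of that step arithmetic:

import numpy as np

a = np.zeros((3, 4), dtype=int)
# step = ncols + 1 lands exactly on the main diagonal of the flat view
a.ravel()[::a.shape[1] + 1] = 5
print(a)
# [[5 0 0 0]
#  [0 5 0 0]
#  [0 0 5 0]]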
54,791
def for_name(fq_name, recursive=False): fq_name = str(fq_name) module_name = __name__ short_name = fq_name if (fq_name.rfind('.') >= 0): (module_name, short_name) = (fq_name[:fq_name.rfind('.')], fq_name[(fq_name.rfind('.') + 1):]) try: result = __import__(module_name, None, None, [short_name]) return result.__dict__[short_name] except KeyError: if recursive: raise else: raise ImportError(("Could not find '%s' on path '%s'" % (short_name, module_name))) except ImportError: try: module = for_name(module_name, recursive=True) if hasattr(module, short_name): return getattr(module, short_name) else: raise KeyError() except KeyError: raise ImportError(("Could not find '%s' on path '%s'" % (short_name, module_name))) except ImportError: pass raise
[ "def", "for_name", "(", "fq_name", ",", "recursive", "=", "False", ")", ":", "fq_name", "=", "str", "(", "fq_name", ")", "module_name", "=", "__name__", "short_name", "=", "fq_name", "if", "(", "fq_name", ".", "rfind", "(", "'.'", ")", ">=", "0", ")", ":", "(", "module_name", ",", "short_name", ")", "=", "(", "fq_name", "[", ":", "fq_name", ".", "rfind", "(", "'.'", ")", "]", ",", "fq_name", "[", "(", "fq_name", ".", "rfind", "(", "'.'", ")", "+", "1", ")", ":", "]", ")", "try", ":", "result", "=", "__import__", "(", "module_name", ",", "None", ",", "None", ",", "[", "short_name", "]", ")", "return", "result", ".", "__dict__", "[", "short_name", "]", "except", "KeyError", ":", "if", "recursive", ":", "raise", "else", ":", "raise", "ImportError", "(", "(", "\"Could not find '%s' on path '%s'\"", "%", "(", "short_name", ",", "module_name", ")", ")", ")", "except", "ImportError", ":", "try", ":", "module", "=", "for_name", "(", "module_name", ",", "recursive", "=", "True", ")", "if", "hasattr", "(", "module", ",", "short_name", ")", ":", "return", "getattr", "(", "module", ",", "short_name", ")", "else", ":", "raise", "KeyError", "(", ")", "except", "KeyError", ":", "raise", "ImportError", "(", "(", "\"Could not find '%s' on path '%s'\"", "%", "(", "short_name", ",", "module_name", ")", ")", ")", "except", "ImportError", ":", "pass", "raise" ]
find class/function/method specified by its fully qualified name .
train
true
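A simpler modern sketch of what for_name above does, built on importlib; it skips the recursive fallback for attributes nested below module level and assumes the dotted path has at least one dot.

import os.path
from importlib import import_module

def for_name(fq_name):
    # split 'pkg.mod.attr' into the module part and the attribute part
    module_name, _, short_name = fq_name.rpartition('.')
    module = import_module(module_name)
    return getattr(module, short_name)

assert for_name('os.path.join') is os.path.join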
54,792
def vgcreate(vgname, devices, **kwargs): if ((not vgname) or (not devices)): return 'Error: vgname and device(s) are both required' if isinstance(devices, six.string_types): devices = devices.split(',') cmd = ['vgcreate', vgname] for device in devices: cmd.append(device) valid = ('clustered', 'maxlogicalvolumes', 'maxphysicalvolumes', 'vgmetadatacopies', 'metadatacopies', 'physicalextentsize') for var in kwargs: if (kwargs[var] and (var in valid)): cmd.append('--{0}'.format(var)) cmd.append(kwargs[var]) out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() vgdata = vgdisplay(vgname) vgdata['Output from vgcreate'] = out[0].strip() return vgdata
[ "def", "vgcreate", "(", "vgname", ",", "devices", ",", "**", "kwargs", ")", ":", "if", "(", "(", "not", "vgname", ")", "or", "(", "not", "devices", ")", ")", ":", "return", "'Error: vgname and device(s) are both required'", "if", "isinstance", "(", "devices", ",", "six", ".", "string_types", ")", ":", "devices", "=", "devices", ".", "split", "(", "','", ")", "cmd", "=", "[", "'vgcreate'", ",", "vgname", "]", "for", "device", "in", "devices", ":", "cmd", ".", "append", "(", "device", ")", "valid", "=", "(", "'clustered'", ",", "'maxlogicalvolumes'", ",", "'maxphysicalvolumes'", ",", "'vgmetadatacopies'", ",", "'metadatacopies'", ",", "'physicalextentsize'", ")", "for", "var", "in", "kwargs", ":", "if", "(", "kwargs", "[", "var", "]", "and", "(", "var", "in", "valid", ")", ")", ":", "cmd", ".", "append", "(", "'--{0}'", ".", "format", "(", "var", ")", ")", "cmd", ".", "append", "(", "kwargs", "[", "var", "]", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "vgdata", "=", "vgdisplay", "(", "vgname", ")", "vgdata", "[", "'Output from vgcreate'", "]", "=", "out", "[", "0", "]", ".", "strip", "(", ")", "return", "vgdata" ]
create an lvm volume group .
train
true
54,794
def test_raise_exception_spatial(): sbn1 = SpatialBatchNormalization((5,)) (yield (assert_raises, (ValueError, sbn1.allocate))) sbn2 = SpatialBatchNormalization(3) (yield (assert_raises, (ValueError, sbn2.allocate))) def do_not_fail(*input_dim): try: sbn = SpatialBatchNormalization(input_dim) sbn.allocate() except ValueError: assert False (yield (do_not_fail, 5, 4, 3)) (yield (do_not_fail, 7, 6)) (yield (do_not_fail, 3, 9, 2, 3))
[ "def", "test_raise_exception_spatial", "(", ")", ":", "sbn1", "=", "SpatialBatchNormalization", "(", "(", "5", ",", ")", ")", "(", "yield", "(", "assert_raises", ",", "(", "ValueError", ",", "sbn1", ".", "allocate", ")", ")", ")", "sbn2", "=", "SpatialBatchNormalization", "(", "3", ")", "(", "yield", "(", "assert_raises", ",", "(", "ValueError", ",", "sbn2", ".", "allocate", ")", ")", ")", "def", "do_not_fail", "(", "*", "input_dim", ")", ":", "try", ":", "sbn", "=", "SpatialBatchNormalization", "(", "input_dim", ")", "sbn", ".", "allocate", "(", ")", "except", "ValueError", ":", "assert", "False", "(", "yield", "(", "do_not_fail", ",", "5", ",", "4", ",", "3", ")", ")", "(", "yield", "(", "do_not_fail", ",", "7", ",", "6", ")", ")", "(", "yield", "(", "do_not_fail", ",", "3", ",", "9", ",", "2", ",", "3", ")", ")" ]
test that spatialbatchnormalization raises an expected exception .
train
false
54,795
def _get_basic_stream(stream_name, conn): return _execute_with_retries(conn, 'describe_stream', StreamName=stream_name)
[ "def", "_get_basic_stream", "(", "stream_name", ",", "conn", ")", ":", "return", "_execute_with_retries", "(", "conn", ",", "'describe_stream'", ",", "StreamName", "=", "stream_name", ")" ]
fetch basic stream info from aws .
train
false
54,796
def SampleRows(df, nrows, replace=False): indices = np.random.choice(df.index, nrows, replace=replace) sample = df.loc[indices] return sample
[ "def", "SampleRows", "(", "df", ",", "nrows", ",", "replace", "=", "False", ")", ":", "indices", "=", "np", ".", "random", ".", "choice", "(", "df", ".", "index", ",", "nrows", ",", "replace", "=", "replace", ")", "sample", "=", "df", ".", "loc", "[", "indices", "]", "return", "sample" ]
choose a sample of rows from a dataframe .
train
false
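A usage sketch for SampleRows above with a tiny frame; np.random.choice picks row labels and .loc materializes them, and rows can repeat when replace=True.

import numpy as np
import pandas as pd

df = pd.DataFrame({'x': range(5)})
indices = np.random.choice(df.index, 3, replace=False)
print(df.loc[indices])  # three distinct rows in random order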
54,797
def flushall(host=None, port=None, db=None, password=None): server = _connect(host, port, db, password) return server.flushall()
[ "def", "flushall", "(", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "flushall", "(", ")" ]
remove all keys from all databases .
train
true
54,798
def _getwindowview(folder_alias): attrs = {} args = {} finder = _getfinder() args = {} attrs = {} aeobj_00 = aetypes.ObjectSpecifier(want=aetypes.Type('cfol'), form='alis', seld=folder_alias, fr=None) aeobj_01 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('cwnd'), fr=aeobj_00) aeobj_02 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('pvew'), fr=aeobj_01) args['----'] = aeobj_02 (_reply, args, attrs) = finder.send('core', 'getd', args, attrs) if ('errn' in args): raise Error, aetools.decodeerror(args) views = {'iimg': 0, 'pnam': 1, 'lgbu': 2} if ('----' in args): return views[args['----'].enum]
[ "def", "_getwindowview", "(", "folder_alias", ")", ":", "attrs", "=", "{", "}", "args", "=", "{", "}", "finder", "=", "_getfinder", "(", ")", "args", "=", "{", "}", "attrs", "=", "{", "}", "aeobj_00", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'cfol'", ")", ",", "form", "=", "'alis'", ",", "seld", "=", "folder_alias", ",", "fr", "=", "None", ")", "aeobj_01", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'prop'", ")", ",", "form", "=", "'prop'", ",", "seld", "=", "aetypes", ".", "Type", "(", "'cwnd'", ")", ",", "fr", "=", "aeobj_00", ")", "aeobj_02", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'prop'", ")", ",", "form", "=", "'prop'", ",", "seld", "=", "aetypes", ".", "Type", "(", "'pvew'", ")", ",", "fr", "=", "aeobj_01", ")", "args", "[", "'----'", "]", "=", "aeobj_02", "(", "_reply", ",", "args", ",", "attrs", ")", "=", "finder", ".", "send", "(", "'core'", ",", "'getd'", ",", "args", ",", "attrs", ")", "if", "(", "'errn'", "in", "args", ")", ":", "raise", "Error", ",", "aetools", ".", "decodeerror", "(", "args", ")", "views", "=", "{", "'iimg'", ":", "0", ",", "'pnam'", ":", "1", ",", "'lgbu'", ":", "2", "}", "if", "(", "'----'", "in", "args", ")", ":", "return", "views", "[", "args", "[", "'----'", "]", ".", "enum", "]" ]
get the windowview .
train
false
54,800
def check_fasta_seqs_lens(input_fasta_fp): seq_lens = defaultdict(int) input_fasta_f = open(input_fasta_fp, 'U') for (label, seq) in parse_fasta(input_fasta_f): seq_lens[len(seq)] += 1 input_fasta_f.close() formatted_seq_lens = [] for curr_key in seq_lens: formatted_seq_lens.append((seq_lens[curr_key], curr_key)) formatted_seq_lens.sort(reverse=True) return formatted_seq_lens
[ "def", "check_fasta_seqs_lens", "(", "input_fasta_fp", ")", ":", "seq_lens", "=", "defaultdict", "(", "int", ")", "input_fasta_f", "=", "open", "(", "input_fasta_fp", ",", "'U'", ")", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "input_fasta_f", ")", ":", "seq_lens", "[", "len", "(", "seq", ")", "]", "+=", "1", "input_fasta_f", ".", "close", "(", ")", "formatted_seq_lens", "=", "[", "]", "for", "curr_key", "in", "seq_lens", ":", "formatted_seq_lens", ".", "append", "(", "(", "seq_lens", "[", "curr_key", "]", ",", "curr_key", ")", ")", "formatted_seq_lens", ".", "sort", "(", "reverse", "=", "True", ")", "return", "formatted_seq_lens" ]
creates bins of sequence lengths , useful for checking for valid aligned sequences .
train
false
54,801
def _make_allocated_size_testcases(): for unit in (Byte, MB, MiB, GB, GiB): for size in (1, 2, 4, 8): test_case = make_allocated_size_tests(unit(size)) globals()[test_case.__name__] = test_case
[ "def", "_make_allocated_size_testcases", "(", ")", ":", "for", "unit", "in", "(", "Byte", ",", "MB", ",", "MiB", ",", "GB", ",", "GiB", ")", ":", "for", "size", "in", "(", "1", ",", "2", ",", "4", ",", "8", ")", ":", "test_case", "=", "make_allocated_size_tests", "(", "unit", "(", "size", ")", ")", "globals", "(", ")", "[", "test_case", ".", "__name__", "]", "=", "test_case" ]
build test cases for some common allocation_units .
train
false
54,803
def string_param(registry, xml_parent, data): base_param(registry, xml_parent, data, True, 'hudson.model.StringParameterDefinition')
[ "def", "string_param", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "base_param", "(", "registry", ",", "xml_parent", ",", "data", ",", "True", ",", "'hudson.model.StringParameterDefinition'", ")" ]
yaml: string - a string parameter .
train
false
54,804
def parse_media_range(range): (type, subtype, params) = parse_mime_type(range) if ((not params.has_key('q')) or (not params['q']) or (not float(params['q'])) or (float(params['q']) > 1) or (float(params['q']) < 0)): params['q'] = '1' return (type, subtype, params)
[ "def", "parse_media_range", "(", "range", ")", ":", "(", "type", ",", "subtype", ",", "params", ")", "=", "parse_mime_type", "(", "range", ")", "if", "(", "(", "not", "params", ".", "has_key", "(", "'q'", ")", ")", "or", "(", "not", "params", "[", "'q'", "]", ")", "or", "(", "not", "float", "(", "params", "[", "'q'", "]", ")", ")", "or", "(", "float", "(", "params", "[", "'q'", "]", ")", ">", "1", ")", "or", "(", "float", "(", "params", "[", "'q'", "]", ")", "<", "0", ")", ")", ":", "params", "[", "'q'", "]", "=", "'1'", "return", "(", "type", ",", "subtype", ",", "params", ")" ]
parse a media-range into its component parts .
train
true
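A self-contained sketch of the q-clamping in parse_media_range above. The parse_mime_type helper is not shown in the record, so a simplified stand-in is inlined here; real implementations handle more edge cases.

def parse_mime_type(mime_type):
    parts = mime_type.split(';')
    type_, _, subtype = parts[0].strip().partition('/')
    params = dict(p.strip().split('=', 1) for p in parts[1:] if '=' in p)
    return type_, subtype, params

def parse_media_range(media_range):
    type_, subtype, params = parse_mime_type(media_range)
    try:
        q = float(params.get('q', ''))
        valid = 0 < q <= 1  # the original also treats q=0 as invalid
    except ValueError:
        valid = False
    if not valid:
        params['q'] = '1'
    return type_, subtype, params

assert parse_media_range('text/html;q=0.5')[2]['q'] == '0.5'
assert parse_media_range('text/html;q=5')[2]['q'] == '1'  # out of range
assert parse_media_range('text/html')[2]['q'] == '1'      # missing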
54,805
def get_writer(extension): global FORMAT_WRITERS if (FORMAT_WRITERS is None): _import_writers() return FORMAT_WRITERS.get(extension, None)
[ "def", "get_writer", "(", "extension", ")", ":", "global", "FORMAT_WRITERS", "if", "(", "FORMAT_WRITERS", "is", "None", ")", ":", "_import_writers", "(", ")", "return", "FORMAT_WRITERS", ".", "get", "(", "extension", ",", "None", ")" ]
returns none if no writer is found for extension .
train
false
54,806
def SortedConcatenate(sep=u','): def step(ctxt, ndx, value): if (value is not None): ctxt[ndx] = value def finalize(ctxt): if (len(ctxt) == 0): return None return sep.join(map(ctxt.get, sorted(ctxt.iterkeys()))) return ({}, step, finalize)
[ "def", "SortedConcatenate", "(", "sep", "=", "u','", ")", ":", "def", "step", "(", "ctxt", ",", "ndx", ",", "value", ")", ":", "if", "(", "value", "is", "not", "None", ")", ":", "ctxt", "[", "ndx", "]", "=", "value", "def", "finalize", "(", "ctxt", ")", ":", "if", "(", "len", "(", "ctxt", ")", "==", "0", ")", ":", "return", "None", "return", "sep", ".", "join", "(", "map", "(", "ctxt", ".", "get", ",", "sorted", "(", "ctxt", ".", "iterkeys", "(", ")", ")", ")", ")", "return", "(", "{", "}", ",", "step", ",", "finalize", ")" ]
string concatenation aggregator for sqlite .
train
false
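The (context, step, finalize) triple returned by SortedConcatenate above targets whatever registration helper the host project uses; with the stdlib sqlite3 module the same aggregate is written as a class, roughly like this:

import sqlite3

class SortedConcatenate:
    def __init__(self):
        self.ctxt = {}

    def step(self, ndx, value):
        if value is not None:
            self.ctxt[ndx] = value

    def finalize(self):
        if not self.ctxt:
            return None
        # join values in key order, matching the triple's finalize
        return ','.join(self.ctxt[k] for k in sorted(self.ctxt))

con = sqlite3.connect(':memory:')
con.create_aggregate('sortcat', 2, SortedConcatenate)
con.execute('CREATE TABLE t (pos INTEGER, word TEXT)')
con.executemany('INSERT INTO t VALUES (?, ?)', [(2, 'b'), (1, 'a'), (3, 'c')])
print(con.execute('SELECT sortcat(pos, word) FROM t').fetchone()[0])  # a,b,c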
54,807
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None): headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) else: accept_encoding = 'gzip,deflate' headers['accept-encoding'] = accept_encoding if user_agent: headers['user-agent'] = user_agent if keep_alive: headers['connection'] = 'keep-alive' if basic_auth: headers['authorization'] = ('Basic ' + b64encode(six.b(basic_auth)).decode('utf-8')) if proxy_basic_auth: headers['proxy-authorization'] = ('Basic ' + b64encode(six.b(proxy_basic_auth)).decode('utf-8')) return headers
[ "def", "make_headers", "(", "keep_alive", "=", "None", ",", "accept_encoding", "=", "None", ",", "user_agent", "=", "None", ",", "basic_auth", "=", "None", ",", "proxy_basic_auth", "=", "None", ")", ":", "headers", "=", "{", "}", "if", "accept_encoding", ":", "if", "isinstance", "(", "accept_encoding", ",", "str", ")", ":", "pass", "elif", "isinstance", "(", "accept_encoding", ",", "list", ")", ":", "accept_encoding", "=", "','", ".", "join", "(", "accept_encoding", ")", "else", ":", "accept_encoding", "=", "'gzip,deflate'", "headers", "[", "'accept-encoding'", "]", "=", "accept_encoding", "if", "user_agent", ":", "headers", "[", "'user-agent'", "]", "=", "user_agent", "if", "keep_alive", ":", "headers", "[", "'connection'", "]", "=", "'keep-alive'", "if", "basic_auth", ":", "headers", "[", "'authorization'", "]", "=", "(", "'Basic '", "+", "b64encode", "(", "six", ".", "b", "(", "basic_auth", ")", ")", ".", "decode", "(", "'utf-8'", ")", ")", "if", "proxy_basic_auth", ":", "headers", "[", "'proxy-authorization'", "]", "=", "(", "'Basic '", "+", "b64encode", "(", "six", ".", "b", "(", "proxy_basic_auth", ")", ")", ".", "decode", "(", "'utf-8'", ")", ")", "return", "headers" ]
shortcuts for generating request headers .
train
true
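A minimal sketch of the basic-auth branch in make_headers above: the 'user:pass' string is base64-encoded into an Authorization header value.

from base64 import b64encode

basic_auth = 'user:pass'
header = 'Basic ' + b64encode(basic_auth.encode('utf-8')).decode('utf-8')
assert header == 'Basic dXNlcjpwYXNz'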
54,808
def safe_value(name, value): if (name.lower() in LOGGER_SENSITIVE_HEADERS): prefix_length = logger_settings.get('reveal_sensitive_prefix', 16) prefix_length = int(min(prefix_length, ((len(value) ** 2) / 32), (len(value) / 2))) redacted_value = value[0:prefix_length] return (redacted_value + '...') return value
[ "def", "safe_value", "(", "name", ",", "value", ")", ":", "if", "(", "name", ".", "lower", "(", ")", "in", "LOGGER_SENSITIVE_HEADERS", ")", ":", "prefix_length", "=", "logger_settings", ".", "get", "(", "'reveal_sensitive_prefix'", ",", "16", ")", "prefix_length", "=", "int", "(", "min", "(", "prefix_length", ",", "(", "(", "len", "(", "value", ")", "**", "2", ")", "/", "32", ")", ",", "(", "len", "(", "value", ")", "/", "2", ")", ")", ")", "redacted_value", "=", "value", "[", "0", ":", "prefix_length", "]", "return", "(", "redacted_value", "+", "'...'", ")", "return", "value" ]
only show up to logger_settings[reveal_sensitive_prefix] characters from a sensitive header .
train
false
54,811
def dimension_mul(a, b): if ((a == datashape.var) or (b == datashape.var)): return datashape.var if isinstance(a, Fixed): a = int(a) if isinstance(b, Fixed): b = int(b) return int((a * b))
[ "def", "dimension_mul", "(", "a", ",", "b", ")", ":", "if", "(", "(", "a", "==", "datashape", ".", "var", ")", "or", "(", "b", "==", "datashape", ".", "var", ")", ")", ":", "return", "datashape", ".", "var", "if", "isinstance", "(", "a", ",", "Fixed", ")", ":", "a", "=", "int", "(", "a", ")", "if", "isinstance", "(", "b", ",", "Fixed", ")", ":", "b", "=", "int", "(", "b", ")", "return", "int", "(", "(", "a", "*", "b", ")", ")" ]
given b copies of dimension a , how big is the combined dimension? .
train
false
54,812
def line_search_armijo(f, xk, pk, gfk, old_fval, args=(), c1=0.0001, alpha0=1): xk = np.atleast_1d(xk) fc = [0] def phi(alpha1): fc[0] += 1 return f((xk + (alpha1 * pk)), *args) if (old_fval is None): phi0 = phi(0.0) else: phi0 = old_fval derphi0 = np.dot(gfk, pk) (alpha, phi1) = scalar_search_armijo(phi, phi0, derphi0, c1=c1, alpha0=alpha0) return (alpha, fc[0], phi1)
[ "def", "line_search_armijo", "(", "f", ",", "xk", ",", "pk", ",", "gfk", ",", "old_fval", ",", "args", "=", "(", ")", ",", "c1", "=", "0.0001", ",", "alpha0", "=", "1", ")", ":", "xk", "=", "np", ".", "atleast_1d", "(", "xk", ")", "fc", "=", "[", "0", "]", "def", "phi", "(", "alpha1", ")", ":", "fc", "[", "0", "]", "+=", "1", "return", "f", "(", "(", "xk", "+", "(", "alpha1", "*", "pk", ")", ")", ",", "*", "args", ")", "if", "(", "old_fval", "is", "None", ")", ":", "phi0", "=", "phi", "(", "0.0", ")", "else", ":", "phi0", "=", "old_fval", "derphi0", "=", "np", ".", "dot", "(", "gfk", ",", "pk", ")", "(", "alpha", ",", "phi1", ")", "=", "scalar_search_armijo", "(", "phi", ",", "phi0", ",", "derphi0", ",", "c1", "=", "c1", ",", "alpha0", "=", "alpha0", ")", "return", "(", "alpha", ",", "fc", "[", "0", "]", ",", "phi1", ")" ]
minimize the function f(xk + alpha*pk) over alpha .
train
true
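The wrapper above delegates the actual search to scipy's scalar_search_armijo; a standalone backtracking sketch of the same Armijo rule on a simple quadratic, shrinking alpha until phi(alpha) <= phi(0) + c1*alpha*phi'(0):

import numpy as np

def armijo(f, xk, pk, gfk, c1=1e-4, alpha0=1.0, shrink=0.5):
    phi0 = f(xk)
    derphi0 = np.dot(gfk, pk)  # directional derivative at alpha = 0
    alpha = alpha0
    while f(xk + alpha * pk) > phi0 + c1 * alpha * derphi0:
        alpha *= shrink  # backtrack until sufficient decrease holds
    return alpha

f = lambda x: float(np.dot(x, x))        # f(x) = ||x||^2
xk = np.array([2.0, -1.0])
pk = -2.0 * xk                           # steepest-descent direction
alpha = armijo(f, xk, pk, gfk=2.0 * xk)  # gradient of ||x||^2 is 2x
assert f(xk + alpha * pk) < f(xk)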
54,813
def _replication_request(command, host=None, core_name=None, params=None): params = ([] if (params is None) else params) extra = (['command={0}'.format(command)] + params) url = _format_url('replication', host=host, core_name=core_name, extra=extra) return _http_request(url)
[ "def", "_replication_request", "(", "command", ",", "host", "=", "None", ",", "core_name", "=", "None", ",", "params", "=", "None", ")", ":", "params", "=", "(", "[", "]", "if", "(", "params", "is", "None", ")", "else", "params", ")", "extra", "=", "(", "[", "'command={0}'", ".", "format", "(", "command", ")", "]", "+", "params", ")", "url", "=", "_format_url", "(", "'replication'", ",", "host", "=", "host", ",", "core_name", "=", "core_name", ",", "extra", "=", "extra", ")", "return", "_http_request", "(", "url", ")" ]
private method that performs the requested replication command and returns a dictionary with the success status .
train
true