id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
30,969
def login_redirect(application_url, continue_url, start_response): if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format(AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) else: hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format(hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)]) return []
[ "def", "login_redirect", "(", "application_url", ",", "continue_url", ",", "start_response", ")", ":", "if", "AppDashboardHelper", ".", "USE_SHIBBOLETH", ":", "redirect_url", "=", "'{0}:{1}/login?{2}={3}'", ".", "format", "(", "AppDashboardHelper", ".", "SHIBBOLETH_CONN...
load current redirect to context .
train
false
30,970
def campaign_message(): return s3_rest_controller()
[ "def", "campaign_message", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
30,971
def show_analytics_dashboard_message(course_key): if hasattr(course_key, 'ccx'): ccx_analytics_enabled = settings.FEATURES.get('ENABLE_CCX_ANALYTICS_DASHBOARD_URL', False) return (settings.ANALYTICS_DASHBOARD_URL and ccx_analytics_enabled) return settings.ANALYTICS_DASHBOARD_URL
[ "def", "show_analytics_dashboard_message", "(", "course_key", ")", ":", "if", "hasattr", "(", "course_key", ",", "'ccx'", ")", ":", "ccx_analytics_enabled", "=", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_CCX_ANALYTICS_DASHBOARD_URL'", ",", "False", ")",...
defines whether or not the analytics dashboard url should be displayed .
train
false
30,972
def _valid(m, comment=VALID_RESPONSE, out=None): return _set_status(m, status=True, comment=comment, out=out)
[ "def", "_valid", "(", "m", ",", "comment", "=", "VALID_RESPONSE", ",", "out", "=", "None", ")", ":", "return", "_set_status", "(", "m", ",", "status", "=", "True", ",", "comment", "=", "comment", ",", "out", "=", "out", ")" ]
return valid status .
train
true
30,973
def source_ranges_match(original_file_dict, diff_dict, original_result_diff_dict, modified_result_diff_dict, renamed_files): for file_name in original_file_dict: try: original_total_diff = (diff_dict[file_name] + original_result_diff_dict[file_name]) except ConflictError: return False original_total_file = original_total_diff.modified modified_total_file = modified_result_diff_dict[renamed_files.get(file_name, file_name)].modified if (original_total_file != modified_total_file): return False return True
[ "def", "source_ranges_match", "(", "original_file_dict", ",", "diff_dict", ",", "original_result_diff_dict", ",", "modified_result_diff_dict", ",", "renamed_files", ")", ":", "for", "file_name", "in", "original_file_dict", ":", "try", ":", "original_total_diff", "=", "(...
checks whether the sourceranges of two results match .
train
false
30,974
def literal(value, type_=None): return BindParameter(None, value, type_=type_, unique=True)
[ "def", "literal", "(", "value", ",", "type_", "=", "None", ")", ":", "return", "BindParameter", "(", "None", ",", "value", ",", "type_", "=", "type_", ",", "unique", "=", "True", ")" ]
return a literal clause .
train
false
30,975
def get_pending_domain_join(): vname = '(Default)' base_key = 'SYSTEM\\CurrentControlSet\\Services\\Netlogon' avoid_key = '{0}\\AvoidSpnSet'.format(base_key) join_key = '{0}\\JoinDomain'.format(base_key) avoid_reg_ret = __salt__['reg.read_value']('HKLM', avoid_key, vname) if avoid_reg_ret['success']: log.debug('Found key: %s', avoid_key) return True else: log.debug('Unable to access key: %s', avoid_key) join_reg_ret = __salt__['reg.read_value']('HKLM', join_key, vname) if join_reg_ret['success']: log.debug('Found key: %s', join_key) return True else: log.debug('Unable to access key: %s', join_key) return False
[ "def", "get_pending_domain_join", "(", ")", ":", "vname", "=", "'(Default)'", "base_key", "=", "'SYSTEM\\\\CurrentControlSet\\\\Services\\\\Netlogon'", "avoid_key", "=", "'{0}\\\\AvoidSpnSet'", ".", "format", "(", "base_key", ")", "join_key", "=", "'{0}\\\\JoinDomain'", "...
determine whether there is a pending domain join action that requires a reboot .
train
false
30,976
def test_regex_bad_pattern(): assert_raises(re.error, inputs.regex, '[')
[ "def", "test_regex_bad_pattern", "(", ")", ":", "assert_raises", "(", "re", ".", "error", ",", "inputs", ".", "regex", ",", "'['", ")" ]
regex error raised immediately when regex input parser is created .
train
false
30,977
def CommaSeparatedTokenizer(): return (RegexTokenizer('[^,]+') | StripFilter())
[ "def", "CommaSeparatedTokenizer", "(", ")", ":", "return", "(", "RegexTokenizer", "(", "'[^,]+'", ")", "|", "StripFilter", "(", ")", ")" ]
splits tokens by commas .
train
false
30,978
def index_alt(): s3_redirect_default(URL(f='person'))
[ "def", "index_alt", "(", ")", ":", "s3_redirect_default", "(", "URL", "(", "f", "=", "'person'", ")", ")" ]
module homepage for non-admin users when no cms content found .
train
false
30,980
def find_best_app(module): from . import Flask for attr_name in ('app', 'application'): app = getattr(module, attr_name, None) if ((app is not None) and isinstance(app, Flask)): return app matches = [v for (k, v) in iteritems(module.__dict__) if isinstance(v, Flask)] if (len(matches) == 1): return matches[0] raise NoAppException(('Failed to find application in module "%s". Are you sure it contains a Flask application? Maybe you wrapped it in a WSGI middleware or you are using a factory function.' % module.__name__))
[ "def", "find_best_app", "(", "module", ")", ":", "from", ".", "import", "Flask", "for", "attr_name", "in", "(", "'app'", ",", "'application'", ")", ":", "app", "=", "getattr", "(", "module", ",", "attr_name", ",", "None", ")", "if", "(", "(", "app", ...
given a module instance this tries to find the best possible application in the module or raises an exception .
train
false
30,981
def make_man_update_target(manpage): man_dir = pjoin('docs', 'man') manpage_gz = (manpage + '.gz') manpath = pjoin(man_dir, manpage) manpath_gz = pjoin(man_dir, manpage_gz) gz_cmd = ('cd %(man_dir)s && gzip -9c %(manpage)s > %(manpage_gz)s' % locals()) return (manpath_gz, [manpath], gz_cmd)
[ "def", "make_man_update_target", "(", "manpage", ")", ":", "man_dir", "=", "pjoin", "(", "'docs'", ",", "'man'", ")", "manpage_gz", "=", "(", "manpage", "+", "'.gz'", ")", "manpath", "=", "pjoin", "(", "man_dir", ",", "manpage", ")", "manpath_gz", "=", "...
return a target_update-compliant tuple for the given manpage .
train
false
30,982
def p_atom_name(p): p[0] = ast.Name(p[1])
[ "def", "p_atom_name", "(", "p", ")", ":", "p", "[", "0", "]", "=", "ast", ".", "Name", "(", "p", "[", "1", "]", ")" ]
atom : name .
train
false
30,983
def get_stream_type(env, args): if ((not env.stdout_isatty) and (not args.prettify)): Stream = partial(RawStream, chunk_size=(RawStream.CHUNK_SIZE_BY_LINE if args.stream else RawStream.CHUNK_SIZE)) elif args.prettify: Stream = partial((PrettyStream if args.stream else BufferedPrettyStream), env=env, conversion=Conversion(), formatting=Formatting(env=env, groups=args.prettify, color_scheme=args.style, explicit_json=args.json)) else: Stream = partial(EncodedStream, env=env) return Stream
[ "def", "get_stream_type", "(", "env", ",", "args", ")", ":", "if", "(", "(", "not", "env", ".", "stdout_isatty", ")", "and", "(", "not", "args", ".", "prettify", ")", ")", ":", "Stream", "=", "partial", "(", "RawStream", ",", "chunk_size", "=", "(", ...
pick the right stream type based on env and args .
train
false
30,984
def stacksize(since=0.0): return (_VmB('VmStk:') - since)
[ "def", "stacksize", "(", "since", "=", "0.0", ")", ":", "return", "(", "_VmB", "(", "'VmStk:'", ")", "-", "since", ")" ]
return stack size in bytes .
train
false
30,988
def test_ros_fit_invalid_ratio(): ratio = (1.0 / 10000.0) ros = RandomOverSampler(ratio=ratio, random_state=RND_SEED) assert_raises(RuntimeError, ros.fit, X, Y)
[ "def", "test_ros_fit_invalid_ratio", "(", ")", ":", "ratio", "=", "(", "1.0", "/", "10000.0", ")", "ros", "=", "RandomOverSampler", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "ros", ".", ...
test either if an error is raised when the balancing ratio to fit is smaller than the one of the data .
train
false
30,991
def task_track(request_info, task_info, event_type, event, page=None): full_event = dict(event, **task_info) with eventtracker.get_tracker().context('edx.course.task', contexts.course_context_from_url(page)): event = {'username': request_info.get('username', 'unknown'), 'ip': request_info.get('ip', 'unknown'), 'event_source': 'task', 'event_type': event_type, 'event': full_event, 'agent': request_info.get('agent', 'unknown'), 'page': page, 'time': datetime.datetime.utcnow(), 'host': request_info.get('host', 'unknown'), 'context': eventtracker.get_tracker().resolve_context()} log_event(event)
[ "def", "task_track", "(", "request_info", ",", "task_info", ",", "event_type", ",", "event", ",", "page", "=", "None", ")", ":", "full_event", "=", "dict", "(", "event", ",", "**", "task_info", ")", "with", "eventtracker", ".", "get_tracker", "(", ")", "...
logs tracking information for events occuring within celery tasks .
train
false
30,993
def test_start_logging(fake_proc, caplog): cmd = 'does_not_exist' args = ['arg', 'arg with spaces'] with caplog.at_level(logging.DEBUG): fake_proc.start(cmd, args) msgs = [e.msg for e in caplog.records] assert (msgs == ['Starting process.', "Executing: does_not_exist arg 'arg with spaces'"])
[ "def", "test_start_logging", "(", "fake_proc", ",", "caplog", ")", ":", "cmd", "=", "'does_not_exist'", "args", "=", "[", "'arg'", ",", "'arg with spaces'", "]", "with", "caplog", ".", "at_level", "(", "logging", ".", "DEBUG", ")", ":", "fake_proc", ".", "...
make sure that starting logs the executed commandline .
train
false
30,994
def put_user_policy(user_name, policy_name, policy_json, region=None, key=None, keyid=None, profile=None): user = get_user(user_name, region, key, keyid, profile) if (not user): log.error('User {0} does not exist'.format(user_name)) return False conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: if (not isinstance(policy_json, six.string_types)): policy_json = json.dumps(policy_json) created = conn.put_user_policy(user_name, policy_name, policy_json) if created: log.info('Created policy for user {0}.'.format(user_name)) return True msg = 'Could not create policy for user {0}.' log.error(msg.format(user_name)) except boto.exception.BotoServerError as e: log.debug(e) msg = 'Failed to create policy for user {0}.' log.error(msg.format(user_name)) return False
[ "def", "put_user_policy", "(", "user_name", ",", "policy_name", ",", "policy_json", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "user", "=", "get_user", "(", "user_name", ",", "...
adds or updates the specified policy document for the specified user .
train
true
30,995
def camera(): return load('camera.png')
[ "def", "camera", "(", ")", ":", "return", "load", "(", "'camera.png'", ")" ]
gray-level "camera" image .
train
false
30,996
def fasthash(string): md4 = hashlib.new('md4') md4.update(string) return md4.hexdigest()
[ "def", "fasthash", "(", "string", ")", ":", "md4", "=", "hashlib", ".", "new", "(", "'md4'", ")", "md4", ".", "update", "(", "string", ")", "return", "md4", ".", "hexdigest", "(", ")" ]
hashes string into a string representation of a 128-bit digest .
train
false
30,997
def stop_zookeeper(zk_ips, keyname): logging.info('Stopping ZooKeeper...') for ip in zk_ips: stop_service_cmd = (STOP_SERVICE_SCRIPT + ZK_WATCH_NAME) try: utils.ssh(ip, keyname, stop_service_cmd) except subprocess.CalledProcessError: logging.error('Unable to stop ZooKeeper on {}'.format(ip))
[ "def", "stop_zookeeper", "(", "zk_ips", ",", "keyname", ")", ":", "logging", ".", "info", "(", "'Stopping ZooKeeper...'", ")", "for", "ip", "in", "zk_ips", ":", "stop_service_cmd", "=", "(", "STOP_SERVICE_SCRIPT", "+", "ZK_WATCH_NAME", ")", "try", ":", "utils"...
stops zookeeper .
train
false
30,998
@cleanup def test__EventCollection__get_color(): (_, coll, props) = generate_EventCollection_plot() np.testing.assert_array_equal(props[u'color'], coll.get_color()) check_allprop_array(coll.get_colors(), props[u'color'])
[ "@", "cleanup", "def", "test__EventCollection__get_color", "(", ")", ":", "(", "_", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "np", ".", "testing", ".", "assert_array_equal", "(", "props", "[", "u'color'", "]", ",", "col...
check to make sure the default color matches the input color .
train
false
30,999
def renderScene(programObj, width, height): gl.glMatrixMode(gl.GL_PROJECTION) gl.glLoadIdentity() glu.gluOrtho2D(0, width, 0, height) gl.glViewport(0, 0, width, height) gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() gl.glUseProgram(programObj) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex2f(0.0, 0.0) gl.glTexCoord2f(1.0, 0.0) gl.glVertex2f(float(width), 0.0) gl.glTexCoord2f(1.0, 1.0) gl.glVertex2f(float(width), float(height)) gl.glTexCoord2f(0.0, 1.0) gl.glVertex2f(0.0, float(height)) gl.glEnd() gl.glUseProgram(0) checkGLError()
[ "def", "renderScene", "(", "programObj", ",", "width", ",", "height", ")", ":", "gl", ".", "glMatrixMode", "(", "gl", ".", "GL_PROJECTION", ")", "gl", ".", "glLoadIdentity", "(", ")", "glu", ".", "gluOrtho2D", "(", "0", ",", "width", ",", "0", ",", "...
renderscene - the opengl commands to render our scene .
train
false
31,000
def protect_filename(s): if (set(s) & set(PROTECTABLES)): if (sys.platform == 'win32'): return (('"' + s) + '"') else: return ''.join(((('\\' + c) if (c in PROTECTABLES) else c) for c in s)) else: return s
[ "def", "protect_filename", "(", "s", ")", ":", "if", "(", "set", "(", "s", ")", "&", "set", "(", "PROTECTABLES", ")", ")", ":", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "return", "(", "(", "'\"'", "+", "s", ")", "+", "'\"'"...
escape a string to protect certain characters .
train
false
31,001
def absurl(fragment): root = settings.MEDIA_URL root += (((root[(-1):] != '/') and '/') or '') return urlparse.urljoin(root, fragment)
[ "def", "absurl", "(", "fragment", ")", ":", "root", "=", "settings", ".", "MEDIA_URL", "root", "+=", "(", "(", "(", "root", "[", "(", "-", "1", ")", ":", "]", "!=", "'/'", ")", "and", "'/'", ")", "or", "''", ")", "return", "urlparse", ".", "url...
create an absolute url based on media_url .
train
false
31,002
def compareExecutionOrderAscending(module, otherModule): if (module.globalExecutionOrder < otherModule.globalExecutionOrder): return (-1) if (module.globalExecutionOrder > otherModule.globalExecutionOrder): return 1 if (module.__name__ < otherModule.__name__): return (-1) return int((module.__name__ > otherModule.__name__))
[ "def", "compareExecutionOrderAscending", "(", "module", ",", "otherModule", ")", ":", "if", "(", "module", ".", "globalExecutionOrder", "<", "otherModule", ".", "globalExecutionOrder", ")", ":", "return", "(", "-", "1", ")", "if", "(", "module", ".", "globalEx...
get comparison in order to sort modules in ascending execution order .
train
false
31,003
def sort_stats(stats, sortedby=None, tree=False, reverse=True): if (sortedby is None): return stats if ((sortedby == 'io_counters') and (not tree)): try: stats.sort(key=(lambda process: (((process[sortedby][0] - process[sortedby][2]) + process[sortedby][1]) - process[sortedby][3])), reverse=reverse) except Exception: stats.sort(key=operator.itemgetter('cpu_percent'), reverse=reverse) elif tree: stats.set_sorting(sortedby, reverse) else: try: stats.sort(key=operator.itemgetter(sortedby), reverse=reverse) except (KeyError, TypeError): stats.sort(key=operator.itemgetter('name'), reverse=False) return stats
[ "def", "sort_stats", "(", "stats", ",", "sortedby", "=", "None", ",", "tree", "=", "False", ",", "reverse", "=", "True", ")", ":", "if", "(", "sortedby", "is", "None", ")", ":", "return", "stats", "if", "(", "(", "sortedby", "==", "'io_counters'", ")...
return the stats sorted by reverse the sort if reverse is true .
train
false
31,004
def Binary(x): if PY2: return bytearray(x) else: return bytes(x)
[ "def", "Binary", "(", "x", ")", ":", "if", "PY2", ":", "return", "bytearray", "(", "x", ")", "else", ":", "return", "bytes", "(", "x", ")" ]
this function constructs an object capable of holding a binary string value .
train
false
31,005
def _replace_by(module_function, package=__package__, warn=False): try: from importlib import import_module except ImportError: warnings.warn('could not import module importlib') return (lambda func: func) def decorate(func, module_function=module_function, warn=warn): try: (module, function) = module_function.split('.') if package: module = import_module(('.' + module), package=package) else: module = import_module(module) (func, oldfunc) = (getattr(module, function), func) globals()[('__old_' + func.__name__)] = oldfunc except Exception: if warn: warnings.warn(('failed to import %s' % module_function)) return func return decorate
[ "def", "_replace_by", "(", "module_function", ",", "package", "=", "__package__", ",", "warn", "=", "False", ")", ":", "try", ":", "from", "importlib", "import", "import_module", "except", "ImportError", ":", "warnings", ".", "warn", "(", "'could not import modu...
try replace decorated function by module .
train
false
31,006
def revoke_access(course, user, level, send_email=True): _change_access(course, user, level, 'revoke', send_email)
[ "def", "revoke_access", "(", "course", ",", "user", ",", "level", ",", "send_email", "=", "True", ")", ":", "_change_access", "(", "course", ",", "user", ",", "level", ",", "'revoke'", ",", "send_email", ")" ]
revoke access from user to course modification .
train
false
31,007
def add_base(paths): return [os.path.join(BASEDIR, x) for x in paths]
[ "def", "add_base", "(", "paths", ")", ":", "return", "[", "os", ".", "path", ".", "join", "(", "BASEDIR", ",", "x", ")", "for", "x", "in", "paths", "]" ]
returns a list of paths relative to basedir .
train
false
31,009
def create_nrt_module(ctx): codegen = ctx.codegen() library = codegen.create_library('nrt') ir_mod = library.create_ir_module('nrt_module') atomic_inc = _define_atomic_inc_dec(ir_mod, 'add', ordering='monotonic') atomic_dec = _define_atomic_inc_dec(ir_mod, 'sub', ordering='monotonic') _define_atomic_cas(ir_mod, ordering='monotonic') _define_nrt_meminfo_data(ir_mod) _define_nrt_incref(ir_mod, atomic_inc) _define_nrt_decref(ir_mod, atomic_dec) _define_nrt_unresolved_abort(ctx, ir_mod) return (ir_mod, library)
[ "def", "create_nrt_module", "(", "ctx", ")", ":", "codegen", "=", "ctx", ".", "codegen", "(", ")", "library", "=", "codegen", ".", "create_library", "(", "'nrt'", ")", "ir_mod", "=", "library", ".", "create_ir_module", "(", "'nrt_module'", ")", "atomic_inc",...
create an ir module defining the llvm nrt functions .
train
false
31,010
def get_object_traceback(obj): frames = _get_object_traceback(obj) if (frames is not None): return Traceback(frames) else: return None
[ "def", "get_object_traceback", "(", "obj", ")", ":", "frames", "=", "_get_object_traceback", "(", "obj", ")", "if", "(", "frames", "is", "not", "None", ")", ":", "return", "Traceback", "(", "frames", ")", "else", ":", "return", "None" ]
get the traceback where the python object *obj* was allocated .
train
false
31,011
@pytest.fixture(scope='session') def doctest_namespace(): return dict()
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'session'", ")", "def", "doctest_namespace", "(", ")", ":", "return", "dict", "(", ")" ]
inject names into the doctest namespace .
train
false
31,012
def is_layer_attribute_aggregable(store_type, field_name, field_type): if (store_type != 'dataStore'): return False if (field_type not in LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES): return False if (field_name.lower() in ['id', 'identifier']): return False return True
[ "def", "is_layer_attribute_aggregable", "(", "store_type", ",", "field_name", ",", "field_type", ")", ":", "if", "(", "store_type", "!=", "'dataStore'", ")", ":", "return", "False", "if", "(", "field_type", "not", "in", "LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES", ")", "...
decipher whether layer attribute is suitable for statistical derivation .
train
false
31,013
def attach_private_projects_same_owner(queryset, user, as_field='private_projects_same_owner_attr'): model = queryset.model if ((user is None) or user.is_anonymous()): sql = 'SELECT 0' else: sql = '\n SELECT COUNT(id)\n FROM projects_project p_aux\n WHERE p_aux.is_private = True AND\n p_aux.owner_id = {tbl}.owner_id\n ' sql = sql.format(tbl=model._meta.db_table, user_id=user.id) queryset = queryset.extra(select={as_field: sql}) return queryset
[ "def", "attach_private_projects_same_owner", "(", "queryset", ",", "user", ",", "as_field", "=", "'private_projects_same_owner_attr'", ")", ":", "model", "=", "queryset", ".", "model", "if", "(", "(", "user", "is", "None", ")", "or", "user", ".", "is_anonymous",...
attach a private projects counter to each object of the queryset .
train
false
31,015
def generate_fathead(folder, processes=CPU_COUNT): docs_folder = op.abspath(folder) files_to_parse = glob.glob(op.join(docs_folder, '*.html')) pool = Pool(processes) output = pool.map(parse_file, files_to_parse) with open(OUTPUT_FILE, 'w') as fp: fp.writelines(output)
[ "def", "generate_fathead", "(", "folder", ",", "processes", "=", "CPU_COUNT", ")", ":", "docs_folder", "=", "op", ".", "abspath", "(", "folder", ")", "files_to_parse", "=", "glob", ".", "glob", "(", "op", ".", "join", "(", "docs_folder", ",", "'*.html'", ...
process documentation and write fathead file .
train
false
31,016
def kaminario_logger(func): def func_wrapper(*args, **kwargs): LOG.debug('Entering %(function)s of %(class)s with arguments: %(args)s, %(kwargs)s', {'class': args[0].__class__.__name__, 'function': func.__name__, 'args': args[1:], 'kwargs': kwargs}) ret = func(*args, **kwargs) LOG.debug('Exiting %(function)s of %(class)s having return value: %(ret)s', {'class': args[0].__class__.__name__, 'function': func.__name__, 'ret': ret}) return ret return func_wrapper
[ "def", "kaminario_logger", "(", "func", ")", ":", "def", "func_wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "LOG", ".", "debug", "(", "'Entering %(function)s of %(class)s with arguments: %(args)s, %(kwargs)s'", ",", "{", "'class'", ":", "args", "[",...
return a function wrapper .
train
false
31,018
def mmap_readwrite_private(f, sz=0, close=True): return _mmap_do(f, sz, mmap.MAP_PRIVATE, (mmap.PROT_READ | mmap.PROT_WRITE), close)
[ "def", "mmap_readwrite_private", "(", "f", ",", "sz", "=", "0", ",", "close", "=", "True", ")", ":", "return", "_mmap_do", "(", "f", ",", "sz", ",", "mmap", ".", "MAP_PRIVATE", ",", "(", "mmap", ".", "PROT_READ", "|", "mmap", ".", "PROT_WRITE", ")", ...
create a read-write memory mapped region on file f .
train
false
31,020
def quota_create(context, project_id, resource, limit, user_id=None): return IMPL.quota_create(context, project_id, resource, limit, user_id=user_id)
[ "def", "quota_create", "(", "context", ",", "project_id", ",", "resource", ",", "limit", ",", "user_id", "=", "None", ")", ":", "return", "IMPL", ".", "quota_create", "(", "context", ",", "project_id", ",", "resource", ",", "limit", ",", "user_id", "=", ...
create a quota for the given project and resource .
train
false
31,021
def flateDecode(stream, parameters): decodedStream = '' try: decodedStream = zlib.decompress(stream) except: return ((-1), 'Error decompressing string') if ((parameters == None) or (parameters == {})): return (0, decodedStream) else: if parameters.has_key('/Predictor'): predictor = parameters['/Predictor'].getRawValue() else: predictor = 1 if parameters.has_key('/Columns'): columns = parameters['/Columns'].getRawValue() else: columns = 1 if parameters.has_key('/Colors'): colors = parameters['/Colors'].getRawValue() if (colors < 1): colors = 1 else: colors = 1 if parameters.has_key('/BitsPerComponent'): bits = parameters['/BitsPerComponent'].getRawValue() if (bits not in [1, 2, 4, 8, 16]): bits = 8 else: bits = 8 if ((predictor != None) and (predictor != 1)): ret = post_prediction(decodedStream, predictor, columns, colors, bits) return ret else: return (0, decodedStream)
[ "def", "flateDecode", "(", "stream", ",", "parameters", ")", ":", "decodedStream", "=", "''", "try", ":", "decodedStream", "=", "zlib", ".", "decompress", "(", "stream", ")", "except", ":", "return", "(", "(", "-", "1", ")", ",", "'Error decompressing stri...
method to decode streams using the flate algorithm .
train
false
31,022
@declared def get_custom(obj_ref, cname, default=None): obj = get_objects(obj_ref) if (not obj): return default cname = cname.upper().strip() if (not cname.startswith('_')): cname = ('_' + cname) return obj.customs.get(cname, default)
[ "@", "declared", "def", "get_custom", "(", "obj_ref", ",", "cname", ",", "default", "=", "None", ")", ":", "obj", "=", "get_objects", "(", "obj_ref", ")", "if", "(", "not", "obj", ")", ":", "return", "default", "cname", "=", "cname", ".", "upper", "(...
get custom varialbe from a service or a host .
train
false
31,024
def clean_tag(data): return escape_html(data).replace('"', '&quot;').replace("'", '&#39')
[ "def", "clean_tag", "(", "data", ")", ":", "return", "escape_html", "(", "data", ")", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", ".", "replace", "(", "\"'\"", ",", "'&#39'", ")" ]
format as a valid tag .
train
false
31,026
def MakeHistFromList(t, label=None): return Hist(t, label=label)
[ "def", "MakeHistFromList", "(", "t", ",", "label", "=", "None", ")", ":", "return", "Hist", "(", "t", ",", "label", "=", "label", ")" ]
makes a histogram from an unsorted sequence of values .
train
false
31,027
def _center_and_normalize_points(points): centroid = np.mean(points, axis=0) rms = math.sqrt((np.sum(((points - centroid) ** 2)) / points.shape[0])) norm_factor = (math.sqrt(2) / rms) matrix = np.array([[norm_factor, 0, ((- norm_factor) * centroid[0])], [0, norm_factor, ((- norm_factor) * centroid[1])], [0, 0, 1]]) pointsh = np.row_stack([points.T, np.ones(points.shape[0])]) new_pointsh = np.dot(matrix, pointsh).T new_points = new_pointsh[:, :2] new_points[:, 0] /= new_pointsh[:, 2] new_points[:, 1] /= new_pointsh[:, 2] return (matrix, new_points)
[ "def", "_center_and_normalize_points", "(", "points", ")", ":", "centroid", "=", "np", ".", "mean", "(", "points", ",", "axis", "=", "0", ")", "rms", "=", "math", ".", "sqrt", "(", "(", "np", ".", "sum", "(", "(", "(", "points", "-", "centroid", ")...
center and normalize image points .
train
false
31,028
def decode_tbs(byts, flag_size=4): byts = bytes(byts) (val, flags, consumed) = decode_fvwi(byts, flag_size=flag_size) extra = {} byts = byts[consumed:] if ((flags & 8) and (flag_size > 3)): extra[8] = True if (flags & 2): (x, consumed2) = decint(byts) byts = byts[consumed2:] extra[2] = x consumed += consumed2 if (flags & 4): extra[4] = ord(byts[0]) byts = byts[1:] consumed += 1 if (flags & 1): (x, consumed2) = decint(byts) byts = byts[consumed2:] extra[1] = x consumed += consumed2 return (val, extra, consumed)
[ "def", "decode_tbs", "(", "byts", ",", "flag_size", "=", "4", ")", ":", "byts", "=", "bytes", "(", "byts", ")", "(", "val", ",", "flags", ",", "consumed", ")", "=", "decode_fvwi", "(", "byts", ",", "flag_size", "=", "flag_size", ")", "extra", "=", ...
trailing byte sequences for indexing consists of series of fvwi numbers .
train
false
31,031
@pytest.fixture def hug_api(): api = TestAPI('fake_api_{}'.format(randint(0, 1000000))) api.route = Routers(hug.routing.URLRouter().api(api), hug.routing.LocalRouter().api(api), hug.routing.CLIRouter().api(api)) return api
[ "@", "pytest", ".", "fixture", "def", "hug_api", "(", ")", ":", "api", "=", "TestAPI", "(", "'fake_api_{}'", ".", "format", "(", "randint", "(", "0", ",", "1000000", ")", ")", ")", "api", ".", "route", "=", "Routers", "(", "hug", ".", "routing", "....
defines a dependency for and then includes a uniquely identified hug api for a single test case .
train
false
31,033
def mask_quotes(input): search_re = re.compile(('(.*?)' + QUOTE_RE_STR)) return [search_re.sub(quote_replace, line) for line in input]
[ "def", "mask_quotes", "(", "input", ")", ":", "search_re", "=", "re", ".", "compile", "(", "(", "'(.*?)'", "+", "QUOTE_RE_STR", ")", ")", "return", "[", "search_re", ".", "sub", "(", "quote_replace", ",", "line", ")", "for", "line", "in", "input", "]" ...
mask the quoted strings so we skip braces inside quoted strings .
train
false
31,034
def _list_help(object_name, path=(), parent_object_names=()): items = graph_reference.ARRAYS[object_name]['items'] items_classes = set() for item in items: if (item in graph_reference.OBJECT_NAME_TO_CLASS_NAME): items_classes.add(graph_reference.string_to_class_name(item)) else: items_classes.add('dict') items_classes = list(items_classes) items_classes.sort() lines = textwrap.wrap(repr(items_classes), width=((LINE_SIZE - TAB_SIZE) - 1)) help_dict = {'object_name': object_name, 'path_string': (('[' + ']['.join((repr(k) for k in path))) + ']'), 'parent_object_names': parent_object_names, 'items_string': (' DCTB ' + '\n DCTB '.join(lines))} return "Valid items for '{object_name}' at path {path_string} under parents {parent_object_names}:\n{items_string}\n".format(**help_dict)
[ "def", "_list_help", "(", "object_name", ",", "path", "=", "(", ")", ",", "parent_object_names", "=", "(", ")", ")", ":", "items", "=", "graph_reference", ".", "ARRAYS", "[", "object_name", "]", "[", "'items'", "]", "items_classes", "=", "set", "(", ")",...
see get_help() .
train
false
31,035
def excepthook(exctype, value, traceback): getLogger('fail2ban').critical('Unhandled exception in Fail2Ban:', exc_info=True) return sys.__excepthook__(exctype, value, traceback)
[ "def", "excepthook", "(", "exctype", ",", "value", ",", "traceback", ")", ":", "getLogger", "(", "'fail2ban'", ")", ".", "critical", "(", "'Unhandled exception in Fail2Ban:'", ",", "exc_info", "=", "True", ")", "return", "sys", ".", "__excepthook__", "(", "exc...
except hook used to log unhandled exceptions to fail2ban log .
train
false
31,036
def is_safe_url(url, host=None): if (not url): return False netloc = urlparse.urlparse(url)[1] return ((not netloc) or (netloc == host))
[ "def", "is_safe_url", "(", "url", ",", "host", "=", "None", ")", ":", "if", "(", "not", "url", ")", ":", "return", "False", "netloc", "=", "urlparse", ".", "urlparse", "(", "url", ")", "[", "1", "]", "return", "(", "(", "not", "netloc", ")", "or"...
return true if the url is a safe redirection .
train
true
31,037
@register.function def sidebar(app): from olympia.addons.models import Category if (app is None): return ([], []) qs = Category.objects.filter(application=app.id, weight__gte=0, type=amo.ADDON_EXTENSION) categories = sorted(qs, key=attrgetter('weight', 'name')) Type = collections.namedtuple('Type', 'id name url') base = urlresolvers.reverse('home') types = [Type(99, _('Collections'), (base + 'collections/'))] shown_types = {amo.ADDON_PERSONA: urlresolvers.reverse('browse.personas'), amo.ADDON_DICT: urlresolvers.reverse('browse.language-tools'), amo.ADDON_SEARCH: urlresolvers.reverse('browse.search-tools'), amo.ADDON_THEME: urlresolvers.reverse('browse.themes')} titles = dict(amo.ADDON_TYPES, **{amo.ADDON_DICT: _('Dictionaries & Language Packs')}) for (type_, url) in shown_types.items(): if (type_ in app.types): types.append(Type(type_, titles[type_], url)) return (categories, sorted(types, key=(lambda x: x.name)))
[ "@", "register", ".", "function", "def", "sidebar", "(", "app", ")", ":", "from", "olympia", ".", "addons", ".", "models", "import", "Category", "if", "(", "app", "is", "None", ")", ":", "return", "(", "[", "]", ",", "[", "]", ")", "qs", "=", "Ca...
yaml: sidebar allows you to add links in the sidebar .
train
false
31,038
def update_assume_role_policy(role_name, policy_document, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if isinstance(policy_document, six.string_types): policy_document = json.loads(policy_document, object_pairs_hook=odict.OrderedDict) try: _policy_document = json.dumps(policy_document) conn.update_assume_role_policy(role_name, _policy_document) msg = 'Successfully updated assume role policy for role {0}.' log.info(msg.format(role_name)) return True except boto.exception.BotoServerError as e: log.error(e) msg = 'Failed to update assume role policy for role {0}.' log.error(msg.format(role_name)) return False
[ "def", "update_assume_role_policy", "(", "role_name", ",", "policy_document", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", "...
update an assume role policy for a role .
train
true
31,040
def persist_model(): model = generate_model() PERSISTED_MODEL.setContent(dumps({u'version': __version__, u'model': model}, sort_keys=True, indent=4, separators=(',', ': ')))
[ "def", "persist_model", "(", ")", ":", "model", "=", "generate_model", "(", ")", "PERSISTED_MODEL", ".", "setContent", "(", "dumps", "(", "{", "u'version'", ":", "__version__", ",", "u'model'", ":", "model", "}", ",", "sort_keys", "=", "True", ",", "indent...
store the current model to disk .
train
false
31,041
def _copy_headers(src, dest): for (k, v) in src.items(): if (is_sys_or_user_meta('object', k) or is_object_transient_sysmeta(k) or (k.lower() == 'x-delete-at')): dest[k] = v
[ "def", "_copy_headers", "(", "src", ",", "dest", ")", ":", "for", "(", "k", ",", "v", ")", "in", "src", ".", "items", "(", ")", ":", "if", "(", "is_sys_or_user_meta", "(", "'object'", ",", "k", ")", "or", "is_object_transient_sysmeta", "(", "k", ")",...
will copy desired headers from src to dest .
train
false
31,042
def remove_non_release_groups(name): if (not name): return name removeWordsList = {u'\\[rartv\\]$': u'searchre', u'\\[rarbg\\]$': u'searchre', u'\\[eztv\\]$': u'searchre', u'\\[ettv\\]$': u'searchre', u'\\[cttv\\]$': u'searchre', u'\\[vtv\\]$': u'searchre', u'\\[EtHD\\]$': u'searchre', u'\\[GloDLS\\]$': u'searchre', u'\\[silv4\\]$': u'searchre', u'\\[Seedbox\\]$': u'searchre', u'\\[PublicHD\\]$': u'searchre', u'\\[AndroidTwoU\\]$': u'searchre', u'\\.\\[BT\\]$': u'searchre', u' \\[1044\\]$': u'searchre', u'\\.RiPSaLoT$': u'searchre', u'\\.GiuseppeTnT$': u'searchre', u'\\.Renc$': u'searchre', u'-NZBGEEK$': u'searchre', u'-Siklopentan$': u'searchre', u'-\\[SpastikusTV\\]$': u'searchre', u'-RP$': u'searchre', u'-20-40$': u'searchre', u'\\.\\[www\\.usabit\\.com\\]$': u'searchre', u'^\\[www\\.Cpasbien\\.pe\\] ': u'searchre', u'^\\[www\\.Cpasbien\\.com\\] ': u'searchre', u'^\\[ www\\.Cpasbien\\.pw \\] ': u'searchre', u'^\\.www\\.Cpasbien\\.pw': u'searchre', u'^\\[www\\.newpct1\\.com\\]': u'searchre', u'^\\[ www\\.Cpasbien\\.com \\] ': u'searchre', u'- \\{ www\\.SceneTime\\.com \\}$': u'searchre', u'^\\{ www\\.SceneTime\\.com \\} - ': u'searchre', u'^\\]\\.\\[www\\.tensiontorrent.com\\] - ': u'searchre', u'^\\]\\.\\[ www\\.tensiontorrent.com \\] - ': u'searchre', u'- \\[ www\\.torrentday\\.com \\]$': u'searchre', u'^\\[ www\\.TorrentDay\\.com \\] - ': u'searchre', u'\\[NO-RAR\\] - \\[ www\\.torrentday\\.com \\]$': u'searchre'} _name = name for (remove_string, remove_type) in removeWordsList.items(): if (remove_type == u'search'): _name = _name.replace(remove_string, u'') elif (remove_type == u'searchre'): _name = re.sub((u'(?i)' + remove_string), u'', _name) return _name.strip(u'.- []{}')
[ "def", "remove_non_release_groups", "(", "name", ")", ":", "if", "(", "not", "name", ")", ":", "return", "name", "removeWordsList", "=", "{", "u'\\\\[rartv\\\\]$'", ":", "u'searchre'", ",", "u'\\\\[rarbg\\\\]$'", ":", "u'searchre'", ",", "u'\\\\[eztv\\\\]$'", ":",...
remove non release groups from name .
train
false
31,043
def getEvaluatedFloatByKeys(defaultFloat, keys, xmlElement): for key in keys: defaultFloat = getEvaluatedFloatDefault(defaultFloat, key, xmlElement) return defaultFloat
[ "def", "getEvaluatedFloatByKeys", "(", "defaultFloat", ",", "keys", ",", "xmlElement", ")", ":", "for", "key", "in", "keys", ":", "defaultFloat", "=", "getEvaluatedFloatDefault", "(", "defaultFloat", ",", "key", ",", "xmlElement", ")", "return", "defaultFloat" ]
get the evaluated value as a float by keys .
train
false
31,044
def getDataFiles(dname, ignore=None, parent=None): parent = (parent or '.') ignore = (ignore or []) result = [] for (directory, subdirectories, filenames) in os.walk(dname): resultfiles = [] for exname in EXCLUDE_NAMES: if (exname in subdirectories): subdirectories.remove(exname) for ig in ignore: if (ig in subdirectories): subdirectories.remove(ig) for filename in _filterNames(filenames): resultfiles.append(filename) if resultfiles: result.append((relativeTo(parent, directory), [relativeTo(parent, os.path.join(directory, filename)) for filename in resultfiles])) return result
[ "def", "getDataFiles", "(", "dname", ",", "ignore", "=", "None", ",", "parent", "=", "None", ")", ":", "parent", "=", "(", "parent", "or", "'.'", ")", "ignore", "=", "(", "ignore", "or", "[", "]", ")", "result", "=", "[", "]", "for", "(", "direct...
get all the data files that should be included in this distutils project .
train
false
31,045
def _pl(x): len_x = (x if isinstance(x, (integer_types, np.generic)) else len(x)) return ('' if (len_x == 1) else 's')
[ "def", "_pl", "(", "x", ")", ":", "len_x", "=", "(", "x", "if", "isinstance", "(", "x", ",", "(", "integer_types", ",", "np", ".", "generic", ")", ")", "else", "len", "(", "x", ")", ")", "return", "(", "''", "if", "(", "len_x", "==", "1", ")"...
determine if plural should be used .
train
false
31,046
def repaired(fmri): return _fmadm_action_fmri('repaired', fmri)
[ "def", "repaired", "(", "fmri", ")", ":", "return", "_fmadm_action_fmri", "(", "'repaired'", ",", "fmri", ")" ]
notify fault manager that resource has been repaired fmri: string fmri cli example: .
train
false
31,047
def aitchison_aitken(h, Xi, x, num_levels=None): Xi = Xi.reshape(Xi.size) if (num_levels is None): num_levels = np.asarray(np.unique(Xi).size) kernel_value = ((np.ones(Xi.size) * h) / (num_levels - 1)) idx = (Xi == x) kernel_value[idx] = (idx * (1 - h))[idx] return kernel_value
[ "def", "aitchison_aitken", "(", "h", ",", "Xi", ",", "x", ",", "num_levels", "=", "None", ")", ":", "Xi", "=", "Xi", ".", "reshape", "(", "Xi", ".", "size", ")", "if", "(", "num_levels", "is", "None", ")", ":", "num_levels", "=", "np", ".", "asar...
the aitchison-aitken kernel .
train
false
31,048
def test_progress_big_statusbar(qtbot, fake_statusbar, progress_widget): fake_statusbar.hbox.addWidget(progress_widget) progress_widget.show() expected_height = progress_widget.height() fake_statusbar.hbox.addStrut(50) assert (progress_widget.height() == expected_height)
[ "def", "test_progress_big_statusbar", "(", "qtbot", ",", "fake_statusbar", ",", "progress_widget", ")", ":", "fake_statusbar", ".", "hbox", ".", "addWidget", "(", "progress_widget", ")", "progress_widget", ".", "show", "(", ")", "expected_height", "=", "progress_wid...
make sure the progress bar is small with a big statusbar .
train
false
31,051
@manager.command() @click.argument('email', default=settings.MAIL_DEFAULT_SENDER, required=False) def send_test_mail(email=None): from redash import mail from flask_mail import Message if (email is None): email = settings.MAIL_DEFAULT_SENDER mail.send(Message(subject='Test Message from Redash', recipients=[email], body='Test message.'))
[ "@", "manager", ".", "command", "(", ")", "@", "click", ".", "argument", "(", "'email'", ",", "default", "=", "settings", ".", "MAIL_DEFAULT_SENDER", ",", "required", "=", "False", ")", "def", "send_test_mail", "(", "email", "=", "None", ")", ":", "from"...
send test message to email .
train
false
31,053
def _get_dev2_url(backend, instance=None): return ('http://%s' % _get_dev2_hostname(backend, instance))
[ "def", "_get_dev2_url", "(", "backend", ",", "instance", "=", "None", ")", ":", "return", "(", "'http://%s'", "%", "_get_dev2_hostname", "(", "backend", ",", "instance", ")", ")" ]
returns the url of a backend [instance] in devappserver2 .
train
false
31,054
def unpublicize_collection(committer_id, collection_id): _unpublicize_activity(committer_id, collection_id, feconf.ACTIVITY_TYPE_COLLECTION)
[ "def", "unpublicize_collection", "(", "committer_id", ",", "collection_id", ")", ":", "_unpublicize_activity", "(", "committer_id", ",", "collection_id", ",", "feconf", ".", "ACTIVITY_TYPE_COLLECTION", ")" ]
unpublicizes an collection .
train
false
31,055
def list_to_jquery_autocompletion_format(list): return '\n'.join(list)
[ "def", "list_to_jquery_autocompletion_format", "(", "list", ")", ":", "return", "'\\n'", ".", "join", "(", "list", ")" ]
converts a list to the format required by jquerys autocomplete plugin .
train
false
31,056
def cache_node(node, provider, opts): if isinstance(opts, dict): __opts__.update(opts) if (('update_cachedir' not in __opts__) or (not __opts__['update_cachedir'])): return if (not os.path.exists(os.path.join(__opts__['cachedir'], 'active'))): init_cachedir() base = os.path.join(__opts__['cachedir'], 'active') (provider, driver) = provider.split(':') prov_dir = os.path.join(base, driver, provider) if (not os.path.exists(prov_dir)): os.makedirs(prov_dir) path = os.path.join(prov_dir, '{0}.p'.format(node['name'])) with salt.utils.fopen(path, 'w') as fh_: msgpack.dump(node, fh_)
[ "def", "cache_node", "(", "node", ",", "provider", ",", "opts", ")", ":", "if", "isinstance", "(", "opts", ",", "dict", ")", ":", "__opts__", ".", "update", "(", "opts", ")", "if", "(", "(", "'update_cachedir'", "not", "in", "__opts__", ")", "or", "(...
cache node individually .
train
true
31,057
def linkify_with_outgoing(text, nofollow=True, only_full=False): callbacks = ([linkify_only_full_urls] if only_full else []) callbacks.append(linkify_bounce_url_callback) if nofollow: callbacks.append(bleach.callbacks.nofollow) return bleach.linkify(unicode(text), callbacks=callbacks)
[ "def", "linkify_with_outgoing", "(", "text", ",", "nofollow", "=", "True", ",", "only_full", "=", "False", ")", ":", "callbacks", "=", "(", "[", "linkify_only_full_urls", "]", "if", "only_full", "else", "[", "]", ")", "callbacks", ".", "append", "(", "link...
wrapper around bleach .
train
false
31,058
def load_privatekey(type, buffer, passphrase=None): if isinstance(buffer, _text_type): buffer = buffer.encode('ascii') bio = _new_mem_buf(buffer) helper = _PassphraseHelper(type, passphrase) if (type == FILETYPE_PEM): evp_pkey = _lib.PEM_read_bio_PrivateKey(bio, _ffi.NULL, helper.callback, helper.callback_args) helper.raise_if_problem() elif (type == FILETYPE_ASN1): evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL) else: raise ValueError('type argument must be FILETYPE_PEM or FILETYPE_ASN1') if (evp_pkey == _ffi.NULL): _raise_current_error() pkey = PKey.__new__(PKey) pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) return pkey
[ "def", "load_privatekey", "(", "type", ",", "buffer", ",", "passphrase", "=", "None", ")", ":", "if", "isinstance", "(", "buffer", ",", "_text_type", ")", ":", "buffer", "=", "buffer", ".", "encode", "(", "'ascii'", ")", "bio", "=", "_new_mem_buf", "(", ...
load a private key from a buffer .
train
true
31,060
def expand_ipv6(address): packed_ip = socket.inet_pton(socket.AF_INET6, address) return socket.inet_ntop(socket.AF_INET6, packed_ip)
[ "def", "expand_ipv6", "(", "address", ")", ":", "packed_ip", "=", "socket", ".", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "address", ")", "return", "socket", ".", "inet_ntop", "(", "socket", ".", "AF_INET6", ",", "packed_ip", ")" ]
expand ipv6 address .
train
false
31,061
def _contiguous_regions(condition): d = np.diff(condition) (idx,) = d.nonzero() idx += 1 if condition[0]: idx = np.r_[(0, idx)] if condition[(-1)]: idx = np.r_[(idx, condition.size)] idx.shape = ((-1), 2) return idx
[ "def", "_contiguous_regions", "(", "condition", ")", ":", "d", "=", "np", ".", "diff", "(", "condition", ")", "(", "idx", ",", ")", "=", "d", ".", "nonzero", "(", ")", "idx", "+=", "1", "if", "condition", "[", "0", "]", ":", "idx", "=", "np", "...
finds contiguous true regions of the boolean array "condition" .
train
false
31,062
def debug_print(*message): if DEBUG: ss = STDOUT if PY3: for m in message: ss.buffer.write(str(m).encode('utf-8')) print('', file=ss) else: print(file=ss, *message)
[ "def", "debug_print", "(", "*", "message", ")", ":", "if", "DEBUG", ":", "ss", "=", "STDOUT", "if", "PY3", ":", "for", "m", "in", "message", ":", "ss", ".", "buffer", ".", "write", "(", "str", "(", "m", ")", ".", "encode", "(", "'utf-8'", ")", ...
special function for debug output .
train
false
31,063
def _format_position(data, pos): line = 0 col = 0 llb = u'' for c in data[:pos]: if ((c == u'\r') or (c == u'\n')): if (llb and (c != llb)): llb = u'' else: line += 1 col = 0 llb = c else: col += 1 llb = u'' return u'line {0}, column {1}'.format((line + 1), (col + 1))
[ "def", "_format_position", "(", "data", ",", "pos", ")", ":", "line", "=", "0", "col", "=", "0", "llb", "=", "u''", "for", "c", "in", "data", "[", ":", "pos", "]", ":", "if", "(", "(", "c", "==", "u'\\r'", ")", "or", "(", "c", "==", "u'\\n'",...
return position formatted as line/column .
train
false
31,064
def make_error_tracker(): errors = [] def error_tracker(msg): 'Log errors' exc_str = '' if in_exception_handler(): exc_str = exc_info_to_str(sys.exc_info()) if (('python2.7/site-packages/gunicorn/workers/sync.py' in exc_str) and ('[Errno 11] Resource temporarily unavailable' in exc_str)): exc_str = '' errors.append((msg, exc_str)) return ErrorLog(error_tracker, errors)
[ "def", "make_error_tracker", "(", ")", ":", "errors", "=", "[", "]", "def", "error_tracker", "(", "msg", ")", ":", "exc_str", "=", "''", "if", "in_exception_handler", "(", ")", ":", "exc_str", "=", "exc_info_to_str", "(", "sys", ".", "exc_info", "(", ")"...
return an errorlog .
train
false
31,065
def sendExact(signal=Any, sender=Anonymous, *arguments, **named): responses = [] for receiver in liveReceivers(getReceivers(sender, signal)): response = robustapply.robustApply(receiver, signal=signal, sender=sender, *arguments, **named) responses.append((receiver, response)) return responses
[ "def", "sendExact", "(", "signal", "=", "Any", ",", "sender", "=", "Anonymous", ",", "*", "arguments", ",", "**", "named", ")", ":", "responses", "=", "[", "]", "for", "receiver", "in", "liveReceivers", "(", "getReceivers", "(", "sender", ",", "signal", ...
send signal only to those receivers registered for exact message sendexact allows for avoiding any/anonymous registered handlers .
train
true
31,066
def metasploit_pattern(sets=None): sets = (sets or [string.ascii_uppercase, string.ascii_lowercase, string.digits]) offsets = ([0] * len(sets)) offsets_indexes_reversed = list(reversed(range(len(offsets)))) while True: for (i, j) in zip(sets, offsets): (yield i[j]) for i in offsets_indexes_reversed: offsets[i] = ((offsets[i] + 1) % len(sets[i])) if (offsets[i] != 0): break if (offsets == ([0] * len(sets))): return
[ "def", "metasploit_pattern", "(", "sets", "=", "None", ")", ":", "sets", "=", "(", "sets", "or", "[", "string", ".", "ascii_uppercase", ",", "string", ".", "ascii_lowercase", ",", "string", ".", "digits", "]", ")", "offsets", "=", "(", "[", "0", "]", ...
metasploit_pattern -> generator generator for a sequence of characters as per metasploit frameworks rex::text .
train
false
31,069
def re_validate_certificate(request, course_key, generated_certificate): try: certificate_invalidation = CertificateInvalidation.objects.get(generated_certificate=generated_certificate) except ObjectDoesNotExist: raise ValueError(_('Certificate Invalidation does not exist, Please refresh the page and try again.')) else: certificate_invalidation.deactivate() student = certificate_invalidation.generated_certificate.user lms.djangoapps.instructor_task.api.generate_certificates_for_students(request, course_key, student_set='specific_student', specific_student_id=student.id)
[ "def", "re_validate_certificate", "(", "request", ",", "course_key", ",", "generated_certificate", ")", ":", "try", ":", "certificate_invalidation", "=", "CertificateInvalidation", ".", "objects", ".", "get", "(", "generated_certificate", "=", "generated_certificate", "...
remove certificate invalidation from db and start certificate generation task for this student .
train
false
31,070
def change_yaml_files(check_dict): root = get_test_config_dir() keys = check_dict['yaml_files'].keys() check_dict['yaml_files'] = [] for key in sorted(keys): if (not key.startswith('/')): check_dict['yaml_files'].append(key) if key.startswith(root): check_dict['yaml_files'].append(('...' + key[len(root):]))
[ "def", "change_yaml_files", "(", "check_dict", ")", ":", "root", "=", "get_test_config_dir", "(", ")", "keys", "=", "check_dict", "[", "'yaml_files'", "]", ".", "keys", "(", ")", "check_dict", "[", "'yaml_files'", "]", "=", "[", "]", "for", "key", "in", ...
change the [yaml_files] property and remove the config path .
train
false
31,072
def _infer_language(backend): langs = _lang_lookup.get(backend.upper(), False) if (not langs): raise ValueError(('Unrecognized backend: ' + backend)) return langs[0]
[ "def", "_infer_language", "(", "backend", ")", ":", "langs", "=", "_lang_lookup", ".", "get", "(", "backend", ".", "upper", "(", ")", ",", "False", ")", "if", "(", "not", "langs", ")", ":", "raise", "ValueError", "(", "(", "'Unrecognized backend: '", "+"...
for a given backend .
train
false
31,073
def wait_for_video_player_ready(context, wait_time=MAX_WAIT_TIME): class ContextWithMixin(BrowserActionMixins, ): def __init__(self): self.browser = context.browser context_wm = ContextWithMixin() context_wm.browser_wait_for_js_object_exists('$') context_wm.browser_wait_for_js_object_exists('$("video")') context.browser.execute_script('$("video").trigger("loadedmetadata");') try: context_wm.browser_wait_for_js_condition('window._kalite_debug.video_player_initialized', max_wait_time=wait_time) except KALiteTimeout as e: raise TimeoutException(str(e))
[ "def", "wait_for_video_player_ready", "(", "context", ",", "wait_time", "=", "MAX_WAIT_TIME", ")", ":", "class", "ContextWithMixin", "(", "BrowserActionMixins", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "browser", "=", "context", "....
if youre on the "learn" page with video content .
train
false
31,074
def nice_time(time): if (time < 15): return 'moments' if (time < 90): return ('%d seconds' % time) if (time < ((60 * 60) * 1.5)): return ('%d minutes' % (time / 60.0)) if (time < (((24 * 60) * 60) * 1.5)): return ('%d hours' % (time / 3600.0)) if (time < ((((7 * 24) * 60) * 60) * 1.5)): return ('%d days' % (time / 86400.0)) if (time < ((((30 * 24) * 60) * 60) * 1.5)): return ('%d weeks' % (time / 604800.0)) return ('%d months' % (time / 2592000.0))
[ "def", "nice_time", "(", "time", ")", ":", "if", "(", "time", "<", "15", ")", ":", "return", "'moments'", "if", "(", "time", "<", "90", ")", ":", "return", "(", "'%d seconds'", "%", "time", ")", "if", "(", "time", "<", "(", "(", "60", "*", "60"...
format a time in seconds to a string like "5 minutes" .
train
false
31,076
def floating_ip_update(context, address, values): return IMPL.floating_ip_update(context, address, values)
[ "def", "floating_ip_update", "(", "context", ",", "address", ",", "values", ")", ":", "return", "IMPL", ".", "floating_ip_update", "(", "context", ",", "address", ",", "values", ")" ]
update a floating ip by address or raise if it doesnt exist .
train
false
31,077
def serialize_array(array, force_list=False): if isinstance(array, np.ma.MaskedArray): array = array.filled(np.nan) if (array_encoding_disabled(array) or force_list): return transform_array_to_list(array) if (not array.flags['C_CONTIGUOUS']): array = np.ascontiguousarray(array) return encode_base64_dict(array)
[ "def", "serialize_array", "(", "array", ",", "force_list", "=", "False", ")", ":", "if", "isinstance", "(", "array", ",", "np", ".", "ma", ".", "MaskedArray", ")", ":", "array", "=", "array", ".", "filled", "(", "np", ".", "nan", ")", "if", "(", "a...
transforms a numpy array into serialized form .
train
true
31,078
def sort_json_policy_dict(policy_dict): def value_is_list(my_list): checked_list = [] for item in my_list: if isinstance(item, dict): checked_list.append(sort_json_policy_dict(item)) elif isinstance(item, list): checked_list.append(value_is_list(item)) else: checked_list.append(item) checked_list.sort() return checked_list ordered_policy_dict = {} for (key, value) in policy_dict.items(): if isinstance(value, dict): ordered_policy_dict[key] = sort_json_policy_dict(value) elif isinstance(value, list): ordered_policy_dict[key] = value_is_list(value) else: ordered_policy_dict[key] = value return ordered_policy_dict
[ "def", "sort_json_policy_dict", "(", "policy_dict", ")", ":", "def", "value_is_list", "(", "my_list", ")", ":", "checked_list", "=", "[", "]", "for", "item", "in", "my_list", ":", "if", "isinstance", "(", "item", ",", "dict", ")", ":", "checked_list", ".",...
sort any lists in an iam json policy so that comparison of two policies with identical values but different orders will return true args: policy_dict : dict representing iam json policy .
train
false
31,079
def test_rgb_to_hsl_part_7(): assert (rgb_to_hsl(153, 102, 102) == (0, 20, 50)) assert (rgb_to_hsl(204, 51, 51) == (0, 60, 50)) assert (rgb_to_hsl(255, 0, 0) == (0, 100, 50))
[ "def", "test_rgb_to_hsl_part_7", "(", ")", ":", "assert", "(", "rgb_to_hsl", "(", "153", ",", "102", ",", "102", ")", "==", "(", "0", ",", "20", ",", "50", ")", ")", "assert", "(", "rgb_to_hsl", "(", "204", ",", "51", ",", "51", ")", "==", "(", ...
test rgb to hsl color function .
train
false
31,080
def _params_extend(params, _ignore_name=False, **kwargs): for key in kwargs.keys(): if (not key.startswith('_')): params.setdefault(key, kwargs[key]) if _ignore_name: params.pop('name', None) if ('firstname' in params): params['name'] = params.pop('firstname') elif ('visible_name' in params): params['name'] = params.pop('visible_name') return params
[ "def", "_params_extend", "(", "params", ",", "_ignore_name", "=", "False", ",", "**", "kwargs", ")", ":", "for", "key", "in", "kwargs", ".", "keys", "(", ")", ":", "if", "(", "not", "key", ".", "startswith", "(", "'_'", ")", ")", ":", "params", "."...
extends the params dictionary by values from keyword arguments .
train
true
31,081
@require_admin @api_handle_error_with_json def delete_language_pack(request): lang_code = simplejson.loads((request.body or '{}')).get('lang') delete_language(lang_code) return JsonResponse({'success': (_('Successfully deleted language pack for %(lang_name)s.') % {'lang_name': get_language_name(lang_code)})})
[ "@", "require_admin", "@", "api_handle_error_with_json", "def", "delete_language_pack", "(", "request", ")", ":", "lang_code", "=", "simplejson", ".", "loads", "(", "(", "request", ".", "body", "or", "'{}'", ")", ")", ".", "get", "(", "'lang'", ")", "delete_...
api endpoint for deleting language pack which fetches the language code which has to be deleted .
train
false
31,082
def dmp_nth(f, n, u, K): if (n < 0): raise IndexError(("'n' must be non-negative, got %i" % n)) elif (n >= len(f)): return dmp_zero((u - 1)) else: return f[(dmp_degree(f, u) - n)]
[ "def", "dmp_nth", "(", "f", ",", "n", ",", "u", ",", "K", ")", ":", "if", "(", "n", "<", "0", ")", ":", "raise", "IndexError", "(", "(", "\"'n' must be non-negative, got %i\"", "%", "n", ")", ")", "elif", "(", "n", ">=", "len", "(", "f", ")", "...
return the n-th coefficient of f in k[x] .
train
false
31,083
def tree_encode(shalist): shalist = sorted(shalist, key=shalist_item_sort_key) l = [] for (mode, name, bin) in shalist: assert mode assert ((mode + 0) == mode) assert name assert (len(bin) == 20) s = ('%o %s\x00%s' % (mode, name, bin)) assert (s[0] != '0') l.append(s) return ''.join(l)
[ "def", "tree_encode", "(", "shalist", ")", ":", "shalist", "=", "sorted", "(", "shalist", ",", "key", "=", "shalist_item_sort_key", ")", "l", "=", "[", "]", "for", "(", "mode", ",", "name", ",", "bin", ")", "in", "shalist", ":", "assert", "mode", "as...
generate a git tree object from tuples .
train
false
31,084
def varinversefilter(ar, nobs, version=1): (nlags, nvars, nvarsex) = ar.shape if (nvars != nvarsex): print('exogenous variables not implemented not tested') arinv = np.zeros(((nobs + 1), nvarsex, nvars)) arinv[0, :, :] = ar[0] arinv[1:nlags, :, :] = (- ar[1:]) if (version == 1): for i in range(2, (nobs + 1)): tmp = np.zeros((nvars, nvars)) for p in range(1, nlags): tmp += np.dot((- ar[p]), arinv[(i - p), :, :]) arinv[i, :, :] = tmp if (version == 0): for i in range((nlags + 1), (nobs + 1)): print(ar[1:].shape, arinv[(i - 1):(i - nlags):(-1), :, :].shape) raise NotImplementedError('waiting for generalized ufuncs or something') return arinv
[ "def", "varinversefilter", "(", "ar", ",", "nobs", ",", "version", "=", "1", ")", ":", "(", "nlags", ",", "nvars", ",", "nvarsex", ")", "=", "ar", ".", "shape", "if", "(", "nvars", "!=", "nvarsex", ")", ":", "print", "(", "'exogenous variables not impl...
creates inverse ar filter recursively the var lag polynomial is defined by :: ar(l) y_t = u_t or y_t = -ar_{-1}(l) y_{t-1} + u_t the returned lagpolynomial is arinv(l)=ar^{-1}(l) in :: y_t = arinv(l) u_t parameters ar : array .
train
false
31,085
def getComplexDefaultByDictionaryKeys(defaultComplex, dictionary, keyX, keyY): x = getFloatDefaultByDictionary(defaultComplex.real, dictionary, keyX) y = getFloatDefaultByDictionary(defaultComplex.real, dictionary, keyY) return complex(x, y)
[ "def", "getComplexDefaultByDictionaryKeys", "(", "defaultComplex", ",", "dictionary", ",", "keyX", ",", "keyY", ")", ":", "x", "=", "getFloatDefaultByDictionary", "(", "defaultComplex", ".", "real", ",", "dictionary", ",", "keyX", ")", "y", "=", "getFloatDefaultBy...
get the value as a complex .
train
false
31,086
def _discrete_log_trial_mul(n, a, b, order=None): a %= n b %= n if (order is None): order = n x = 1 k = 1 for i in range(order): if (x == a): return i x = ((x * b) % n) raise ValueError('Log does not exist')
[ "def", "_discrete_log_trial_mul", "(", "n", ",", "a", ",", "b", ",", "order", "=", "None", ")", ":", "a", "%=", "n", "b", "%=", "n", "if", "(", "order", "is", "None", ")", ":", "order", "=", "n", "x", "=", "1", "k", "=", "1", "for", "i", "i...
trial multiplication algorithm for computing the discrete logarithm of a to the base b modulo n .
train
false
31,088
def safe_rm_oldest_items_in_dir(root_dir, num_of_items_to_keep, excludes=frozenset()): if os.path.isdir(root_dir): found_files = [] for old_file in os.listdir(root_dir): full_path = os.path.join(root_dir, old_file) if (full_path not in excludes): found_files.append((full_path, os.path.getmtime(full_path))) found_files = sorted(found_files, key=(lambda x: x[1]), reverse=True) for (cur_file, _) in found_files[num_of_items_to_keep:]: rm_rf(cur_file)
[ "def", "safe_rm_oldest_items_in_dir", "(", "root_dir", ",", "num_of_items_to_keep", ",", "excludes", "=", "frozenset", "(", ")", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "root_dir", ")", ":", "found_files", "=", "[", "]", "for", "old_file", "i...
keep num_of_items_to_keep newly modified items besides excludes in root_dir then remove the rest .
train
true
31,090
@debug.increase_indent def search_params(evaluator, param): if (not settings.dynamic_params): return set() evaluator.dynamic_params_depth += 1 try: func = param.get_parent_until(tree.Function) debug.dbg('Dynamic param search for %s in %s.', param, str(func.name), color='MAGENTA') names = [n for n in search_function_call(evaluator, func) if (n.value == param.name.value)] result = set(chain.from_iterable((n.parent.eval(evaluator) for n in names))) debug.dbg('Dynamic param result %s', result, color='MAGENTA') return result finally: evaluator.dynamic_params_depth -= 1
[ "@", "debug", ".", "increase_indent", "def", "search_params", "(", "evaluator", ",", "param", ")", ":", "if", "(", "not", "settings", ".", "dynamic_params", ")", ":", "return", "set", "(", ")", "evaluator", ".", "dynamic_params_depth", "+=", "1", "try", ":...
a dynamic search for param values .
train
false
31,093
def get_cache_dir(): linkto = None if (set_temp_cache._temp_path is not None): xch = set_temp_cache._temp_path cache_path = os.path.join(xch, u'astropy') if (not os.path.exists(cache_path)): os.mkdir(cache_path) return os.path.abspath(cache_path) xch = os.environ.get(u'XDG_CACHE_HOME') if ((xch is not None) and os.path.exists(xch)): xchpth = os.path.join(xch, u'astropy') if (not os.path.islink(xchpth)): if os.path.exists(xchpth): return os.path.abspath(xchpth) else: linkto = xchpth return os.path.abspath(_find_or_create_astropy_dir(u'cache', linkto))
[ "def", "get_cache_dir", "(", ")", ":", "linkto", "=", "None", "if", "(", "set_temp_cache", ".", "_temp_path", "is", "not", "None", ")", ":", "xch", "=", "set_temp_cache", ".", "_temp_path", "cache_path", "=", "os", ".", "path", ".", "join", "(", "xch", ...
determines the astropy cache directory name and creates the directory if it doesnt exist .
train
false
31,094
def test_auth_error_handling(contact_sync, default_account, db): default_account.refresh_token = 'foo' for auth_creds in default_account.auth_credentials: auth_creds.refresh_token = 'foo' db.session.commit() contact_sync.start() contact_sync.join(timeout=10) success = contact_sync.successful() if (not success): contact_sync.kill() assert success, "contact sync greenlet didn't terminate."
[ "def", "test_auth_error_handling", "(", "contact_sync", ",", "default_account", ",", "db", ")", ":", "default_account", ".", "refresh_token", "=", "'foo'", "for", "auth_creds", "in", "default_account", ".", "auth_credentials", ":", "auth_creds", ".", "refresh_token", ...
test that the contact sync greenlet stops if account credentials are invalid .
train
false
31,096
def output(): return s3_rest_controller()
[ "def", "output", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
mane function .
train
false
31,097
def _build_subattr_match_rule(attr_name, attr, action, target): validate = attr['validate'] key = [k for k in validate.keys() if k.startswith('type:dict')] if (not key): LOG.warning(_LW('Unable to find data type descriptor for attribute %s'), attr_name) return data = validate[key[0]] if (not isinstance(data, dict)): LOG.debug('Attribute type descriptor is not a dict. Unable to generate any sub-attr policy rule for %s.', attr_name) return sub_attr_rules = [policy.RuleCheck('rule', ('%s:%s:%s' % (action, attr_name, sub_attr_name))) for sub_attr_name in data if (sub_attr_name in target[attr_name])] return policy.AndCheck(sub_attr_rules)
[ "def", "_build_subattr_match_rule", "(", "attr_name", ",", "attr", ",", "action", ",", "target", ")", ":", "validate", "=", "attr", "[", "'validate'", "]", "key", "=", "[", "k", "for", "k", "in", "validate", ".", "keys", "(", ")", "if", "k", ".", "st...
create the rule to match for sub-attribute policy checks .
train
false
31,098
def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): bulk_actions = [] (size, action_count) = (0, 0) for (action, data) in actions: action = serializer.dumps(action) cur_size = (len(action) + 1) if (data is not None): data = serializer.dumps(data) cur_size += (len(data) + 1) if (bulk_actions and (((size + cur_size) > max_chunk_bytes) or (action_count == chunk_size))): (yield bulk_actions) bulk_actions = [] (size, action_count) = (0, 0) bulk_actions.append(action) if (data is not None): bulk_actions.append(data) size += cur_size action_count += 1 if bulk_actions: (yield bulk_actions)
[ "def", "_chunk_actions", "(", "actions", ",", "chunk_size", ",", "max_chunk_bytes", ",", "serializer", ")", ":", "bulk_actions", "=", "[", "]", "(", "size", ",", "action_count", ")", "=", "(", "0", ",", "0", ")", "for", "(", "action", ",", "data", ")",...
split actions into chunks by number or size .
train
true