Dataset columns:
  id_within_dataset (int64): 1 to 55.5k
  snippet (string): lengths 19 to 14.2k
  tokens (list): lengths 6 to 1.63k
  nl (string): lengths 6 to 352
  split_within_dataset (string): 1 value
  is_duplicated (bool): 2 classes
2,561
def check_csrf_token(request, token='csrf_token', header='X-CSRF-Token', raises=True):
    supplied_token = ''
    if (token is not None):
        supplied_token = request.POST.get(token, '')
    if ((supplied_token == '') and (header is not None)):
        supplied_token = request.headers.get(header, '')
    expected_token = request.session.get_csrf_token()
    if strings_differ(bytes_(expected_token), bytes_(supplied_token)):
        if raises:
            raise BadCSRFToken('check_csrf_token(): Invalid token')
        return False
    return True
check the csrf token in the request's session against the value supplied in the request .
train
false
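A note on the comparison step above: strings_differ is a constant-time comparison helper, and the standard library's hmac.compare_digest gives the same timing-safe behavior. A minimal sketch with made-up token values:

    import hmac

    expected_token = b'9f3b0c4a1d'  # hypothetical session token
    supplied_token = b'9f3b0c4a1d'  # hypothetical token from the request
    # compare_digest runs in constant time, avoiding timing side channels;
    # negating it mirrors the strings_differ semantics (True when they differ)
    print(not hmac.compare_digest(expected_token, supplied_token))  # False -> tokens match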
2,563
def random_zoom(x, zoom_range, row_axis=1, col_axis=2, channel_axis=0, fill_mode='nearest', cval=0.0):
    if (len(zoom_range) != 2):
        raise ValueError('zoom_range should be a tuple or list of two floats. Received arg: ', zoom_range)
    if ((zoom_range[0] == 1) and (zoom_range[1] == 1)):
        (zx, zy) = (1, 1)
    else:
        (zx, zy) = np.random.uniform(zoom_range[0], zoom_range[1], 2)
    zoom_matrix = np.array([[zx, 0, 0], [0, zy, 0], [0, 0, 1]])
    (h, w) = (x.shape[row_axis], x.shape[col_axis])
    transform_matrix = transform_matrix_offset_center(zoom_matrix, h, w)
    x = apply_transform(x, transform_matrix, channel_axis, fill_mode, cval)
    return x
performs a random spatial zoom of a numpy image tensor .
train
false
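The heart of the snippet is the homogeneous 3x3 zoom matrix. A standalone sketch of that sampling step, assuming an example zoom_range of (0.8, 1.2):

    import numpy as np

    zoom_range = (0.8, 1.2)  # assumed example range
    zx, zy = np.random.uniform(zoom_range[0], zoom_range[1], 2)
    # homogeneous 3x3 zoom matrix, exactly as built in the snippet above
    zoom_matrix = np.array([[zx, 0, 0],
                            [0, zy, 0],
                            [0,  0, 1]])
    print(zoom_matrix)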
2,564
def _route_flags(rflags):
    flags = ''
    fmap = {1: 'U', 2: 'G', 4: 'H', 8: 'R', 16: 'D', 32: 'M', 262144: 'A', 16777216: 'C', 512: '!'}
    for item in fmap.keys():
        if (rflags & item):
            flags += fmap[item]
    return flags
converts numeric route flags to their single-letter string representation .
train
true
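A quick illustration, assuming _route_flags above is in scope; 1 | 2 | 4 sets the U, G and H bits:

    # hypothetical usage of _route_flags defined above
    print(_route_flags(1 | 2 | 4))  # 'UGH' on Python 3.7+, where dicts preserve insertion order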
2,565
def get_image_id(kwargs=None, call=None):
    if (call == 'action'):
        raise SaltCloudSystemExit('The get_image_id function must be called with -f or --function.')
    if (kwargs is None):
        kwargs = {}
    name = kwargs.get('name', None)
    if (name is None):
        raise SaltCloudSystemExit('The get_image_id function requires a name.')
    try:
        ret = avail_images()[name]['id']
    except KeyError:
        raise SaltCloudSystemExit("The image '{0}' could not be found".format(name))
    return ret
returns an images id from the given image name .
train
true
2,566
def writewav24(filename, rate, data):
    a32 = _np.asarray(data, dtype=_np.int32)
    if (a32.ndim == 1):
        a32.shape = (a32.shape + (1,))
    a8 = ((a32.reshape((a32.shape + (1,))) >> _np.array([0, 8, 16])) & 255)
    wavdata = a8.astype(_np.uint8).tostring()
    w = _wave.open(filename, 'wb')
    w.setnchannels(a32.shape[1])
    w.setsampwidth(3)
    w.setframerate(rate)
    w.writeframes(wavdata)
    w.close()
create a 24 bit wav file .
train
false
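The core trick above packs each 32-bit sample into three little-endian bytes via a broadcast shift-and-mask. A standalone check with two made-up samples:

    import numpy as np

    a32 = np.array([[0x123456], [0x0A0B0C]], dtype=np.int32)
    # shift out the low, middle and high byte of each sample (0x56, 0x34, 0x12 for the first)
    a8 = (a32.reshape(a32.shape + (1,)) >> np.array([0, 8, 16])) & 255
    print(a8.astype(np.uint8))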
2,567
def test_zero_precision_recall():
    try:
        old_error_settings = np.seterr(all='raise')
        y_real = np.array([['a', 'b', 'c']])
        y_pred = np.array([[]])
        assert_array_almost_equal(precision_score(y_real, y_pred), 0.0, 2)
        assert_array_almost_equal(recall_score(y_real, y_pred), 0.0, 2)
        assert_array_almost_equal(f1_score(y_real, y_pred), 0.0, 2)
    finally:
        np.seterr(**old_error_settings)
check that pathological cases do not bring nans .
train
false
2,570
def successResponse(response):
    response = str(response)
    return ('+OK %s\r\n' % (response,))
format an object as a positive response .
train
false
2,572
def list_minus(l, minus):
    return [o for o in l if (o not in minus)]
returns l without what is in minus .
train
true
2,573
def check_pickling_recovery(original, protocol):
    f = pickle.dumps(original, protocol=protocol)
    unpickled = pickle.loads(f)
    class_history = [original.__class__]
    generic_recursive_equality_test(original, unpickled, class_history)
try to pickle an object .
train
false
2,574
def restore_cache(old_cache):
    global FS_CACHE
    FS_CACHE = old_cache
restores cache from the result of a previous clear_cache call .
train
false
2,578
def check_no_alert():
    try:
        assert_is_none(world.browser.get_alert())
    except NoAlertPresentException:
        pass
make sure the alert has gone away .
train
false
2,579
def rsa_encrypt(data, rsa_pub_key_str):
    key = RSA.importKey(rsa_pub_key_str)
    cipher = PKCS1_OAEP.new(key)
    encrypted_data = cipher.encrypt(data)
    return encrypted_data
rsa_pub_key is a string with the public key .
train
false
2,580
def _api_undefined(name, output, kwargs):
    return report(output, _MSG_NOT_IMPLEMENTED)
api: accepts output .
train
false
2,581
@_get_client
def image_location_delete(client, image_id, location_id, status, session=None):
    client.image_location_delete(image_id=image_id, location_id=location_id, status=status)
delete an image location .
train
false
2,582
@contextlib.contextmanager
def archive_context(filename):
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        with ContextualZipFile(filename) as archive:
            archive.extractall()
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        (yield)
    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
unzip filename to a temporary directory .
train
true
2,583
def list_config_modules(etcdir):
    if (not os.path.isdir(etcdir)):
        return iter(())
    return (name for name in os.listdir(etcdir) if (name.endswith('.py') and os.path.isfile(os.path.join(etcdir, name))))
returns an iterator that yields the name of all the config modules .
train
false
2,585
def state_argspec(module=''):
    st_ = salt.state.State(__opts__)
    return salt.utils.argspec_report(st_.states, module)
return the argument specification of functions in salt state modules .
train
true
2,587
def add_fossil_segment(powerline):
    try:
        _add_fossil_segment(powerline)
    except OSError:
        pass
    except subprocess.CalledProcessError:
        pass
wraps _add_fossil_segment in exception handling .
train
false
2,588
def get_view_restrictions(pages):
    restricted_pages = defaultdict(list)
    if (not get_cms_setting('PERMISSION')):
        return restricted_pages
    if (not pages):
        return restricted_pages
    is_public_pages = (not pages[0].publisher_is_draft)
    if is_public_pages:
        draft_ids = (page.publisher_public_id for page in pages)
        pages = Page.objects.filter(pk__in=draft_ids).select_related('parent')
    pages_list = load_ancestors(pages)
    pages_by_id = dict(((page.pk, page) for page in pages_list))
    page_permissions = PagePermission.objects.filter(page__in=pages_by_id, can_view=True)
    for perm in page_permissions:
        perm._page_cache = pages_by_id[perm.page_id]
        for page_id in perm.get_page_ids():
            restricted_pages[page_id].append(perm)
    return restricted_pages
load all view restrictions for the pages .
train
false
2,589
def force_release_hosts(host_filter_data, username=None):
    hosts = models.Host.query_objects(host_filter_data)
    reservations.force_release(hosts_to_release=[h.hostname for h in hosts], username=username)
force release some hosts .
train
false
2,590
def change_set_to_roles(files, git_dir, roles_dirs, playbooks_dirs, graph):
    items = set()
    for role_dir in roles_dirs:
        role_dir_path = pathlib2.Path(git_dir, role_dir)
        candidate_files = {f for f in role_dir_path.glob('**/*')}
        for f in files:
            file_path = pathlib2.Path(git_dir, f)
            if (file_path in candidate_files):
                items.add(_get_role_name_from_file(file_path))
    return items
converts change set consisting of a number of files to the roles that they represent/contain .
train
false
2,591
def numpy_to_votable_dtype(dtype, shape):
    if (dtype.num not in numpy_dtype_to_field_mapping):
        raise TypeError(u'{0!r} can not be represented in VOTable'.format(dtype))
    if (dtype.char == u'S'):
        return {u'datatype': u'char', u'arraysize': str(dtype.itemsize)}
    elif (dtype.char == u'U'):
        return {u'datatype': u'unicodeChar', u'arraysize': str((dtype.itemsize // 4))}
    else:
        result = {u'datatype': numpy_dtype_to_field_mapping[dtype.num]}
        if len(shape):
            result[u'arraysize'] = u'x'.join((str(x) for x in shape))
        return result
converts a numpy dtype and shape to a dictionary of attributes for a votable field element that correspond to that type .
train
false
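The 'S' and 'U' branches above key off dtype.char and itemsize; a standalone peek at those attributes (pure numpy, no votable machinery needed):

    import numpy as np

    for dt in (np.dtype('S8'), np.dtype('U8')):
        # 'S8' -> char 'S', itemsize 8; 'U8' -> char 'U', itemsize 32 (4 bytes per char)
        print(dt.char, dt.itemsize)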
2,592
def _bitstring_topology(tree):
    bitstrs = {}
    for (clade, bitstr) in _tree_to_bitstrs(tree).items():
        bitstrs[bitstr] = round((clade.branch_length or 0.0), 5)
    return bitstrs
generates a branch length dict for a tree .
train
false
2,593
def get_returner_options(virtualname=None, ret=None, attrs=None, **kwargs):
    ret_config = _fetch_ret_config(ret)
    attrs = (attrs or {})
    profile_attr = kwargs.get('profile_attr', None)
    profile_attrs = kwargs.get('profile_attrs', None)
    defaults = kwargs.get('defaults', None)
    __salt__ = kwargs.get('__salt__', {})
    __opts__ = kwargs.get('__opts__', {})
    cfg = __salt__.get('config.option', __opts__)
    _options = dict(_options_browser(cfg, ret_config, defaults, virtualname, attrs))
    _options.update(_fetch_profile_opts(cfg, virtualname, __salt__, _options, profile_attr, profile_attrs))
    if (ret and ('ret_kwargs' in ret)):
        _options.update(ret['ret_kwargs'])
    return _options
get the returner options from salt .
train
true
2,594
def computeTokenTypes(tokenNames):
    if (tokenNames is None):
        return {}
    return dict(((name, type) for (type, name) in enumerate(tokenNames)))
compute a dict that is an inverted index of tokennames .
train
false
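The same name-to-index inversion, sketched with hypothetical ANTLR-style token names:

    tokenNames = ['<invalid>', '<EOR>', 'ID', 'INT']  # made-up names for illustration
    tokenTypes = dict((name, type) for (type, name) in enumerate(tokenNames))
    print(tokenTypes['ID'])  # 2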
2,595
def fitLineLSQ(pts):
    n = len(pts)
    a = np.ones((n, 2))
    for i in range(n):
        a[(i, 0)] = pts[(i, 0)]
    line = lstsq(a, pts[:, 1])[0]
    return line
returns a line fit with least squares .
train
false
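The design matrix pairs each x with a constant 1, so lstsq returns (slope, intercept). A self-contained check using numpy's lstsq (the snippet's lstsq is assumed to come from numpy or scipy):

    import numpy as np

    pts = np.array([[0.0, 1.0], [1.0, 3.0], [2.0, 5.0]])  # points on y = 2x + 1
    a = np.ones((len(pts), 2))
    a[:, 0] = pts[:, 0]
    slope, intercept = np.linalg.lstsq(a, pts[:, 1], rcond=None)[0]
    print(slope, intercept)  # ~2.0 ~1.0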
2,596
def parse_any_descriptor(descr, name=None):
    assert descr
    if (descr[0] == '('):
        return parse_method_descriptor(descr, name)
    else:
        return parse_field_descriptor(descr, name)
parse either a field or method descriptor .
train
false
2,598
@pytest.mark.mercurial
def test_freeze_mercurial_clone(script, tmpdir):
    pkg_version = _create_test_package(script, vcs='hg')
    result = script.run('hg', 'clone', pkg_version, 'pip-test-package', expect_stderr=True)
    repo_dir = (script.scratch_path / 'pip-test-package')
    result = script.run('python', 'setup.py', 'develop', cwd=repo_dir, expect_stderr=True)
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent('\n ...-e hg+...#egg=version_pkg\n ...\n ').strip()
    _check_output(result.stdout, expected)
    result = script.pip('freeze', '-f', ('%s#egg=pip_test_package' % repo_dir), expect_stderr=True)
    expected = textwrap.dedent(('\n -f %(repo)s#egg=pip_test_package...\n ...-e hg+...#egg=version_pkg\n ...\n ' % {'repo': repo_dir})).strip()
    _check_output(result.stdout, expected)
test freezing a mercurial clone .
train
false
2,600
def rootAuthority(xri):
    if xri.startswith('xri://'):
        xri = xri[6:]
    authority = xri.split('/', 1)[0]
    if (authority[0] == '('):
        root = authority[:(authority.index(')') + 1)]
    elif (authority[0] in XRI_AUTHORITIES):
        root = authority[0]
    else:
        segments = authority.split('!')
        segments = reduce(list.__add__, map((lambda s: s.split('*')), segments))
        root = segments[0]
    return XRI(root)
return the root authority for an xri .
train
true
2,602
def PutAllEntities(entities):
    for entity in entities:
        datastore.Put(entity)
puts all entities to the current datastore .
train
false
2,603
def configuration_context(request):
    return {'platform_name': configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)}
configuration context for django templates .
train
false
2,605
def htmlsafe_dump(obj, fp, **kwargs):
    fp.write(unicode(htmlsafe_dumps(obj, **kwargs)))
like :func:htmlsafe_dumps but writes into a file object .
train
true
2,606
def validate_positive_scalars(**kwargs):
    for (key, val) in kwargs.items():
        try:
            if (val <= 0):
                raise ValueError('{} must be > 0, got {}'.format(key, val))
        except TypeError:
            raise exceptions.PlotlyError('{} must be a number, got {}'.format(key, val))
validates that all values given in key/val pairs are positive .
train
false
2,607
def _postorder_traverse(root, get_children):
    def dfs(elem):
        for v in get_children(elem):
            for u in dfs(v):
                (yield u)
        (yield elem)
    for elem in dfs(root):
        (yield elem)
traverse a tree in depth-first post-order .
train
false
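A small run, assuming the generator above is in scope, over a hypothetical child mapping:

    children = {'a': ['b', 'c'], 'b': [], 'c': ['d'], 'd': []}
    print(list(_postorder_traverse('a', lambda n: children[n])))
    # ['b', 'd', 'c', 'a'] -- every node's children come before the node itself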
2,608
def splitQuoted(s):
    s = s.strip()
    result = []
    word = []
    inQuote = inWord = False
    for (i, c) in enumerate(iterbytes(s)):
        if (c == '"'):
            if (i and (s[(i - 1):i] == '\\')):
                word.pop()
                word.append('"')
            elif (not inQuote):
                inQuote = True
            else:
                inQuote = False
                result.append(''.join(word))
                word = []
        elif ((not inWord) and (not inQuote) and (c not in ('"' + string.whitespace.encode('ascii')))):
            inWord = True
            word.append(c)
        elif (inWord and (not inQuote) and (c in string.whitespace.encode('ascii'))):
            w = ''.join(word)
            if (w == 'NIL'):
                result.append(None)
            else:
                result.append(w)
            word = []
            inWord = False
        elif (inWord or inQuote):
            word.append(c)
    if inQuote:
        raise MismatchedQuoting(s)
    if inWord:
        w = ''.join(word)
        if (w == 'NIL'):
            result.append(None)
        else:
            result.append(w)
    return result
like string.split, except that words can be quoted .
train
false
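For intuition, the stdlib's shlex.split performs a similar quote-aware split (though unlike splitQuoted it does not map 'NIL' to None):

    import shlex

    print(shlex.split('BODY "RFC822 HEADER" NIL'))  # ['BODY', 'RFC822 HEADER', 'NIL']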
2,610
def _stringify_path(filepath_or_buffer):
    if (_PATHLIB_INSTALLED and isinstance(filepath_or_buffer, pathlib.Path)):
        return text_type(filepath_or_buffer)
    if (_PY_PATH_INSTALLED and isinstance(filepath_or_buffer, LocalPath)):
        return filepath_or_buffer.strpath
    return filepath_or_buffer
return the argument coerced to a string if it was a pathlib.Path or a py.path.local object .
train
false
2,611
@contextmanager
def pending_logging():
    logger = logging.getLogger()
    memhandler = MemoryHandler()
    try:
        handlers = []
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
            handlers.append(handler)
        logger.addHandler(memhandler)
        (yield memhandler)
    finally:
        logger.removeHandler(memhandler)
        for handler in handlers:
            logger.addHandler(handler)
        memhandler.flushTo(logger)
context manager that temporarily buffers all logging records in memory .
train
false
2,613
def parse_config(config_file=None):
    if (config_file is None):
        config_file = _config_file()
    ret = {}
    if _check_config_exists(config_file):
        with salt.utils.fopen(config_file) as ifile:
            for line in ifile:
                (key, val) = line.split('=')
                ret[key] = val
        return ret
    return 'Could not find {0} on file system'.format(config_file)
parse the zarp config file .
train
true
2,614
def deepmind_rmsprop(loss_or_grads, params, learning_rate, rho, epsilon):
    grads = get_or_compute_grads(loss_or_grads, params)
    updates = OrderedDict()
    for (param, grad) in zip(params, grads):
        value = param.get_value(borrow=True)
        acc_grad = theano.shared(np.zeros(value.shape, dtype=value.dtype), broadcastable=param.broadcastable)
        acc_grad_new = ((rho * acc_grad) + ((1 - rho) * grad))
        acc_rms = theano.shared(np.zeros(value.shape, dtype=value.dtype), broadcastable=param.broadcastable)
        acc_rms_new = ((rho * acc_rms) + ((1 - rho) * (grad ** 2)))
        updates[acc_grad] = acc_grad_new
        updates[acc_rms] = acc_rms_new
        updates[param] = (param - (learning_rate * (grad / T.sqrt(((acc_rms_new - (acc_grad_new ** 2)) + epsilon)))))
    return updates
rmsprop updates [1]_ .
train
false
2,615
def test_roberts_zeros():
    result = filters.roberts(np.zeros((10, 10)), np.ones((10, 10), bool))
    assert np.all((result == 0))
roberts filter on an array of all zeros .
train
false
2,616
@_define_event
def post_run_cell():
    pass
fires after user-entered code runs .
train
false
2,618
@importorskip('gi.repository')
@parametrize('repository_name', gi2_repository_names_skipped_if_unimportable, ids=gi2_repository_names)
def test_gi2_repository(pyi_builder, repository_name):
    pyi_builder.test_source("\n import gi\n gi.require_version('{repository_name}', '2.0')\n from gi.repository import {repository_name}\n print({repository_name})\n ".format(repository_name=repository_name))
test the importability of the gi.repository modules .
train
false
2,620
def survey_getAllSectionsForSeries(series_id):
    table = current.s3db.survey_series
    row = current.db((table.id == series_id)).select(table.template_id, limitby=(0, 1)).first()
    return survey_getAllSectionsForTemplate(row.template_id)
function to return the list of sections for the given series ; the sections are returned in the order of their position in the template .
train
false
2,621
def get_oauth_pin(oauth_url, open_browser=True):
    print(('Opening: %s\n' % oauth_url))
    if open_browser:
        print('\nIn the web browser window that opens please choose to Allow\naccess. Copy the PIN number that appears on the next page and paste or\ntype it here:\n ')
        try:
            r = webbrowser.open(oauth_url)
            time.sleep(2)
            if (not r):
                raise Exception()
        except:
            print(("\nUh, I couldn't open a browser on your computer. Please go here to get\nyour PIN:\n\n" + oauth_url))
    else:
        print(('\nPlease go to the following URL, authorize the app, and copy the PIN:\n\n' + oauth_url))
    return _input('Please enter the PIN: ').strip()
prompt the user for the oauth pin .
train
false
2,622
def get_tukeyQcrit(k, df, alpha=0.05):
    if (alpha == 0.05):
        intp = interpolate.interp1d(crows, cv005[:, (k - 2)])
    elif (alpha == 0.01):
        intp = interpolate.interp1d(crows, cv001[:, (k - 2)])
    else:
        raise ValueError('only implemented for alpha equal to 0.01 and 0.05')
    return intp(df)
return critical values for tukey's hsd (q) for a given number of groups k and degrees of freedom df .
train
false
2,623
@contextlib.contextmanager
def on_assert_failed_print_details(actual, expected):
    try:
        (yield)
    except AssertionError:
        diff = difflib.ndiff(expected.splitlines(), actual.splitlines())
        diff_text = u'\n'.join(diff)
        print(u'DIFF (+ ACTUAL, - EXPECTED):\n{0}\n'.format(diff_text))
        if DEBUG:
            print(u'expected:\n{0}\n'.format(expected))
            print(u'actual:\n{0}\n'.format(actual))
        raise
print text diff details in case of assertion failures .
train
false
2,625
def add_event(c, type, payload):
    SQL = '\n INSERT INTO events (type, payload)\n VALUES (%s, %s)\n '
    c.run(SQL, (type, psycopg2.extras.Json(payload)))
log an event .
train
false
2,628
def to_sympy(m, **options):
    if isinstance(m, Matrix):
        return m
    elif isinstance(m, numpy_ndarray):
        return numpy_to_sympy(m)
    elif isinstance(m, scipy_sparse_matrix):
        return scipy_sparse_to_sympy(m)
    elif isinstance(m, Expr):
        return m
    raise TypeError(('Expected sympy/numpy/scipy.sparse matrix, got: %r' % m))
convert a numpy/scipy.sparse matrix to a sympy matrix .
train
false
2,629
@secure_required
@permission_required_or_403('change_user', (get_user_model(), 'username', 'username'))
def email_change(request, username, email_form=ChangeEmailForm, template_name='userena/email_form.html', success_url=None, extra_context=None):
    user = get_object_or_404(get_user_model(), username__iexact=username)
    prev_email = user.email
    form = email_form(user)
    if (request.method == 'POST'):
        form = email_form(user, request.POST, request.FILES)
        if form.is_valid():
            form.save()
            if success_url:
                userena_signals.email_change.send(sender=None, user=user, prev_email=prev_email, new_email=user.email)
                redirect_to = success_url
            else:
                redirect_to = reverse('userena_email_change_complete', kwargs={'username': user.username})
            return redirect(redirect_to)
    if (not extra_context):
        extra_context = dict()
    extra_context['form'] = form
    extra_context['profile'] = get_user_profile(user=user)
    return ExtraContextTemplateView.as_view(template_name=template_name, extra_context=extra_context)(request)
change email address .
train
true
2,630
def abstractmethod(funcobj):
    funcobj.__isabstractmethod__ = True
    return funcobj
a decorator indicating abstract methods .
train
false
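The flag set above only has an effect under an ABC metaclass. A sketch assuming the decorator above is in scope (it mirrors abc.abstractmethod):

    from abc import ABCMeta

    class Base(metaclass=ABCMeta):
        @abstractmethod
        def run(self):
            raise NotImplementedError

    # Base() now raises TypeError: can't instantiate abstract class Base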
2,632
def test_trivial():
    pass
a trivial passing test .
train
false
2,635
def test_issue309(en_tokenizer):
    tokens = en_tokenizer(u' ')
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=[0], deps=[u'ROOT'])
    doc.is_parsed = True
    assert (len(doc) == 1)
    sents = list(doc.sents)
    assert (len(sents) == 1)
test issue #309: sbd fails on empty string .
train
false
2,636
def LoadSingleConf(stream):
    return yaml_object.BuildSingleObject(YAMLConfiguration, stream)
load a conf .
train
false
2,638
def get_shared_secret():
    try:
        fd = open('/var/lib/cobbler/web.ss')
        data = fd.read()
    except:
        return (-1)
    return str(data).strip()
returns the shared secret used by the cobbler web interface .
train
false
2,639
def _add_new_repo(alias, uri, compressed, enabled=True):
    repostr = ('# ' if (not enabled) else '')
    repostr += ('src/gz ' if compressed else 'src ')
    repostr += (((alias + ' ') + uri) + '\n')
    conffile = os.path.join(OPKG_CONFDIR, (alias + '.conf'))
    with open(conffile, 'a') as fhandle:
        fhandle.write(repostr)
add a new repo entry .
train
false
2,641
def _sanitize_url_components(comp_list, field):
    if (len(comp_list) == 0):
        return ''
    elif comp_list[0].startswith('{0}='.format(field)):
        ret = '{0}=XXXXXXXXXX&'.format(field)
        comp_list.remove(comp_list[0])
        return (ret + _sanitize_url_components(comp_list, field))
    else:
        ret = '{0}&'.format(comp_list[0])
        comp_list.remove(comp_list[0])
        return (ret + _sanitize_url_components(comp_list, field))
recursive function to sanitize each component of the url .
train
true
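Hypothetical usage, assuming the function above is in scope; note that it consumes comp_list as it recurses and leaves a trailing '&':

    comps = 'user=bob&api_key=SECRET&action=list'.split('&')
    print(_sanitize_url_components(comps, 'api_key'))
    # 'user=bob&api_key=XXXXXXXXXX&action=list&'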
2,643
def detect_mobile(view):
    def detected(request, *args, **kwargs):
        Middleware.process_request(request)
        return view(request, *args, **kwargs)
    detected.__doc__ = ('%s\n[Wrapped by detect_mobile which detects if the request is from a phone]' % view.__doc__)
    return detected
view decorator that adds a "mobile" attribute to the request which is true or false depending on whether the request should be considered to come from a small-screen device such as a phone or a pda .
train
false
2,644
def traverse_hiearchy(service):
    accounts = service.management().accounts().list().execute()
    print_accounts(accounts)
    if accounts.get('items'):
        firstAccountId = accounts.get('items')[0].get('id')
        webproperties = service.management().webproperties().list(accountId=firstAccountId).execute()
        print_webproperties(webproperties)
        if webproperties.get('items'):
            firstWebpropertyId = webproperties.get('items')[0].get('id')
            profiles = service.management().profiles().list(accountId=firstAccountId, webPropertyId=firstWebpropertyId).execute()
            print_profiles(profiles)
            if profiles.get('items'):
                firstProfileId = profiles.get('items')[0].get('id')
                goals = service.management().goals().list(accountId=firstAccountId, webPropertyId=firstWebpropertyId, profileId=firstProfileId).execute()
                print_goals(goals)
    print_segments(service.management().segments().list().execute())
traverses the management api hierarchy and prints results .
train
false
2,645
def squelch_exceptions(fn):
    @functools.wraps(fn)
    def squelched_fn(*a, **kw):
        try:
            return fn(*a, **kw)
        except BaseException:
            if g.debug:
                raise
            else:
                g.log.exception('squelching exception')
    return squelched_fn
wrap a function to log and suppress all internal exceptions when running in debug mode .
train
false
2,646
def constructor_copy(obj, cls, *args, **kw):
    names = get_cls_kwargs(cls)
    kw.update(((k, obj.__dict__[k]) for k in names.difference(kw) if (k in obj.__dict__)))
    return cls(*args, **kw)
instantiate cls using the __dict__ of obj as constructor arguments .
train
false
2,647
def _refine_module_state(module_state):
    module_state = str(module_state).lower()
    if (module_state in ('1', 'on', 'yes', 'true', 'enabled')):
        return 'enabled'
    if (module_state in ('0', 'off', 'no', 'false', 'disabled')):
        return 'disabled'
    return 'unknown'
normalize a module state value to a predictable 'enabled', 'disabled' or 'unknown' .
train
false
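the normalization in the snippet above can be exercised directly:

assert _refine_module_state('ON') == 'enabled'
assert _refine_module_state(0) == 'disabled'
assert _refine_module_state('maybe') == 'unknown'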
2,648
def read_data(source_path, target_path, buckets, EOS_ID, max_size=None): data_set = [[] for _ in buckets] with tf.gfile.GFile(source_path, mode='r') as source_file: with tf.gfile.GFile(target_path, mode='r') as target_file: (source, target) = (source_file.readline(), target_file.readline()) counter = 0 while (source and target and ((not max_size) or (counter < max_size))): counter += 1 if ((counter % 100000) == 0): print((' reading data line %d' % counter)) sys.stdout.flush() source_ids = [int(x) for x in source.split()] target_ids = [int(x) for x in target.split()] target_ids.append(EOS_ID) for (bucket_id, (source_size, target_size)) in enumerate(buckets): if ((len(source_ids) < source_size) and (len(target_ids) < target_size)): data_set[bucket_id].append([source_ids, target_ids]) break (source, target) = (source_file.readline(), target_file.readline()) return data_set
[ "def", "read_data", "(", "source_path", ",", "target_path", ",", "buckets", ",", "EOS_ID", ",", "max_size", "=", "None", ")", ":", "data_set", "=", "[", "[", "]", "for", "_", "in", "buckets", "]", "with", "tf", ".", "gfile", ".", "GFile", "(", "source_path", ",", "mode", "=", "'r'", ")", "as", "source_file", ":", "with", "tf", ".", "gfile", ".", "GFile", "(", "target_path", ",", "mode", "=", "'r'", ")", "as", "target_file", ":", "(", "source", ",", "target", ")", "=", "(", "source_file", ".", "readline", "(", ")", ",", "target_file", ".", "readline", "(", ")", ")", "counter", "=", "0", "while", "(", "source", "and", "target", "and", "(", "(", "not", "max_size", ")", "or", "(", "counter", "<", "max_size", ")", ")", ")", ":", "counter", "+=", "1", "if", "(", "(", "counter", "%", "100000", ")", "==", "0", ")", ":", "print", "(", "(", "' reading data line %d'", "%", "counter", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "source_ids", "=", "[", "int", "(", "x", ")", "for", "x", "in", "source", ".", "split", "(", ")", "]", "target_ids", "=", "[", "int", "(", "x", ")", "for", "x", "in", "target", ".", "split", "(", ")", "]", "target_ids", ".", "append", "(", "EOS_ID", ")", "for", "(", "bucket_id", ",", "(", "source_size", ",", "target_size", ")", ")", "in", "enumerate", "(", "buckets", ")", ":", "if", "(", "(", "len", "(", "source_ids", ")", "<", "source_size", ")", "and", "(", "len", "(", "target_ids", ")", "<", "target_size", ")", ")", ":", "data_set", "[", "bucket_id", "]", ".", "append", "(", "[", "source_ids", ",", "target_ids", "]", ")", "break", "(", "source", ",", "target", ")", "=", "(", "source_file", ".", "readline", "(", ")", ",", "target_file", ".", "readline", "(", ")", ")", "return", "data_set" ]
read source and target files of token ids, append the eos id to each target, and bucket the sentence pairs by length up to max_size lines .
train
false
2,649
def get_my_ip(): import socket try: import Pyro4 ns = Pyro4.naming.locateNS() s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect((ns._pyroUri.host, ns._pyroUri.port)) (result, port) = s.getsockname() except: try: import commands result = commands.getoutput('ifconfig').split('\n')[1].split()[1][5:] if (len(result.split('.')) != 4): raise Exception() except: result = socket.gethostbyname(socket.gethostname()) return result
[ "def", "get_my_ip", "(", ")", ":", "import", "socket", "try", ":", "import", "Pyro4", "ns", "=", "Pyro4", ".", "naming", ".", "locateNS", "(", ")", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "s", ".", "connect", "(", "(", "ns", ".", "_pyroUri", ".", "host", ",", "ns", ".", "_pyroUri", ".", "port", ")", ")", "(", "result", ",", "port", ")", "=", "s", ".", "getsockname", "(", ")", "except", ":", "try", ":", "import", "commands", "result", "=", "commands", ".", "getoutput", "(", "'ifconfig'", ")", ".", "split", "(", "'\\n'", ")", "[", "1", "]", ".", "split", "(", ")", "[", "1", "]", "[", "5", ":", "]", "if", "(", "len", "(", "result", ".", "split", "(", "'.'", ")", ")", "!=", "4", ")", ":", "raise", "Exception", "(", ")", "except", ":", "result", "=", "socket", ".", "gethostbyname", "(", "socket", ".", "gethostname", "(", ")", ")", "return", "result" ]
try to obtain our external ip . this tries to sidestep the issue of bogus /etc/hosts entries and other local misconfigurations .
train
false
2,650
def show_queue(): cmd = 'mailq' out = __salt__['cmd.run'](cmd).splitlines() queue = [] queue_pattern = re.compile('(?P<queue_id>^[A-Z0-9]+)\\s+(?P<size>\\d+)\\s(?P<timestamp>\\w{3}\\s\\w{3}\\s\\d{1,2}\\s\\d{2}\\:\\d{2}\\:\\d{2})\\s+(?P<sender>.+)') recipient_pattern = re.compile('^\\s+(?P<recipient>.+)') for line in out: if re.match('^[-|postqueue:|Mail]', line): continue if re.match(queue_pattern, line): m = re.match(queue_pattern, line) queue_id = m.group('queue_id') size = m.group('size') timestamp = m.group('timestamp') sender = m.group('sender') elif re.match(recipient_pattern, line): m = re.match(recipient_pattern, line) recipient = m.group('recipient') elif (not line): queue.append({'queue_id': queue_id, 'size': size, 'timestamp': timestamp, 'sender': sender, 'recipient': recipient}) return queue
[ "def", "show_queue", "(", ")", ":", "cmd", "=", "'mailq'", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "queue", "=", "[", "]", "queue_pattern", "=", "re", ".", "compile", "(", "'(?P<queue_id>^[A-Z0-9]+)\\\\s+(?P<size>\\\\d+)\\\\s(?P<timestamp>\\\\w{3}\\\\s\\\\w{3}\\\\s\\\\d{1,2}\\\\s\\\\d{2}\\\\:\\\\d{2}\\\\:\\\\d{2})\\\\s+(?P<sender>.+)'", ")", "recipient_pattern", "=", "re", ".", "compile", "(", "'^\\\\s+(?P<recipient>.+)'", ")", "for", "line", "in", "out", ":", "if", "re", ".", "match", "(", "'^[-|postqueue:|Mail]'", ",", "line", ")", ":", "continue", "if", "re", ".", "match", "(", "queue_pattern", ",", "line", ")", ":", "m", "=", "re", ".", "match", "(", "queue_pattern", ",", "line", ")", "queue_id", "=", "m", ".", "group", "(", "'queue_id'", ")", "size", "=", "m", ".", "group", "(", "'size'", ")", "timestamp", "=", "m", ".", "group", "(", "'timestamp'", ")", "sender", "=", "m", ".", "group", "(", "'sender'", ")", "elif", "re", ".", "match", "(", "recipient_pattern", ",", "line", ")", ":", "m", "=", "re", ".", "match", "(", "recipient_pattern", ",", "line", ")", "recipient", "=", "m", ".", "group", "(", "'recipient'", ")", "elif", "(", "not", "line", ")", ":", "queue", ".", "append", "(", "{", "'queue_id'", ":", "queue_id", ",", "'size'", ":", "size", ",", "'timestamp'", ":", "timestamp", ",", "'sender'", ":", "sender", ",", "'recipient'", ":", "recipient", "}", ")", "return", "queue" ]
show contents of the mail queue .
train
true
2,652
def get_catalog_discover_hack(service_type, url): return _VERSION_HACKS.get_discover_hack(service_type, url)
[ "def", "get_catalog_discover_hack", "(", "service_type", ",", "url", ")", ":", "return", "_VERSION_HACKS", ".", "get_discover_hack", "(", "service_type", ",", "url", ")" ]
apply the catalog hacks and figure out an unversioned endpoint .
train
false
2,653
def _get_status_descr_by_id(status_id): for (status_name, status_data) in six.iteritems(LINODE_STATUS): if (status_data['code'] == int(status_id)): return status_data['descr'] return LINODE_STATUS.get(status_id, None)
[ "def", "_get_status_descr_by_id", "(", "status_id", ")", ":", "for", "(", "status_name", ",", "status_data", ")", "in", "six", ".", "iteritems", "(", "LINODE_STATUS", ")", ":", "if", "(", "status_data", "[", "'code'", "]", "==", "int", "(", "status_id", ")", ")", ":", "return", "status_data", "[", "'descr'", "]", "return", "LINODE_STATUS", ".", "get", "(", "status_id", ",", "None", ")" ]
return the linode status description by id, where status_id is the linode vm status id .
train
true
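a sketch with a minimal hypothetical LINODE_STATUS table (the real module defines the full mapping):

import six  # the snippet iterates with six.iteritems

LINODE_STATUS = {  # hypothetical two-entry table for illustration
    'running': {'code': 1, 'descr': 'Running'},
    'powered_off': {'code': 2, 'descr': 'Powered Off'},
}

assert _get_status_descr_by_id(1) == 'Running'
assert _get_status_descr_by_id(99) is None  # unknown code falls through to .get()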
2,654
def convert_wo_prefix(string): factors = {'K': 1000, 'M': (1000 * 1000), 'G': ((1000 * 1000) * 1000), 'T': (((1000 * 1000) * 1000) * 1000), 'P': ((((1000 * 1000) * 1000) * 1000) * 1000), 'E': (((((1000 * 1000) * 1000) * 1000) * 1000) * 1000)} if (string == '-'): return (-1) for (f, fm) in factors.items(): if string.endswith(f): number = float(string[:(-1)]) number = (number * fm) return long(number) return long(string)
[ "def", "convert_wo_prefix", "(", "string", ")", ":", "factors", "=", "{", "'K'", ":", "1000", ",", "'M'", ":", "(", "1000", "*", "1000", ")", ",", "'G'", ":", "(", "(", "1000", "*", "1000", ")", "*", "1000", ")", ",", "'T'", ":", "(", "(", "(", "1000", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", ",", "'P'", ":", "(", "(", "(", "(", "1000", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", ",", "'E'", ":", "(", "(", "(", "(", "(", "1000", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", "*", "1000", ")", "}", "if", "(", "string", "==", "'-'", ")", ":", "return", "(", "-", "1", ")", "for", "(", "f", ",", "fm", ")", "in", "factors", ".", "items", "(", ")", ":", "if", "string", ".", "endswith", "(", "f", ")", ":", "number", "=", "float", "(", "string", "[", ":", "(", "-", "1", ")", "]", ")", "number", "=", "(", "number", "*", "fm", ")", "return", "long", "(", "number", ")", "return", "long", "(", "string", ")" ]
take a string in the form 1234k and convert it to an integer, expanding decimal (power-of-1000) prefixes .
train
false
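a few illustrative calls; note the snippet targets python 2, so on python 3 the builtin long has to be shimmed in the same module:

long = int  # python 3 shim; unnecessary on python 2

assert convert_wo_prefix('1234') == 1234
assert convert_wo_prefix('2K') == 2000
assert convert_wo_prefix('1.5M') == 1500000
assert convert_wo_prefix('-') == -1  # the dash means 'no value'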
2,655
@simple_decorator def check_login_required(view_func): def _check(*args, **kwargs): siteconfig = SiteConfiguration.objects.get_current() if siteconfig.get(u'auth_require_sitewide_login'): return login_required(view_func)(*args, **kwargs) else: return view_func(*args, **kwargs) return _check
[ "@", "simple_decorator", "def", "check_login_required", "(", "view_func", ")", ":", "def", "_check", "(", "*", "args", ",", "**", "kwargs", ")", ":", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "if", "siteconfig", ".", "get", "(", "u'auth_require_sitewide_login'", ")", ":", "return", "login_required", "(", "view_func", ")", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "view_func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "_check" ]
check whether the user needs to log in .
train
false
2,656
def _adjust(hsl, attribute, percent): hsl = list(hsl) if (attribute > 0): hsl[attribute] = _clamp((hsl[attribute] + percent)) else: hsl[attribute] += percent return hsl
[ "def", "_adjust", "(", "hsl", ",", "attribute", ",", "percent", ")", ":", "hsl", "=", "list", "(", "hsl", ")", "if", "(", "attribute", ">", "0", ")", ":", "hsl", "[", "attribute", "]", "=", "_clamp", "(", "(", "hsl", "[", "attribute", "]", "+", "percent", ")", ")", "else", ":", "hsl", "[", "attribute", "]", "+=", "percent", "return", "hsl" ]
internal adjust helper that shifts one hsl component by a percentage, clamping every component except hue .
train
true
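a sketch assuming _clamp limits a value to the [0, 1] range (stubbed below as a hypothetical stand-in):

def _clamp(value):  # hypothetical stand-in for the module's clamp helper
    return min(1.0, max(0.0, value))

assert _adjust([0.25, 0.5, 0.5], 0, 0.5) == [0.75, 0.5, 0.5]  # hue (index 0) is never clamped
assert _adjust([0.25, 0.5, 0.5], 1, 0.7) == [0.25, 1.0, 0.5]  # saturation is clamped to 1.0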
2,657
def qnwgamma(n, a=None): return _make_multidim_func(_qnwgamma1, n, a)
[ "def", "qnwgamma", "(", "n", ",", "a", "=", "None", ")", ":", "return", "_make_multidim_func", "(", "_qnwgamma1", ",", "n", ",", "a", ")" ]
computes quadrature nodes and weights for the gamma distribution, where n is an int or a length-d iterable giving the number of nodes in each dimension and a is an optional shape parameter .
train
false
2,659
def bundle_kit(): s3.prep = (lambda r: (r.representation == 's3json')) return s3_rest_controller()
[ "def", "bundle_kit", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", "return", "s3_rest_controller", "(", ")" ]
rest controller to retrieve budget_bundle_kit field options .
train
false
2,660
def make_port_dict(port_id, port_state, net_id, attachment): res = {const.PORT_ID: port_id, const.PORT_STATE: port_state} res[const.NET_ID] = net_id res[const.ATTACHMENT] = attachment return res
[ "def", "make_port_dict", "(", "port_id", ",", "port_state", ",", "net_id", ",", "attachment", ")", ":", "res", "=", "{", "const", ".", "PORT_ID", ":", "port_id", ",", "const", ".", "PORT_STATE", ":", "port_state", "}", "res", "[", "const", ".", "NET_ID", "]", "=", "net_id", "res", "[", "const", ".", "ATTACHMENT", "]", "=", "attachment", "return", "res" ]
helper function that builds a port description dict from the port id, state, network id and attachment .
train
false
2,661
def post_save_site(instance, sender, **kwargs): SiteResources.objects.get_or_create(site=instance) SitePeople.objects.get_or_create(site=instance)
[ "def", "post_save_site", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "SiteResources", ".", "objects", ".", "get_or_create", "(", "site", "=", "instance", ")", "SitePeople", ".", "objects", ".", "get_or_create", "(", "site", "=", "instance", ")" ]
signal handler to create the siteresources and sitepeople records on site save .
train
false
2,664
def snappy_installed(): try: import snappy return True except ImportError: return False except: logging.exception('failed to verify if snappy is installed') return False
[ "def", "snappy_installed", "(", ")", ":", "try", ":", "import", "snappy", "return", "True", "except", "ImportError", ":", "return", "False", "except", ":", "logging", ".", "exception", "(", "'failed to verify if snappy is installed'", ")", "return", "False" ]
check whether the snappy compression library is installed and importable .
train
false
2,666
def libvlc_audio_equalizer_get_preamp(p_equalizer): f = (_Cfunctions.get('libvlc_audio_equalizer_get_preamp', None) or _Cfunction('libvlc_audio_equalizer_get_preamp', ((1,),), None, ctypes.c_float, ctypes.c_void_p)) return f(p_equalizer)
[ "def", "libvlc_audio_equalizer_get_preamp", "(", "p_equalizer", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_equalizer_get_preamp'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_equalizer_get_preamp'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_float", ",", "ctypes", ".", "c_void_p", ")", ")", "return", "f", "(", "p_equalizer", ")" ]
get the current pre-amplification value from an equalizer .
train
true
2,667
def has_ipython(version='3.0'): version = str(version) try: import IPython except Exception: return (False, 'IPython library not found') else: if (LooseVersion(IPython.__version__) >= LooseVersion(version)): return (True, 'IPython present') else: message = ('current IPython version: (%s) is older than expected version: (%s)' % (IPython.__version__, version)) return (False, message)
[ "def", "has_ipython", "(", "version", "=", "'3.0'", ")", ":", "version", "=", "str", "(", "version", ")", "try", ":", "import", "IPython", "except", "Exception", ":", "return", "(", "False", ",", "'IPython library not found'", ")", "else", ":", "if", "(", "LooseVersion", "(", "IPython", ".", "__version__", ")", ">=", "LooseVersion", "(", "version", ")", ")", ":", "return", "(", "True", ",", "'IPython present'", ")", "else", ":", "message", "=", "(", "'current IPython version: (%s) is older than expected version: (%s)'", "%", "(", "IPython", ".", "__version__", ",", "version", ")", ")", "return", "(", "False", ",", "message", ")" ]
function that checks that ipython is present and at least the given version .
train
false
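a quick check; LooseVersion is assumed to come from the snippet's module scope, so it is imported explicitly here:

from distutils.version import LooseVersion  # deprecated in recent pythons, but what the snippet expects

print(has_ipython('3.0'))  # e.g. (True, 'IPython present') or (False, 'IPython library not found')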
2,668
def _get_adjtime_timezone(): adjtime_file = '/etc/adjtime' if os.path.exists(adjtime_file): cmd = ['tail', '-n', '1', adjtime_file] return __salt__['cmd.run'](cmd, python_shell=False) elif os.path.exists('/dev/rtc'): raise CommandExecutionError(('Unable to get hwclock timezone from ' + adjtime_file)) else: return None
[ "def", "_get_adjtime_timezone", "(", ")", ":", "adjtime_file", "=", "'/etc/adjtime'", "if", "os", ".", "path", ".", "exists", "(", "adjtime_file", ")", ":", "cmd", "=", "[", "'tail'", ",", "'-n'", ",", "'1'", ",", "adjtime_file", "]", "return", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "elif", "os", ".", "path", ".", "exists", "(", "'/dev/rtc'", ")", ":", "raise", "CommandExecutionError", "(", "(", "'Unable to get hwclock timezone from '", "+", "adjtime_file", ")", ")", "else", ":", "return", "None" ]
return the timezone of the system clock as recorded in /etc/adjtime .
train
true
2,669
def supported_locales(): family = distrib_family() if (family == 'debian'): return _parse_locales('/usr/share/i18n/SUPPORTED') elif (family == 'arch'): return _parse_locales('/etc/locale.gen') elif (family == 'redhat'): return _supported_locales_redhat() else: raise UnsupportedFamily(supported=['debian', 'arch', 'redhat'])
[ "def", "supported_locales", "(", ")", ":", "family", "=", "distrib_family", "(", ")", "if", "(", "family", "==", "'debian'", ")", ":", "return", "_parse_locales", "(", "'/usr/share/i18n/SUPPORTED'", ")", "elif", "(", "family", "==", "'arch'", ")", ":", "return", "_parse_locales", "(", "'/etc/locale.gen'", ")", "elif", "(", "family", "==", "'redhat'", ")", ":", "return", "_supported_locales_redhat", "(", ")", "else", ":", "raise", "UnsupportedFamily", "(", "supported", "=", "[", "'debian'", ",", "'arch'", ",", "'redhat'", "]", ")" ]
gets the list of supported locales .
train
true
2,670
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a fresh pkcs#1 v1.5 signature scheme object for the given rsa key .
train
false
2,672
def get_index_trap(*args, **kwargs): from conda.core.index import get_index kwargs.pop(u'json', None) return get_index(*args, **kwargs)
[ "def", "get_index_trap", "(", "*", "args", ",", "**", "kwargs", ")", ":", "from", "conda", ".", "core", ".", "index", "import", "get_index", "kwargs", ".", "pop", "(", "u'json'", ",", "None", ")", "return", "get_index", "(", "*", "args", ",", "**", "kwargs", ")" ]
retrieves the package index .
train
false
2,674
def encode_header(header_text): if (not header_text): return '' header_text_utf8 = ustr(header_text).encode('utf-8') header_text_ascii = try_coerce_ascii(header_text_utf8) return (header_text_ascii or Header(header_text_utf8, 'utf-8'))
[ "def", "encode_header", "(", "header_text", ")", ":", "if", "(", "not", "header_text", ")", ":", "return", "''", "header_text_utf8", "=", "ustr", "(", "header_text", ")", ".", "encode", "(", "'utf-8'", ")", "header_text_ascii", "=", "try_coerce_ascii", "(", "header_text_utf8", ")", "return", "(", "header_text_ascii", "or", "Header", "(", "header_text_utf8", ",", "'utf-8'", ")", ")" ]
returns an appropriate representation of the given header value: the ascii string itself when possible, otherwise a utf-8 email header .
train
false
2,676
def _write_fileobject(filename, compress=('zlib', 3)): compressmethod = compress[0] compresslevel = compress[1] if (compressmethod == 'gzip'): return _buffered_write_file(BinaryGzipFile(filename, 'wb', compresslevel=compresslevel)) elif (compressmethod == 'bz2'): return _buffered_write_file(bz2.BZ2File(filename, 'wb', compresslevel=compresslevel)) elif ((lzma is not None) and (compressmethod == 'xz')): return _buffered_write_file(lzma.LZMAFile(filename, 'wb', check=lzma.CHECK_NONE, preset=compresslevel)) elif ((lzma is not None) and (compressmethod == 'lzma')): return _buffered_write_file(lzma.LZMAFile(filename, 'wb', preset=compresslevel, format=lzma.FORMAT_ALONE)) else: return _buffered_write_file(BinaryZlibFile(filename, 'wb', compresslevel=compresslevel))
[ "def", "_write_fileobject", "(", "filename", ",", "compress", "=", "(", "'zlib'", ",", "3", ")", ")", ":", "compressmethod", "=", "compress", "[", "0", "]", "compresslevel", "=", "compress", "[", "1", "]", "if", "(", "compressmethod", "==", "'gzip'", ")", ":", "return", "_buffered_write_file", "(", "BinaryGzipFile", "(", "filename", ",", "'wb'", ",", "compresslevel", "=", "compresslevel", ")", ")", "elif", "(", "compressmethod", "==", "'bz2'", ")", ":", "return", "_buffered_write_file", "(", "bz2", ".", "BZ2File", "(", "filename", ",", "'wb'", ",", "compresslevel", "=", "compresslevel", ")", ")", "elif", "(", "(", "lzma", "is", "not", "None", ")", "and", "(", "compressmethod", "==", "'xz'", ")", ")", ":", "return", "_buffered_write_file", "(", "lzma", ".", "LZMAFile", "(", "filename", ",", "'wb'", ",", "check", "=", "lzma", ".", "CHECK_NONE", ",", "preset", "=", "compresslevel", ")", ")", "elif", "(", "(", "lzma", "is", "not", "None", ")", "and", "(", "compressmethod", "==", "'lzma'", ")", ")", ":", "return", "_buffered_write_file", "(", "lzma", ".", "LZMAFile", "(", "filename", ",", "'wb'", ",", "preset", "=", "compresslevel", ",", "format", "=", "lzma", ".", "FORMAT_ALONE", ")", ")", "else", ":", "return", "_buffered_write_file", "(", "BinaryZlibFile", "(", "filename", ",", "'wb'", ",", "compresslevel", "=", "compresslevel", ")", ")" ]
return the right compressor file object in write mode .
train
false
2,678
def perm_above(accessing_obj, accessed_obj, *args, **kwargs): kwargs['_greater_than'] = True return perm(accessing_obj, accessed_obj, *args, **kwargs)
[ "def", "perm_above", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'_greater_than'", "]", "=", "True", "return", "perm", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")" ]
only allow objects with a permission *higher* in the permission hierarchy than the one given .
train
false
2,679
def setFunctionLocalDictionary(arguments, function): function.localDictionary = {'_arguments': arguments} if (len(arguments) > 0): firstArgument = arguments[0] if (firstArgument.__class__ == dict): function.localDictionary = firstArgument return if ('parameters' not in function.elementNode.attributes): return parameters = function.elementNode.attributes['parameters'].strip() if (parameters == ''): return parameterWords = parameters.split(',') for (parameterWordIndex, parameterWord) in enumerate(parameterWords): strippedWord = parameterWord.strip() keyValue = KeyValue().getByEqual(strippedWord) if (parameterWordIndex < len(arguments)): function.localDictionary[keyValue.key] = arguments[parameterWordIndex] else: strippedValue = keyValue.value if (strippedValue == None): print 'Warning there is no default parameter in getParameterValue for:' print strippedWord print parameterWords print arguments print function.elementNode.attributes else: strippedValue = strippedValue.strip() function.localDictionary[keyValue.key.strip()] = strippedValue if (len(arguments) > len(parameterWords)): print 'Warning there are too many initializeFunction parameters for:' print function.elementNode.attributes print parameterWords print arguments
[ "def", "setFunctionLocalDictionary", "(", "arguments", ",", "function", ")", ":", "function", ".", "localDictionary", "=", "{", "'_arguments'", ":", "arguments", "}", "if", "(", "len", "(", "arguments", ")", ">", "0", ")", ":", "firstArgument", "=", "arguments", "[", "0", "]", "if", "(", "firstArgument", ".", "__class__", "==", "dict", ")", ":", "function", ".", "localDictionary", "=", "firstArgument", "return", "if", "(", "'parameters'", "not", "in", "function", ".", "elementNode", ".", "attributes", ")", ":", "return", "parameters", "=", "function", ".", "elementNode", ".", "attributes", "[", "'parameters'", "]", ".", "strip", "(", ")", "if", "(", "parameters", "==", "''", ")", ":", "return", "parameterWords", "=", "parameters", ".", "split", "(", "','", ")", "for", "(", "parameterWordIndex", ",", "parameterWord", ")", "in", "enumerate", "(", "parameterWords", ")", ":", "strippedWord", "=", "parameterWord", ".", "strip", "(", ")", "keyValue", "=", "KeyValue", "(", ")", ".", "getByEqual", "(", "strippedWord", ")", "if", "(", "parameterWordIndex", "<", "len", "(", "arguments", ")", ")", ":", "function", ".", "localDictionary", "[", "keyValue", ".", "key", "]", "=", "arguments", "[", "parameterWordIndex", "]", "else", ":", "strippedValue", "=", "keyValue", ".", "value", "if", "(", "strippedValue", "==", "None", ")", ":", "print", "'Warning there is no default parameter in getParameterValue for:'", "print", "strippedWord", "print", "parameterWords", "print", "arguments", "print", "function", ".", "elementNode", ".", "attributes", "else", ":", "strippedValue", "=", "strippedValue", ".", "strip", "(", ")", "function", ".", "localDictionary", "[", "keyValue", ".", "key", ".", "strip", "(", ")", "]", "=", "strippedValue", "if", "(", "len", "(", "arguments", ")", ">", "len", "(", "parameterWords", ")", ")", ":", "print", "'Warning there are too many initializeFunction parameters for:'", "print", "function", ".", "elementNode", ".", "attributes", "print", "parameterWords", "print", "arguments" ]
populate the function's local dictionary from the given arguments and the parameter defaults declared on its element node .
train
false
2,680
def http2time(text): m = strict_re.search(text) if m: g = m.groups() mon = (months_lower.index(g[1].lower()) + 1) tt = (int(g[2]), mon, int(g[0]), int(g[3]), int(g[4]), float(g[5])) return my_timegm(tt) text = text.lstrip() text = wkday_re.sub('', text, 1) (day, mon, yr, hr, min, sec, tz) = ([None] * 7) m = loose_http_re.search(text) if (m is not None): (day, mon, yr, hr, min, sec, tz) = m.groups() else: return None return _str2time(day, mon, yr, hr, min, sec, tz)
[ "def", "http2time", "(", "text", ")", ":", "m", "=", "strict_re", ".", "search", "(", "text", ")", "if", "m", ":", "g", "=", "m", ".", "groups", "(", ")", "mon", "=", "(", "months_lower", ".", "index", "(", "g", "[", "1", "]", ".", "lower", "(", ")", ")", "+", "1", ")", "tt", "=", "(", "int", "(", "g", "[", "2", "]", ")", ",", "mon", ",", "int", "(", "g", "[", "0", "]", ")", ",", "int", "(", "g", "[", "3", "]", ")", ",", "int", "(", "g", "[", "4", "]", ")", ",", "float", "(", "g", "[", "5", "]", ")", ")", "return", "my_timegm", "(", "tt", ")", "text", "=", "text", ".", "lstrip", "(", ")", "text", "=", "wkday_re", ".", "sub", "(", "''", ",", "text", ",", "1", ")", "(", "day", ",", "mon", ",", "yr", ",", "hr", ",", "min", ",", "sec", ",", "tz", ")", "=", "(", "[", "None", "]", "*", "7", ")", "m", "=", "loose_http_re", ".", "search", "(", "text", ")", "if", "(", "m", "is", "not", "None", ")", ":", "(", "day", ",", "mon", ",", "yr", ",", "hr", ",", "min", ",", "sec", ",", "tz", ")", "=", "m", ".", "groups", "(", ")", "else", ":", "return", "None", "return", "_str2time", "(", "day", ",", "mon", ",", "yr", ",", "hr", ",", "min", ",", "sec", ",", "tz", ")" ]
returns time in seconds since epoch of time represented by a string .
train
false
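the same helper ships in the standard library's http.cookiejar (python 3), which can stand in to demonstrate the behavior, assuming the two implementations match:

from http.cookiejar import http2time  # stdlib twin of the snippet above

assert http2time('Wed, 09 Feb 1994 22:23:32 GMT') == 760832612
assert http2time('not a date') is None  # unparseable strings yield None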
2,681
def postDeploy(site): pass
[ "def", "postDeploy", "(", "site", ")", ":", "pass" ]
called after deploying the site .
train
false
2,683
def __methods(cls): _dict = {} __methodDict(cls, _dict) return _dict.keys()
[ "def", "__methods", "(", "cls", ")", ":", "_dict", "=", "{", "}", "__methodDict", "(", "cls", ",", "_dict", ")", "return", "_dict", ".", "keys", "(", ")" ]
helper function for scrolled canvas that collects the method names of a class and its bases .
train
false
2,684
def test_comma_separated_list(): assert (hug.types.comma_separated_list('value') == ['value']) assert (hug.types.comma_separated_list('value1,value2') == ['value1', 'value2'])
[ "def", "test_comma_separated_list", "(", ")", ":", "assert", "(", "hug", ".", "types", ".", "comma_separated_list", "(", "'value'", ")", "==", "[", "'value'", "]", ")", "assert", "(", "hug", ".", "types", ".", "comma_separated_list", "(", "'value1,value2'", ")", "==", "[", "'value1'", ",", "'value2'", "]", ")" ]
tests that hug's comma separated list type correctly converts into a python list .
train
false
2,685
def chfullname(name, fullname): return update(name=name, fullname=fullname)
[ "def", "chfullname", "(", "name", ",", "fullname", ")", ":", "return", "update", "(", "name", "=", "name", ",", "fullname", "=", "fullname", ")" ]
change the user's full name .
train
false
2,686
def check_conflicts(unmerged): if prefs.check_conflicts(): unmerged = [path for path in unmerged if is_conflict_free(path)] return unmerged
[ "def", "check_conflicts", "(", "unmerged", ")", ":", "if", "prefs", ".", "check_conflicts", "(", ")", ":", "unmerged", "=", "[", "path", "for", "path", "in", "unmerged", "if", "is_conflict_free", "(", "path", ")", "]", "return", "unmerged" ]
filter the unmerged paths, keeping only the conflict-free ones when conflict checking is enabled .
train
false
2,687
def system_shutdown(): logging.info('Performing system shutdown') Thread(target=halt).start() while __INITIALIZED__: time.sleep(1.0) if sabnzbd.WIN32: powersup.win_shutdown() elif DARWIN: powersup.osx_shutdown() else: powersup.linux_shutdown()
[ "def", "system_shutdown", "(", ")", ":", "logging", ".", "info", "(", "'Performing system shutdown'", ")", "Thread", "(", "target", "=", "halt", ")", ".", "start", "(", ")", "while", "__INITIALIZED__", ":", "time", ".", "sleep", "(", "1.0", ")", "if", "sabnzbd", ".", "WIN32", ":", "powersup", ".", "win_shutdown", "(", ")", "elif", "DARWIN", ":", "powersup", ".", "osx_shutdown", "(", ")", "else", ":", "powersup", ".", "linux_shutdown", "(", ")" ]
shutdown system after halting download and saving bookkeeping .
train
false
2,690
def get_package_path(package_name): try: __import__(package_name) return sys.modules[package_name].__path__[0] except AttributeError: raise AttributeError(('%r is not a package' % package_name))
[ "def", "get_package_path", "(", "package_name", ")", ":", "try", ":", "__import__", "(", "package_name", ")", "return", "sys", ".", "modules", "[", "package_name", "]", ".", "__path__", "[", "0", "]", "except", "AttributeError", ":", "raise", "AttributeError", "(", "(", "'%r is not a package'", "%", "package_name", ")", ")" ]
import *package_name* and return its filesystem path .
train
false
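a short demonstration; sys is imported explicitly since the snippet reads sys.modules from module scope:

import sys

print(get_package_path('json'))  # filesystem path of the json package
try:
    get_package_path('math')     # plain modules have no __path__
except AttributeError as exc:
    print(exc)                   # 'math' is not a package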
2,691
def find_max_occupancy_node(dir_list): count = 0 number = 0 length = 0 for dirs in dir_list: if (length < len(dirs)): length = len(dirs) number = count count += 1 return number
[ "def", "find_max_occupancy_node", "(", "dir_list", ")", ":", "count", "=", "0", "number", "=", "0", "length", "=", "0", "for", "dirs", "in", "dir_list", ":", "if", "(", "length", "<", "len", "(", "dirs", ")", ")", ":", "length", "=", "len", "(", "dirs", ")", "number", "=", "count", "count", "+=", "1", "return", "number" ]
find the index of the node whose directory list has the most entries .
train
false
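the index arithmetic is easy to check directly:

dir_lists = [['a'], ['a', 'b', 'c'], ['a', 'b']]
assert find_max_occupancy_node(dir_lists) == 1  # the list at index 1 has the most entries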
2,692
def to_qcolor(color): qcolor = QtGui.QColor() try: rgba = mcolors.to_rgba(color) except ValueError: warnings.warn((u'Ignoring invalid color %r' % color)) return qcolor qcolor.setRgbF(*rgba) return qcolor
[ "def", "to_qcolor", "(", "color", ")", ":", "qcolor", "=", "QtGui", ".", "QColor", "(", ")", "try", ":", "rgba", "=", "mcolors", ".", "to_rgba", "(", "color", ")", "except", "ValueError", ":", "warnings", ".", "warn", "(", "(", "u'Ignoring invalid color %r'", "%", "color", ")", ")", "return", "qcolor", "qcolor", ".", "setRgbF", "(", "*", "rgba", ")", "return", "qcolor" ]
create a qcolor from a matplotlib color .
train
false
2,693
def VonMises(name, mu, k): return rv(name, VonMisesDistribution, (mu, k))
[ "def", "VonMises", "(", "name", ",", "mu", ",", "k", ")", ":", "return", "rv", "(", "name", ",", "VonMisesDistribution", ",", "(", "mu", ",", "k", ")", ")" ]
create a continuous random variable with a von mises distribution .
train
false
2,694
def get_missing_flags(conf, atom, flags): new_flags = [] for flag in flags: if (not has_flag(conf, atom, flag)): new_flags.append(flag) return new_flags
[ "def", "get_missing_flags", "(", "conf", ",", "atom", ",", "flags", ")", ":", "new_flags", "=", "[", "]", "for", "flag", "in", "flags", ":", "if", "(", "not", "has_flag", "(", "conf", ",", "atom", ",", "flag", ")", ")", ":", "new_flags", ".", "append", "(", "flag", ")", "return", "new_flags" ]
find out which of the given flags are currently not set .
train
true
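a sketch assuming has_flag(conf, atom, flag) reports whether a flag is already set (a hypothetical stand-in is stubbed below):

def has_flag(conf, atom, flag):  # hypothetical stand-in for the module's real check
    return flag in conf.get(atom, set())

conf = {'dev-lang/python': {'ssl'}}
assert get_missing_flags(conf, 'dev-lang/python', ['ssl', 'sqlite']) == ['sqlite']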
2,695
def update_real_time_attachments(real_time_data_components): for d in real_time_data_components: d.detach() for d in real_time_data_components: d.attach()
[ "def", "update_real_time_attachments", "(", "real_time_data_components", ")", ":", "for", "d", "in", "real_time_data_components", ":", "d", ".", "detach", "(", ")", "for", "d", "in", "real_time_data_components", ":", "d", ".", "attach", "(", ")" ]
refresh all the real-time data components by detaching and re-attaching them .
train
false
2,697
def _resp_etag_property(): def getter(self): etag = self.headers.get('etag', None) if etag: etag = etag.replace('"', '') return etag def setter(self, value): if (value is None): self.headers['etag'] = None else: self.headers['etag'] = ('"%s"' % value) return property(getter, setter, doc='Retrieve and set the response Etag header')
[ "def", "_resp_etag_property", "(", ")", ":", "def", "getter", "(", "self", ")", ":", "etag", "=", "self", ".", "headers", ".", "get", "(", "'etag'", ",", "None", ")", "if", "etag", ":", "etag", "=", "etag", ".", "replace", "(", "'\"'", ",", "''", ")", "return", "etag", "def", "setter", "(", "self", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "self", ".", "headers", "[", "'etag'", "]", "=", "None", "else", ":", "self", ".", "headers", "[", "'etag'", "]", "=", "(", "'\"%s\"'", "%", "value", ")", "return", "property", "(", "getter", ",", "setter", ",", "doc", "=", "'Retrieve and set the response Etag header'", ")" ]
create a property that sets and retrieves the response etag header .
train
false
2,699
def _fileno_can_read(fileno): return (len(select.select([fileno], [], [], 0)[0]) > 0)
[ "def", "_fileno_can_read", "(", "fileno", ")", ":", "return", "(", "len", "(", "select", ".", "select", "(", "[", "fileno", "]", ",", "[", "]", ",", "[", "]", ",", "0", ")", "[", "0", "]", ")", ">", "0", ")" ]
check if a file descriptor is readable .
train
false
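a posix-only demonstration using a pipe; select is imported explicitly since the snippet uses it from module scope:

import os
import select

r, w = os.pipe()
assert not _fileno_can_read(r)  # nothing written yet
os.write(w, b'x')
assert _fileno_can_read(r)      # data is now pending
os.close(r)
os.close(w)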
2,700
def is_tree(G): if (len(G) == 0): raise nx.exception.NetworkXPointlessConcept('G has no nodes.') if G.is_directed(): is_connected = nx.is_weakly_connected else: is_connected = nx.is_connected return (((len(G) - 1) == G.number_of_edges()) and is_connected(G))
[ "def", "is_tree", "(", "G", ")", ":", "if", "(", "len", "(", "G", ")", "==", "0", ")", ":", "raise", "nx", ".", "exception", ".", "NetworkXPointlessConcept", "(", "'G has no nodes.'", ")", "if", "G", ".", "is_directed", "(", ")", ":", "is_connected", "=", "nx", ".", "is_weakly_connected", "else", ":", "is_connected", "=", "nx", ".", "is_connected", "return", "(", "(", "(", "len", "(", "G", ")", "-", "1", ")", "==", "G", ".", "number_of_edges", "(", ")", ")", "and", "is_connected", "(", "G", ")", ")" ]
returns true if g is a tree .
train
false
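a quick check with networkx, which also ships this predicate as nx.is_tree:

import networkx as nx

G = nx.path_graph(4)   # 4 nodes, 3 edges, connected
assert is_tree(G)
G.add_edge(0, 3)       # closing a cycle breaks the edge-count invariant
assert not is_tree(G)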