id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
25,541
def timeout_command(command, timeout): cmd = command.split(' ') start = datetime.datetime.now() process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) while (process.poll() is None): time.sleep(0.2) now = datetime.datetime.now() if ((now - start).seconds > timeout): os.system(('sudo kill %s' % process.pid)) os.waitpid((-1), os.WNOHANG) return [] return process.stdout.readlines()
[ "def", "timeout_command", "(", "command", ",", "timeout", ")", ":", "cmd", "=", "command", ".", "split", "(", "' '", ")", "start", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "s...
call shell-command and either return its output or kill it if it doesnt normally exit within timeout seconds and return none .
train
false
25,546
def test_install_with_hacked_egg_info(script, data): run_from = data.packages.join('HackedEggInfo') result = script.pip('install', '.', cwd=run_from) assert ('Successfully installed hackedegginfo-0.0.0\n' in result.stdout)
[ "def", "test_install_with_hacked_egg_info", "(", "script", ",", "data", ")", ":", "run_from", "=", "data", ".", "packages", ".", "join", "(", "'HackedEggInfo'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'.'", ",", "cwd", "=", "run_f...
test installing a package which defines its own egg_info class .
train
false
25,547
def how_to_configure_alias(): settings.init() logs.how_to_configure_alias(shell.how_to_configure())
[ "def", "how_to_configure_alias", "(", ")", ":", "settings", ".", "init", "(", ")", "logs", ".", "how_to_configure_alias", "(", "shell", ".", "how_to_configure", "(", ")", ")" ]
shows useful information about how-to configure alias .
train
false
25,548
def writes_bytecode_files(fxn): if sys.dont_write_bytecode: return (lambda *args, **kwargs: None) @functools.wraps(fxn) def wrapper(*args, **kwargs): original = sys.dont_write_bytecode sys.dont_write_bytecode = False try: to_return = fxn(*args, **kwargs) finally: sys.dont_write_bytecode = original return to_return return wrapper
[ "def", "writes_bytecode_files", "(", "fxn", ")", ":", "if", "sys", ".", "dont_write_bytecode", ":", "return", "(", "lambda", "*", "args", ",", "**", "kwargs", ":", "None", ")", "@", "functools", ".", "wraps", "(", "fxn", ")", "def", "wrapper", "(", "*"...
decorator to protect sys .
train
false
25,549
def format_wiki_url(api_key, symbol, start_date, end_date): query_params = [('start_date', start_date.strftime('%Y-%m-%d')), ('end_date', end_date.strftime('%Y-%m-%d')), ('order', 'asc')] if (api_key is not None): query_params = ([('api_key', api_key)] + query_params) return 'https://www.quandl.com/api/v3/datasets/WIKI/{symbol}.csv?{query}'.format(symbol=symbol, query=urlencode(query_params))
[ "def", "format_wiki_url", "(", "api_key", ",", "symbol", ",", "start_date", ",", "end_date", ")", ":", "query_params", "=", "[", "(", "'start_date'", ",", "start_date", ".", "strftime", "(", "'%Y-%m-%d'", ")", ")", ",", "(", "'end_date'", ",", "end_date", ...
build a query url for a quandl wiki dataset .
train
false
25,551
def _ls_task_logs(fs, log_dir_stream, application_id=None, job_id=None): stderr_logs = [] syslogs = [] for match in _ls_logs(fs, log_dir_stream, _match_task_log_path, application_id=application_id, job_id=job_id): if (match['log_type'] == 'stderr'): stderr_logs.append(match) elif (match['log_type'] == 'syslog'): syslogs.append(match) key_to_syslog = dict(((_log_key(match), match) for match in syslogs)) for stderr_log in stderr_logs: stderr_log['syslog'] = key_to_syslog.get(_log_key(stderr_log)) stderr_logs_with_syslog = [m for m in stderr_logs if m['syslog']] return (stderr_logs_with_syslog + syslogs)
[ "def", "_ls_task_logs", "(", "fs", ",", "log_dir_stream", ",", "application_id", "=", "None", ",", "job_id", "=", "None", ")", ":", "stderr_logs", "=", "[", "]", "syslogs", "=", "[", "]", "for", "match", "in", "_ls_logs", "(", "fs", ",", "log_dir_stream"...
yield matching logs .
train
false
25,556
def mmread(source): return MMFile().read(source)
[ "def", "mmread", "(", "source", ")", ":", "return", "MMFile", "(", ")", ".", "read", "(", "source", ")" ]
reads the contents of a matrix market file-like source into a matrix .
train
false
25,558
def inverse(u, v): (u3, v3) = (long(u), long(v)) (u1, v1) = (1L, 0L) while (v3 > 0): q = divmod(u3, v3)[0] (u1, v1) = (v1, (u1 - (v1 * q))) (u3, v3) = (v3, (u3 - (v3 * q))) while (u1 < 0): u1 = (u1 + v) return u1
[ "def", "inverse", "(", "u", ",", "v", ")", ":", "(", "u3", ",", "v3", ")", "=", "(", "long", "(", "u", ")", ",", "long", "(", "v", ")", ")", "(", "u1", ",", "v1", ")", "=", "(", "1", "L", ",", "0", "L", ")", "while", "(", "v3", ">", ...
inverse:long return the inverse of u mod v .
train
false
25,559
def default_locale(category=None, aliases=LOCALE_ALIASES): varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG') for name in filter(None, varnames): locale = os.getenv(name) if locale: if ((name == 'LANGUAGE') and (':' in locale)): locale = locale.split(':')[0] if (locale.split('.')[0] in ('C', 'POSIX')): locale = 'en_US_POSIX' elif (aliases and (locale in aliases)): locale = aliases[locale] try: return get_locale_identifier(parse_locale(locale)) except ValueError: pass
[ "def", "default_locale", "(", "category", "=", "None", ",", "aliases", "=", "LOCALE_ALIASES", ")", ":", "varnames", "=", "(", "category", ",", "'LANGUAGE'", ",", "'LC_ALL'", ",", "'LC_CTYPE'", ",", "'LANG'", ")", "for", "name", "in", "filter", "(", "None",...
require the locale to be the default .
train
false
25,560
def make_temporary_directory(base_path): if (not base_path.exists()): base_path.makedirs() temp_dir = tempfile.mkdtemp(dir=base_path.path) return FilePath(temp_dir)
[ "def", "make_temporary_directory", "(", "base_path", ")", ":", "if", "(", "not", "base_path", ".", "exists", "(", ")", ")", ":", "base_path", ".", "makedirs", "(", ")", "temp_dir", "=", "tempfile", ".", "mkdtemp", "(", "dir", "=", "base_path", ".", "path...
create a temporary directory beneath base_path .
train
false
25,561
def _parallel_predict_proba(estimators, estimators_features, X, n_classes): n_samples = X.shape[0] proba = np.zeros((n_samples, n_classes)) for (estimator, features) in zip(estimators, estimators_features): if hasattr(estimator, 'predict_proba'): proba_estimator = estimator.predict_proba(X[:, features]) if (n_classes == len(estimator.classes_)): proba += proba_estimator else: proba[:, estimator.classes_] += proba_estimator[:, range(len(estimator.classes_))] else: predictions = estimator.predict(X[:, features]) for i in range(n_samples): proba[(i, predictions[i])] += 1 return proba
[ "def", "_parallel_predict_proba", "(", "estimators", ",", "estimators_features", ",", "X", ",", "n_classes", ")", ":", "n_samples", "=", "X", ".", "shape", "[", "0", "]", "proba", "=", "np", ".", "zeros", "(", "(", "n_samples", ",", "n_classes", ")", ")"...
private function used to compute predictions within a job .
train
false
25,563
def _sanitize_tag(match): tag = match.group(0) if (ALLOWED_BASIC_TAG_PATTERN.match(tag) or ALLOWED_A_PATTERN.match(tag) or ALLOWED_IMG_PATTERN.match(tag)): return tag else: return ''
[ "def", "_sanitize_tag", "(", "match", ")", ":", "tag", "=", "match", ".", "group", "(", "0", ")", "if", "(", "ALLOWED_BASIC_TAG_PATTERN", ".", "match", "(", "tag", ")", "or", "ALLOWED_A_PATTERN", ".", "match", "(", "tag", ")", "or", "ALLOWED_IMG_PATTERN", ...
return the tag if it is allowed or the empty string otherwise .
train
false
25,564
def test_prewitt_h_zeros(): result = filters.prewitt_h(np.zeros((10, 10)), np.ones((10, 10), bool)) assert_allclose(result, 0)
[ "def", "test_prewitt_h_zeros", "(", ")", ":", "result", "=", "filters", ".", "prewitt_h", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclos...
horizontal prewitt on an array of all zeros .
train
false
25,565
@register.function @jinja2.contextfunction def media(context, url): return urljoin(settings.MEDIA_URL, cache_buster(context, url))
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "media", "(", "context", ",", "url", ")", ":", "return", "urljoin", "(", "settings", ".", "MEDIA_URL", ",", "cache_buster", "(", "context", ",", "url", ")", ")" ]
adds media-related context variables to the context .
train
false
25,566
def low(data, queue=False, **kwargs): conflict = _check_queue(queue, kwargs) if (conflict is not None): return conflict try: st_ = salt.state.State(__opts__, proxy=__proxy__) except NameError: st_ = salt.state.State(__opts__) err = st_.verify_data(data) if err: __context__['retcode'] = 1 return err ret = st_.call(data) if isinstance(ret, list): __context__['retcode'] = 1 if salt.utils.check_state_result(ret): __context__['retcode'] = 2 return ret
[ "def", "low", "(", "data", ",", "queue", "=", "False", ",", "**", "kwargs", ")", ":", "conflict", "=", "_check_queue", "(", "queue", ",", "kwargs", ")", "if", "(", "conflict", "is", "not", "None", ")", ":", "return", "conflict", "try", ":", "st_", ...
execute a single low data call this function is mostly intended for testing the state system and is not likely to be needed in everyday usage .
train
false
25,569
def RaisedCosine(name, mu, s): return rv(name, RaisedCosineDistribution, (mu, s))
[ "def", "RaisedCosine", "(", "name", ",", "mu", ",", "s", ")", ":", "return", "rv", "(", "name", ",", "RaisedCosineDistribution", ",", "(", "mu", ",", "s", ")", ")" ]
create a continuous random variable with a raised cosine distribution .
train
false
25,570
def _product(*iters): import itertools inf_iters = tuple((itertools.cycle(enumerate(it)) for it in iters)) num_iters = len(inf_iters) cur_val = ([None] * num_iters) first_v = True while True: (i, p) = (0, num_iters) while (p and (not i)): p -= 1 (i, cur_val[p]) = next(inf_iters[p]) if ((not p) and (not i)): if first_v: first_v = False else: break (yield cur_val)
[ "def", "_product", "(", "*", "iters", ")", ":", "import", "itertools", "inf_iters", "=", "tuple", "(", "(", "itertools", ".", "cycle", "(", "enumerate", "(", "it", ")", ")", "for", "it", "in", "iters", ")", ")", "num_iters", "=", "len", "(", "inf_ite...
cartesian product generator notes unlike itertools .
train
false
25,571
def can_load_page(func): @wraps(func) def wrapper(self, *args, **kwargs): expect_loading = kwargs.pop('expect_loading', False) if expect_loading: self.loaded = False func(self, *args, **kwargs) return self.wait_for_page_loaded(timeout=kwargs.pop('timeout', None)) return func(self, *args, **kwargs) return wrapper
[ "def", "can_load_page", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "expect_loading", "=", "kwargs", ".", "pop", "(", "'expect_loading'", ",", "False", ")", "i...
decorator that specifies if user can expect page loading from this action .
train
false
25,572
def FindSentinel(filename, blocksize=(2 ** 16), error_fh=sys.stderr): if (filename == '-'): StatusUpdate("Can't combine --append with output to stdout.", error_fh) sys.exit(2) try: fp = open(filename, 'rb') except IOError as err: StatusUpdate(("Append mode disabled: can't read %r: %s." % (filename, err)), error_fh) return None try: fp.seek(0, 2) fp.seek(max(0, (fp.tell() - blocksize))) lines = fp.readlines() del lines[:1] sentinel = None for line in lines: if (not line.startswith(' DCTB ')): sentinel = line if (not sentinel): StatusUpdate(("Append mode disabled: can't find sentinel in %r." % filename), error_fh) return None return sentinel.rstrip('\n') finally: fp.close()
[ "def", "FindSentinel", "(", "filename", ",", "blocksize", "=", "(", "2", "**", "16", ")", ",", "error_fh", "=", "sys", ".", "stderr", ")", ":", "if", "(", "filename", "==", "'-'", ")", ":", "StatusUpdate", "(", "\"Can't combine --append with output to stdout...
return the sentinel line from the output file .
train
false
25,574
def sid(): return ('id-' + rndstr(17))
[ "def", "sid", "(", ")", ":", "return", "(", "'id-'", "+", "rndstr", "(", "17", ")", ")" ]
creates an unique sid for each session .
train
false
25,575
@event(u'task.execute.completed') def flush_task(task): SimplePersistence.flush(task.name) if task.manager.is_daemon: SimplePersistence.flush()
[ "@", "event", "(", "u'task.execute.completed'", ")", "def", "flush_task", "(", "task", ")", ":", "SimplePersistence", ".", "flush", "(", "task", ".", "name", ")", "if", "task", ".", "manager", ".", "is_daemon", ":", "SimplePersistence", ".", "flush", "(", ...
stores all in memory key/value pairs to database when a task has completed .
train
false
25,577
def test_svc(): (X_blobs, y_blobs) = make_blobs(n_samples=100, centers=10, random_state=0) X_blobs = sparse.csr_matrix(X_blobs) datasets = [[X_sp, Y, T], [X2_sp, Y2, T2], [X_blobs[:80], y_blobs[:80], X_blobs[80:]], [iris.data, iris.target, iris.data]] kernels = ['linear', 'poly', 'rbf', 'sigmoid'] for dataset in datasets: for kernel in kernels: clf = svm.SVC(kernel=kernel, probability=True, random_state=0, decision_function_shape='ovo') sp_clf = svm.SVC(kernel=kernel, probability=True, random_state=0, decision_function_shape='ovo') check_svm_model_equal(clf, sp_clf, *dataset)
[ "def", "test_svc", "(", ")", ":", "(", "X_blobs", ",", "y_blobs", ")", "=", "make_blobs", "(", "n_samples", "=", "100", ",", "centers", "=", "10", ",", "random_state", "=", "0", ")", "X_blobs", "=", "sparse", ".", "csr_matrix", "(", "X_blobs", ")", "...
check that sparse svc gives the same result as svc .
train
false
25,579
def py_binary(name, srcs=[], deps=[], main=None, base=None, **kwargs): target = PythonBinary(name, srcs, deps, main, base, kwargs) blade.blade.register_target(target)
[ "def", "py_binary", "(", "name", ",", "srcs", "=", "[", "]", ",", "deps", "=", "[", "]", ",", "main", "=", "None", ",", "base", "=", "None", ",", "**", "kwargs", ")", ":", "target", "=", "PythonBinary", "(", "name", ",", "srcs", ",", "deps", ",...
python binary .
train
false
25,581
def _lower(s): if s: return s.lower() return None
[ "def", "_lower", "(", "s", ")", ":", "if", "s", ":", "return", "s", ".", "lower", "(", ")", "return", "None" ]
return s .
train
false
25,582
def _format_zendesk_custom_fields(context): custom_fields = [] for (key, val) in settings.ZENDESK_CUSTOM_FIELDS.items(): if (key in context): custom_fields.append({'id': val, 'value': context[key]}) return custom_fields
[ "def", "_format_zendesk_custom_fields", "(", "context", ")", ":", "custom_fields", "=", "[", "]", "for", "(", "key", ",", "val", ")", "in", "settings", ".", "ZENDESK_CUSTOM_FIELDS", ".", "items", "(", ")", ":", "if", "(", "key", "in", "context", ")", ":"...
format the data in context for compatibility with the zendesk api .
train
false
25,584
def random_stochastic_matrix(n, k=None, sparse=False, format='csr', random_state=None): P = _random_stochastic_matrix(m=n, n=n, k=k, sparse=sparse, format=format, random_state=random_state) return P
[ "def", "random_stochastic_matrix", "(", "n", ",", "k", "=", "None", ",", "sparse", "=", "False", ",", "format", "=", "'csr'", ",", "random_state", "=", "None", ")", ":", "P", "=", "_random_stochastic_matrix", "(", "m", "=", "n", ",", "n", "=", "n", "...
return a randomly sampled n x n stochastic matrix with k nonzero entries for each row .
train
true
25,586
def is_block(fthing): if ((fthing is sys.stdout) or (fthing is sys.stdin)): return True else: mode = os.stat(fthing.name).st_mode return (S_ISBLK(mode) or S_ISCHR(mode))
[ "def", "is_block", "(", "fthing", ")", ":", "if", "(", "(", "fthing", "is", "sys", ".", "stdout", ")", "or", "(", "fthing", "is", "sys", ".", "stdin", ")", ")", ":", "return", "True", "else", ":", "mode", "=", "os", ".", "stat", "(", "fthing", ...
take in a file object and checks to see if its a block or fifo .
train
false
25,587
def lagrange(x, w): M = len(x) p = poly1d(0.0) for j in xrange(M): pt = poly1d(w[j]) for k in xrange(M): if (k == j): continue fac = (x[j] - x[k]) pt *= (poly1d([1.0, (- x[k])]) / fac) p += pt return p
[ "def", "lagrange", "(", "x", ",", "w", ")", ":", "M", "=", "len", "(", "x", ")", "p", "=", "poly1d", "(", "0.0", ")", "for", "j", "in", "xrange", "(", "M", ")", ":", "pt", "=", "poly1d", "(", "w", "[", "j", "]", ")", "for", "k", "in", "...
return a lagrange interpolating polynomial .
train
false
25,588
def premetadata_create_container_stat_table(self, conn, put_timestamp=None): if (put_timestamp is None): put_timestamp = normalize_timestamp(0) conn.executescript("\n CREATE TABLE container_stat (\n account TEXT,\n container TEXT,\n created_at TEXT,\n put_timestamp TEXT DEFAULT '0',\n delete_timestamp TEXT DEFAULT '0',\n object_count INTEGER,\n bytes_used INTEGER,\n reported_put_timestamp TEXT DEFAULT '0',\n reported_delete_timestamp TEXT DEFAULT '0',\n reported_object_count INTEGER DEFAULT 0,\n reported_bytes_used INTEGER DEFAULT 0,\n hash TEXT default '00000000000000000000000000000000',\n id TEXT,\n status TEXT DEFAULT '',\n status_changed_at TEXT DEFAULT '0'\n );\n\n INSERT INTO container_stat (object_count, bytes_used)\n VALUES (0, 0);\n ") conn.execute('\n UPDATE container_stat\n SET account = ?, container = ?, created_at = ?, id = ?,\n put_timestamp = ?\n ', (self.account, self.container, normalize_timestamp(time()), str(uuid4()), put_timestamp))
[ "def", "premetadata_create_container_stat_table", "(", "self", ",", "conn", ",", "put_timestamp", "=", "None", ")", ":", "if", "(", "put_timestamp", "is", "None", ")", ":", "put_timestamp", "=", "normalize_timestamp", "(", "0", ")", "conn", ".", "executescript",...
copied from swift .
train
false
25,589
def allow(methods=None, debug=False): if (not isinstance(methods, (tuple, list))): methods = [methods] methods = [m.upper() for m in methods if m] if (not methods): methods = ['GET', 'HEAD'] elif (('GET' in methods) and ('HEAD' not in methods)): methods.append('HEAD') cherrypy.response.headers['Allow'] = ', '.join(methods) if (cherrypy.request.method not in methods): if debug: cherrypy.log(('request.method %r not in methods %r' % (cherrypy.request.method, methods)), 'TOOLS.ALLOW') raise cherrypy.HTTPError(405) elif debug: cherrypy.log(('request.method %r in methods %r' % (cherrypy.request.method, methods)), 'TOOLS.ALLOW')
[ "def", "allow", "(", "methods", "=", "None", ",", "debug", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "methods", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "methods", "=", "[", "methods", "]", "methods", "=", "[", "m", ...
raise 405 if request .
train
false
25,590
def get_lookup(args, session=None, recreate=False): if (recreate and (not session)): raise ValueError('get_lookup() needs an explicit session to regen the index') index_dir = args.index_dir got_from = 'command line' if (index_dir is None): (index_dir, got_from) = defaults.get_default_index_dir_with_origin() if args.verbose: print(('Opened lookup index %(index_dir)s (from %(got_from)s)' % dict(index_dir=index_dir, got_from=got_from))) lookup = pokedex.lookup.PokedexLookup(index_dir, session=session) if recreate: lookup.rebuild_index() return lookup
[ "def", "get_lookup", "(", "args", ",", "session", "=", "None", ",", "recreate", "=", "False", ")", ":", "if", "(", "recreate", "and", "(", "not", "session", ")", ")", ":", "raise", "ValueError", "(", "'get_lookup() needs an explicit session to regen the index'",...
given a parsed options object .
train
false
25,591
@test def rast(h): if (h[:4] == 'Y\xa6j\x95'): return u'rast'
[ "@", "test", "def", "rast", "(", "h", ")", ":", "if", "(", "h", "[", ":", "4", "]", "==", "'Y\\xa6j\\x95'", ")", ":", "return", "u'rast'" ]
sun raster file .
train
false
25,592
def instance_type_get_all(context, inactive=False, filters=None): return IMPL.instance_type_get_all(context, inactive=inactive, filters=filters)
[ "def", "instance_type_get_all", "(", "context", ",", "inactive", "=", "False", ",", "filters", "=", "None", ")", ":", "return", "IMPL", ".", "instance_type_get_all", "(", "context", ",", "inactive", "=", "inactive", ",", "filters", "=", "filters", ")" ]
get all instance types .
train
false
25,594
def _build_list(option_value, item_kind): if (option_value is not None): items = [] if (option_value == 'none'): return items if (not isinstance(option_value, list)): values = option_value.split(',') else: values = option_value for value in values: if (item_kind is None): items.append(value) else: items.append({'kind': item_kind, 'name': value}) return items return None
[ "def", "_build_list", "(", "option_value", ",", "item_kind", ")", ":", "if", "(", "option_value", "is", "not", "None", ")", ":", "items", "=", "[", "]", "if", "(", "option_value", "==", "'none'", ")", ":", "return", "items", "if", "(", "not", "isinstan...
pass in an option to check for a list of items .
train
true
25,595
def p_statements(t): pass
[ "def", "p_statements", "(", "t", ")", ":", "pass" ]
statements : statements statement .
train
false
25,597
@pytest.fixture def temp_basedir_env(tmpdir, short_tmpdir): data_dir = (tmpdir / 'data') config_dir = (tmpdir / 'config') runtime_dir = (short_tmpdir / 'rt') cache_dir = (tmpdir / 'cache') runtime_dir.ensure(dir=True) runtime_dir.chmod(448) ((data_dir / 'qutebrowser') / 'state').write_text('[general]\nquickstart-done = 1', encoding='utf-8', ensure=True) env = {'XDG_DATA_HOME': str(data_dir), 'XDG_CONFIG_HOME': str(config_dir), 'XDG_RUNTIME_DIR': str(runtime_dir), 'XDG_CACHE_HOME': str(cache_dir)} return env
[ "@", "pytest", ".", "fixture", "def", "temp_basedir_env", "(", "tmpdir", ",", "short_tmpdir", ")", ":", "data_dir", "=", "(", "tmpdir", "/", "'data'", ")", "config_dir", "=", "(", "tmpdir", "/", "'config'", ")", "runtime_dir", "=", "(", "short_tmpdir", "/"...
return a dict of environment variables that fakes --temp-basedir .
train
false
25,598
def facilityMsToNet(SsVersionIndicator_presence=0): a = TpPd(pd=3) b = MessageType(mesType=58) c = Facility() packet = ((a / b) / c) if (SsVersionIndicator_presence is 1): d = SsVersionIndicatorHdr(ieiSVI=127, eightBitSVI=0) packet = (packet / d) return packet
[ "def", "facilityMsToNet", "(", "SsVersionIndicator_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "58", ")", "c", "=", "Facility", "(", ")", "packet", "=", "(", "(", "a", "/"...
facility section 9 .
train
true
25,599
def human_readable_size(value): one_decimal_point = '%.1f' base = 1024 bytes_int = float(value) if (bytes_int == 1): return '1 Byte' elif (bytes_int < base): return ('%d Bytes' % bytes_int) for (i, suffix) in enumerate(HUMANIZE_SUFFIXES): unit = (base ** (i + 2)) if (round(((bytes_int / unit) * base)) < base): return ('%.1f %s' % (((base * bytes_int) / unit), suffix))
[ "def", "human_readable_size", "(", "value", ")", ":", "one_decimal_point", "=", "'%.1f'", "base", "=", "1024", "bytes_int", "=", "float", "(", "value", ")", "if", "(", "bytes_int", "==", "1", ")", ":", "return", "'1 Byte'", "elif", "(", "bytes_int", "<", ...
convert an size in bytes into a human readable format .
train
false
25,601
def enableTrace(traceable): global _traceEnabled _traceEnabled = traceable if traceable: if (not _logger.handlers): _logger.addHandler(logging.StreamHandler()) _logger.setLevel(logging.DEBUG)
[ "def", "enableTrace", "(", "traceable", ")", ":", "global", "_traceEnabled", "_traceEnabled", "=", "traceable", "if", "traceable", ":", "if", "(", "not", "_logger", ".", "handlers", ")", ":", "_logger", ".", "addHandler", "(", "logging", ".", "StreamHandler", ...
turn on/off the tracability .
train
true
25,604
def explnPrime(x): def f(val): if (val < 0): return exp(val) else: return (1.0 / (val + 1.0)) try: result = array(list(map(f, x))) except TypeError: result = array(f(x)) return result
[ "def", "explnPrime", "(", "x", ")", ":", "def", "f", "(", "val", ")", ":", "if", "(", "val", "<", "0", ")", ":", "return", "exp", "(", "val", ")", "else", ":", "return", "(", "1.0", "/", "(", "val", "+", "1.0", ")", ")", "try", ":", "result...
this function is the first derivative of the expln function .
train
false
25,605
def sampleId_pairs(mapping_data, rarefaction_data, category): sids = rarefaction_data[0][3:] categories = [] check = 0 for k in sids: try: categories.append(mapping_data[k][category]) except KeyError: check += 1 if (check == len(sids)): raise ValueError(("No samples had a value for category: '%s'." % category)) combos = list(combinations(set(categories), 2)) sid_pairs = [] for (pair0, pair1) in combos: pair0_sids = [sid for sid in sids if ((category in mapping_data[sid]) and (mapping_data[sid][category] == pair0))] pair1_sids = [sid for sid in sids if ((category in mapping_data[sid]) and (mapping_data[sid][category] == pair1))] sid_pairs.append((pair0_sids, pair1_sids)) return (sid_pairs, combos)
[ "def", "sampleId_pairs", "(", "mapping_data", ",", "rarefaction_data", ",", "category", ")", ":", "sids", "=", "rarefaction_data", "[", "0", "]", "[", "3", ":", "]", "categories", "=", "[", "]", "check", "=", "0", "for", "k", "in", "sids", ":", "try", ...
returns list of sampleid tuples .
train
false
25,606
def get_cls(name, fallback_cls=conf.raw_layer): return globals().get(name, fallback_cls)
[ "def", "get_cls", "(", "name", ",", "fallback_cls", "=", "conf", ".", "raw_layer", ")", ":", "return", "globals", "(", ")", ".", "get", "(", "name", ",", "fallback_cls", ")" ]
returns class named "name" if it exists .
train
false
25,607
def chebyc(n, monic=False): if (n < 0): raise ValueError('n must be nonnegative.') if (n == 0): n1 = (n + 1) else: n1 = n (x, w, mu0) = roots_chebyc(n1, mu=True) if (n == 0): (x, w) = ([], []) hn = ((4 * pi) * ((n == 0) + 1)) kn = 1.0 p = orthopoly1d(x, w, hn, kn, wfunc=(lambda x: (1.0 / sqrt((1 - ((x * x) / 4.0))))), limits=((-2), 2), monic=monic) if (not monic): p._scale((2.0 / p(2))) p.__dict__['_eval_func'] = (lambda x: eval_chebyc(n, x)) return p
[ "def", "chebyc", "(", "n", ",", "monic", "=", "False", ")", ":", "if", "(", "n", "<", "0", ")", ":", "raise", "ValueError", "(", "'n must be nonnegative.'", ")", "if", "(", "n", "==", "0", ")", ":", "n1", "=", "(", "n", "+", "1", ")", "else", ...
chebyshev polynomial of the first kind on :math:[-2 .
train
false
25,608
def get_instance_type_by_flavor_id(flavorid, ctxt=None, read_deleted='yes'): if (ctxt is None): ctxt = context.get_admin_context(read_deleted=read_deleted) return db.instance_type_get_by_flavor_id(ctxt, flavorid)
[ "def", "get_instance_type_by_flavor_id", "(", "flavorid", ",", "ctxt", "=", "None", ",", "read_deleted", "=", "'yes'", ")", ":", "if", "(", "ctxt", "is", "None", ")", ":", "ctxt", "=", "context", ".", "get_admin_context", "(", "read_deleted", "=", "read_dele...
retrieve instance type by flavorid .
train
false
25,610
def metadata_response(request, full_url, headers): parsed_url = urlparse(full_url) tomorrow = (datetime.datetime.utcnow() + datetime.timedelta(days=1)) credentials = dict(AccessKeyId=u'test-key', SecretAccessKey=u'test-secret-key', Token=u'test-session-token', Expiration=tomorrow.strftime(u'%Y-%m-%dT%H:%M:%SZ')) path = parsed_url.path meta_data_prefix = u'/latest/meta-data/' if path.startswith(meta_data_prefix): path = path[len(meta_data_prefix):] if (path == u''): result = u'iam' elif (path == u'iam'): result = json.dumps({u'security-credentials': {u'default-role': credentials}}) elif (path == u'iam/security-credentials/'): result = u'default-role' elif (path == u'iam/security-credentials/default-role'): result = json.dumps(credentials) else: raise NotImplementedError(u'The {0} metadata path has not been implemented'.format(path)) return (200, headers, result)
[ "def", "metadata_response", "(", "request", ",", "full_url", ",", "headers", ")", ":", "parsed_url", "=", "urlparse", "(", "full_url", ")", "tomorrow", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "+", "datetime", ".", "timedelta", "(", ...
mock response for localhost metadata URL .
train
false
25,611
def toIRINormal(xri): if (not xri.startswith('xri://')): xri = ('xri://' + xri) return escapeForIRI(xri)
[ "def", "toIRINormal", "(", "xri", ")", ":", "if", "(", "not", "xri", ".", "startswith", "(", "'xri://'", ")", ")", ":", "xri", "=", "(", "'xri://'", "+", "xri", ")", "return", "escapeForIRI", "(", "xri", ")" ]
transform an xri to iri-normal form .
train
false
25,612
def print_blob_acl(bucket_name, blob_name): storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) for entry in blob.acl: print '{}: {}'.format(entry['role'], entry['entity'])
[ "def", "print_blob_acl", "(", "bucket_name", ",", "blob_name", ")", ":", "storage_client", "=", "storage", ".", "Client", "(", ")", "bucket", "=", "storage_client", ".", "bucket", "(", "bucket_name", ")", "blob", "=", "bucket", ".", "blob", "(", "blob_name",...
prints out a blobs access control list .
train
false
25,615
def server_add_user(username, ssh_key_pwd=''): bash(("useradd -s '%s' '%s'" % (os.path.join(BASE_DIR, 'init.sh'), username))) gen_ssh_key(username, ssh_key_pwd)
[ "def", "server_add_user", "(", "username", ",", "ssh_key_pwd", "=", "''", ")", ":", "bash", "(", "(", "\"useradd -s '%s' '%s'\"", "%", "(", "os", ".", "path", ".", "join", "(", "BASE_DIR", ",", "'init.sh'", ")", ",", "username", ")", ")", ")", "gen_ssh_k...
add a system user in jumpserver 在jumpserver服务器上添加一个用户 .
train
false
25,616
def RequestsFailedCriteria(request_rate, failed_rate): alerts = [] warnings = [] def _ComputeThreshold(x): return math.ceil(math.sqrt(x)) if (failed_rate['cluster_total'] > _ComputeThreshold(request_rate['cluster_total'])): warnings.append(CLUSTER_TOKEN) for (m, v) in request_rate['machine_data'].iteritems(): if (failed_rate['machine_data'][m] > _ComputeThreshold(v)): warnings.append(m) return (alerts, warnings)
[ "def", "RequestsFailedCriteria", "(", "request_rate", ",", "failed_rate", ")", ":", "alerts", "=", "[", "]", "warnings", "=", "[", "]", "def", "_ComputeThreshold", "(", "x", ")", ":", "return", "math", ".", "ceil", "(", "math", ".", "sqrt", "(", "x", "...
monitor the rate of failed service requests on the server .
train
false
25,617
def app_restore(storage, bucket_name=None): if (not makedirs(APP_BACKUP_DIR_LOCATION)): logging.warning("Dir '{0}' already exists. Skipping dir creation...".format(APP_BACKUP_DIR_LOCATION)) if (storage == StorageTypes.GCS): objects = gcs_helper.list_bucket(bucket_name) for app_path in objects: if (not app_path.startswith(gcs_helper.APPS_GCS_PREFIX)): continue app_file = app_path[len(gcs_helper.APPS_GCS_PREFIX):] source = 'gs://{0}/{1}'.format(bucket_name, app_path) destination = '{0}/{1}'.format(APP_BACKUP_DIR_LOCATION, app_file) if (not gcs_helper.download_from_bucket(source, destination)): logging.error("Error while downloading '{0}' from GCS.".format(source)) delete_app_tars(APP_BACKUP_DIR_LOCATION) return False apps_to_deploy = [os.path.join(APP_BACKUP_DIR_LOCATION, app) for app in os.listdir(APP_BACKUP_DIR_LOCATION)] if (not deploy_apps(apps_to_deploy)): logging.error('Failed to successfully deploy one or more of the following apps: {0}'.format(apps_to_deploy)) return False return True
[ "def", "app_restore", "(", "storage", ",", "bucket_name", "=", "None", ")", ":", "if", "(", "not", "makedirs", "(", "APP_BACKUP_DIR_LOCATION", ")", ")", ":", "logging", ".", "warning", "(", "\"Dir '{0}' already exists. Skipping dir creation...\"", ".", "format", "...
restores the app source code from the backups location on the filesystem .
train
false
25,620
@skip('silverlight')
def test_cp7050():
    """Regression test for CodePlex issue 7050 (complements CPython's test_import).

    Importing NT-specific or otherwise non-importable modules must fail
    with ImportError.
    """
    try:
        import Nt
        AssertUnreachable("Should not have been able to import 'Nt'")
    except:
        # Any import failure is acceptable here; the explicit AssertError
        # calls below pin down the ImportError behaviour.
        pass
    AssertError(ImportError, __import__, 'Nt')
    AssertError(ImportError, __import__, 'Lib')
    AssertError(ImportError, __import__, 'iptest.Assert_Util')
[ "@", "skip", "(", "'silverlight'", ")", "def", "test_cp7050", "(", ")", ":", "try", ":", "import", "Nt", "AssertUnreachable", "(", "\"Should not have been able to import 'Nt'\"", ")", "except", ":", "pass", "AssertError", "(", "ImportError", ",", "__import__", ","...
this test case complements cpythons test_import .
train
false
25,621
def validate_windows_cred(host, username='Administrator', password=None, retries=10, retry_delay=1):
    """Check whether Windows credentials are valid by running `hostname` via winexe.

    Args:
        host: target Windows host.
        username: account to authenticate as.
        password: account password (redacted in logs).
        retries: number of attempts before giving up.
        retry_delay: seconds to sleep between attempts.

    Returns:
        True if the remote command succeeded (exit code 0), else False.
    """
    cmd = 'winexe -U \'{0}%{1}\' //{2} "hostname"'.format(username, password, host)
    # Separate command string with the password redacted, for logging only.
    logging_cmd = 'winexe -U \'{0}%XXX-REDACTED-XXX\' //{1} "hostname"'.format(username, host)
    # BUGFIX: initialise ret_code so retries < 1 returns False instead of
    # raising NameError on the final comparison.
    ret_code = 1
    # BUGFIX: xrange() is Python-2-only; range() is equivalent here.
    for i in range(retries):
        ret_code = win_cmd(cmd, logging_command=logging_cmd)
        if (ret_code == 0):
            break
        time.sleep(retry_delay)
    return (ret_code == 0)
[ "def", "validate_windows_cred", "(", "host", ",", "username", "=", "'Administrator'", ",", "password", "=", "None", ",", "retries", "=", "10", ",", "retry_delay", "=", "1", ")", ":", "cmd", "=", "'winexe -U \\'{0}%{1}\\' //{2} \"hostname\"'", ".", "format", "(",...
check if the windows credentials are valid .
train
false
25,622
def _name_to_kw(category_cls, name):
    """Return the keyword arguments needed to build a traffic-data instance.

    Dispatches on the traffic category class: most categories map their name
    directly onto one keyword; the "targeted" categories need the compound
    name split into codename and subreddit first.
    """

    def target_split(name):
        """Split a name that contains multiple words.

        Name is (link, campaign-subreddit) where link and campaign are thing
        fullnames. campaign and subreddit are each optional, so the string
        could look like any of: (t3_bh, t8_ab-pics), (t3_bh, t8_ab),
        (t3_bh, -pics), (t3_bh,). Also check for the old format (t3_by, pics).
        """
        (link_codename, target_info) = name
        campaign_codename = None
        if (not target_info):
            subreddit = ''
        elif (target_info.find('-') != (-1)):
            # New format: "<campaign>-<subreddit>".
            (campaign_codename, subreddit) = target_info.split('-', 1)
        elif (target_info.find('_') != (-1)):
            # A fullname (contains '_') with no subreddit part.
            campaign_codename = target_info
            subreddit = ''
        else:
            # Old format: the whole target is a subreddit name.
            subreddit = target_info
        return {'codename': (campaign_codename or link_codename), 'subreddit': subreddit}

    # Per-category constructors for the keyword dict.
    d = {SitewidePageviews: (lambda n: {}), PageviewsBySubreddit: (lambda n: {'subreddit': n}), PageviewsBySubredditAndPath: (lambda n: {'srpath': n}), PageviewsByLanguage: (lambda n: {'lang': n}), ClickthroughsByCodename: (lambda n: {'codename': name}), AdImpressionsByCodename: (lambda n: {'codename': name}), TargetedClickthroughsByCodename: target_split, TargetedImpressionsByCodename: target_split}
    return d[category_cls](name)
[ "def", "_name_to_kw", "(", "category_cls", ",", "name", ")", ":", "def", "target_split", "(", "name", ")", ":", "(", "link_codename", ",", "target_info", ")", "=", "name", "campaign_codename", "=", "None", "if", "(", "not", "target_info", ")", ":", "subred...
get the keywords needed to build an instance of traffic data .
train
false
25,623
def _my_lrap(y_true, y_score):
    """Simple reference implementation of label ranking average precision.

    Args:
        y_true: binary indicator matrix of shape (n_samples, n_labels).
        y_score: real-valued scores of the same shape.

    Returns:
        The mean LRAP over all samples.
    """
    check_consistent_length(y_true, y_score)
    y_true = check_array(y_true)
    y_score = check_array(y_score)
    (n_samples, n_labels) = y_true.shape
    score = np.empty((n_samples,))
    for i in range(n_samples):
        # Rank scores in decreasing order; tied scores all receive the
        # worst (largest) rank among the tie group.
        (unique_rank, inv_rank) = np.unique(y_score[i], return_inverse=True)
        n_ranks = unique_rank.size
        rank = (n_ranks - inv_rank)
        corr_rank = np.bincount(rank, minlength=(n_ranks + 1)).cumsum()
        rank = corr_rank[rank]
        relevant = y_true[i].nonzero()[0]
        if ((relevant.size == 0) or (relevant.size == n_labels)):
            # Degenerate samples (no relevant labels, or all relevant) score 1.
            score[i] = 1
            continue
        score[i] = 0.0
        for label in relevant:
            # Precision at this relevant label's rank: fraction of the labels
            # ranked at-or-above it that are themselves relevant.
            n_ranked_above = sum(((rank[r] <= rank[label]) for r in relevant))
            score[i] += (n_ranked_above / rank[label])
        score[i] /= relevant.size
    return score.mean()
[ "def", "_my_lrap", "(", "y_true", ",", "y_score", ")", ":", "check_consistent_length", "(", "y_true", ",", "y_score", ")", "y_true", "=", "check_array", "(", "y_true", ")", "y_score", "=", "check_array", "(", "y_score", ")", "(", "n_samples", ",", "n_labels"...
simple implementation of label ranking average precision .
train
false
25,624
def list_deployments(jboss_config):
    """List all deployments on the JBoss instance.

    Args:
        jboss_config: configuration dictionary passed to the jboss7_cli helper.

    Returns:
        A list of deployment names (possibly empty).
    """
    log.debug('======================== MODULE FUNCTION: jboss7.list_deployments')
    command_result = __salt__['jboss7_cli.run_command'](jboss_config, 'deploy')
    deployments = []
    if (len(command_result['stdout']) > 0):
        # BUGFIX: split on runs of whitespace with '\s+'. The original '\s*'
        # pattern can match the empty string; on Python 3.7+ re.split() then
        # splits between every character, producing garbage.
        deployments = re.split('\\s+', command_result['stdout'])
    log.debug('deployments=%s', str(deployments))
    return deployments
[ "def", "list_deployments", "(", "jboss_config", ")", ":", "log", ".", "debug", "(", "'======================== MODULE FUNCTION: jboss7.list_deployments'", ")", "command_result", "=", "__salt__", "[", "'jboss7_cli.run_command'", "]", "(", "jboss_config", ",", "'deploy'", "...
list all deployments on the jboss instance . jboss_config: configuration dictionary with properties specified above .
train
true
25,628
@preloaderStop
def SearchResult(listItems, request, **kwargs):
    """Print the search request followed by the list of matching items.

    Extra keyword arguments are forwarded to printList unchanged.
    """
    printLine(('Search request: %s' % request))
    printList(listItems, **kwargs)
[ "@", "preloaderStop", "def", "SearchResult", "(", "listItems", ",", "request", ",", "**", "kwargs", ")", ":", "printLine", "(", "(", "'Search request: %s'", "%", "request", ")", ")", "printList", "(", "listItems", ",", "**", "kwargs", ")" ]
print search results .
train
false
25,629
def build_publisher(registry, xml_parent, data):
    """yaml: build-publisher

    Emit the Build Publisher plugin configuration, which lets records from
    one Jenkins be published on another Jenkins instance.

    Args:
        registry: plugin registry (unused by this publisher).
        xml_parent: XML element the publisher config is attached to.
        data: yaml-derived dict of publisher options.
    """
    reporter = XML.SubElement(xml_parent, 'hudson.plugins.build__publisher.BuildPublisher')
    # Both flags default to enabled and are serialised as 'true'/'false'.
    for tag, option in (('publishUnstableBuilds', 'publish-unstable-builds'),
                        ('publishFailedBuilds', 'publish-failed-builds')):
        XML.SubElement(reporter, tag).text = str(data.get(option, True)).lower()
    # Only emit a log rotator when at least one retention option is given.
    if ('days-to-keep' in data) or ('num-to-keep' in data):
        logrotator = XML.SubElement(reporter, 'logRotator')
        XML.SubElement(logrotator, 'daysToKeep').text = str(data.get('days-to-keep', -1))
        XML.SubElement(logrotator, 'numToKeep').text = str(data.get('num-to-keep', -1))
        XML.SubElement(logrotator, 'artifactDaysToKeep').text = '-1'
        XML.SubElement(logrotator, 'artifactNumToKeep').text = '-1'
[ "def", "build_publisher", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "reporter", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.build__publisher.BuildPublisher'", ")", "XML", ".", "SubElement", "(", "reporter", ",", "'publi...
yaml: build-publisher this plugin allows records from one jenkins to be published on another jenkins .
train
false
25,630
def fixup_build_ext(cmd):
    """Adjust a build_ext command so its tests pass in the build environment.

    On Windows, mirror the interpreter's debug flavour. On shared-library
    POSIX builds, point library_dirs at wherever RUNSHARED says the shared
    libpython lives.
    """
    if os.name == 'nt':
        cmd.debug = sys.executable.endswith('_d.exe')
        return
    if not sysconfig.get_config_var('Py_ENABLE_SHARED'):
        return
    runshared = sysconfig.get_config_var('RUNSHARED')
    if runshared is None:
        cmd.library_dirs = ['.']
    elif sys.platform == 'darwin':
        # macOS locates the framework without extra library dirs.
        cmd.library_dirs = []
    else:
        # RUNSHARED looks like "LD_LIBRARY_PATH=dir1:dir2"; keep non-empty dirs.
        _, _, value = runshared.partition('=')
        cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
[ "def", "fixup_build_ext", "(", "cmd", ")", ":", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "cmd", ".", "debug", "=", "sys", ".", "executable", ".", "endswith", "(", "'_d.exe'", ")", "elif", "sysconfig", ".", "get_config_var", "(", "'Py_ENABL...
function needed to make build_ext tests pass .
train
false
25,635
def test_Integer_new():
    """Tests for the Integer constructor (truncation, string parsing, Rational input)."""
    _test_rational_new(Integer)
    # Floats are truncated toward zero, not rounded.
    assert _strictly_equal(Integer(0.9), S.Zero)
    assert _strictly_equal(Integer(10.5), Integer(10))
    # Non-integral strings are rejected outright.
    raises(ValueError, (lambda : Integer('10.5')))
    # A Rational just below 2 truncates to 1.
    assert (Integer(Rational(('1.' + ('9' * 20)))) == 1)
[ "def", "test_Integer_new", "(", ")", ":", "_test_rational_new", "(", "Integer", ")", "assert", "_strictly_equal", "(", "Integer", "(", "0.9", ")", ",", "S", ".", "Zero", ")", "assert", "_strictly_equal", "(", "Integer", "(", "10.5", ")", ",", "Integer", "(...
test for integer constructor .
train
false
25,637
def getTerminalCharset():
    """Guess the terminal charset, computing it only once.

    The result of _getTerminalCharset() is cached as an attribute on this
    function itself, so repeated calls are cheap.
    """
    if not hasattr(getTerminalCharset, 'value'):
        getTerminalCharset.value = _getTerminalCharset()
    return getTerminalCharset.value
[ "def", "getTerminalCharset", "(", ")", ":", "try", ":", "return", "getTerminalCharset", ".", "value", "except", "AttributeError", ":", "getTerminalCharset", ".", "value", "=", "_getTerminalCharset", "(", ")", "return", "getTerminalCharset", ".", "value" ]
guess terminal charset using differents tests: 1 .
train
false
25,638
def get_static_upper_page(with_shutdown):
    """Return the HTML for the upper frame of the Wordnet browser.

    Args:
        with_shutdown: when True, include a link that shuts down the server.
    """
    template = '\n<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n<html>\n <!-- Natural Language Toolkit: Wordnet Interface: Graphical Wordnet Browser\n Copyright (C) 2001-2017 NLTK Project\n Author: Jussi Salmela <jtsalmela@users.sourceforge.net>\n URL: <http://nltk.org/>\n For license information, see LICENSE.TXT -->\n <head>\n <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />\n <title>Untitled Document</title>\n </head>\n <body>\n <form method="GET" action="search" target="body">\n Current Word:&nbsp;<input type="text" id="currentWord" size="10" disabled>\n Next Word:&nbsp;<input type="text" id="nextWord" name="nextWord" size="10">\n <input name="searchButton" type="submit" value="Search">\n </form>\n <a target="body" href="web_help.html">Help</a>\n %s\n\n</body>\n</html>\n'
    shutdown_link = '<a href="SHUTDOWN THE SERVER">Shutdown</a>' if with_shutdown else ''
    return (template % shutdown_link)
[ "def", "get_static_upper_page", "(", "with_shutdown", ")", ":", "template", "=", "'\\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">\\n<html>\\n <!-- Natural Language Toolkit: Wordnet Interface: Graphical Wordnet Browser\\n Copyright (C) 2001...
return the upper frame page .
train
false
25,641
def timezone_today():
    """Return today's date, honouring the active time zone when USE_TZ is set."""
    return timezone.localdate() if settings.USE_TZ else datetime.date.today()
[ "def", "timezone_today", "(", ")", ":", "if", "settings", ".", "USE_TZ", ":", "return", "timezone", ".", "localdate", "(", ")", "else", ":", "return", "datetime", ".", "date", ".", "today", "(", ")" ]
return the current date in the current time zone .
train
false
25,643
def get_action_by_ref(ref):
    """Return the Action DB object for the given string reference.

    Returns None (after logging at debug level) when the ref is malformed.
    """
    try:
        return Action.get_by_ref(ref)
    except ValueError as e:
        LOG.debug(('Database lookup for ref="%s" resulted ' + 'in exception : %s.'), ref, e, exc_info=True)
        return None
[ "def", "get_action_by_ref", "(", "ref", ")", ":", "try", ":", "return", "Action", ".", "get_by_ref", "(", "ref", ")", "except", "ValueError", "as", "e", ":", "LOG", ".", "debug", "(", "(", "'Database lookup for ref=\"%s\" resulted '", "+", "'in exception : %s.'"...
returns the action object from db given a string ref .
train
false
25,644
def get_dir_path(sibling):
    """Return the directory that contains both this script and *sibling*.

    Tries the symlink-resolved location of this file first, then the raw
    location, and raises ValueError when neither contains the sibling.
    """
    py_file = __file__.replace('.pyc', '.py')
    candidates = [
        os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
        os.path.abspath(os.path.dirname(py_file)),
    ]
    for candidate in candidates:
        if os.path.exists(os.path.join(candidate, sibling)):
            return candidate
    raise ValueError(('Could not determine directory that contains both, this file and %s.' % sibling))
[ "def", "get_dir_path", "(", "sibling", ")", ":", "py_file", "=", "__file__", ".", "replace", "(", "'.pyc'", ",", "'.py'", ")", "dir_paths", "=", "[", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ...
get a path to the directory of this script .
train
false
25,645
def create_canvas(width, height, bgcolor=u'#ffffff'):
    """Create a blank QImage canvas of the given size, filled with bgcolor."""
    img = QImage(width, height, QImage.Format_RGB32)
    img.fill(QColor(bgcolor))
    return img
[ "def", "create_canvas", "(", "width", ",", "height", ",", "bgcolor", "=", "u'#ffffff'", ")", ":", "img", "=", "QImage", "(", "width", ",", "height", ",", "QImage", ".", "Format_RGB32", ")", "img", ".", "fill", "(", "QColor", "(", "bgcolor", ")", ")", ...
create a blank canvas of the specified size and color .
train
false
25,647
def _SetOutputFormat(output_format):
    """Set the module's output format on the shared cpplint state object."""
    _cpplint_state.SetOutputFormat(output_format)
[ "def", "_SetOutputFormat", "(", "output_format", ")", ":", "_cpplint_state", ".", "SetOutputFormat", "(", "output_format", ")" ]
sets the modules output format .
train
false
25,649
def action_sync():
    """Sync the blogs in the planet (runs inside a bound application context)."""
    from plnt.sync import sync
    make_app().bind_to_context()
    sync()
[ "def", "action_sync", "(", ")", ":", "from", "plnt", ".", "sync", "import", "sync", "make_app", "(", ")", ".", "bind_to_context", "(", ")", "sync", "(", ")" ]
sync the blogs in the planet .
train
false
25,650
@synchronized(CONFIG_LOCK)
def delete_from_database(section, keyword):
    """Remove section/keyword from the in-memory database and the INI config.

    Marks the configuration as modified so it is written out later.
    """
    global database, CFG, modified
    del database[section][keyword]
    if ((section == 'servers') and ('[' in keyword)):
        # INI keys cannot contain square brackets; server names are stored
        # with braces substituted instead.
        keyword = keyword.replace('[', '{').replace(']', '}')
    try:
        del CFG[section][keyword]
    except KeyError:
        # Entry may never have been persisted to the INI file; that's fine.
        pass
    modified = True
[ "@", "synchronized", "(", "CONFIG_LOCK", ")", "def", "delete_from_database", "(", "section", ",", "keyword", ")", ":", "global", "database", ",", "CFG", ",", "modified", "del", "database", "[", "section", "]", "[", "keyword", "]", "if", "(", "(", "section"...
remove section/keyword from ini database .
train
false
25,652
def configure_service(service, case_name, service_name):
    """Set up a mock HTTP client on a v1 GData service to reuse recorded sessions.

    When running live and no auth token has been cached yet, performs a real
    ClientLogin (recording the session) and stores the token; otherwise the
    previously recorded token is installed on the service.
    """
    service.http_client.v2_http_client = atom.mock_http_core.MockHttpClient()
    service.http_client.v2_http_client.cache_case_name = case_name
    auth_token_key = ('service_%s_auth_token' % service_name)
    if ((auth_token_key not in options.values) and (options.get_value('runlive') == 'true')):
        service.http_client.v2_http_client.cache_test_name = 'client_login'
        cache_name = service.http_client.v2_http_client.get_cache_file_name()
        if (options.get_value('clearcache') == 'true'):
            # Force a fresh recording of the login exchange.
            service.http_client.v2_http_client.delete_session(cache_name)
        service.http_client.v2_http_client.use_cached_session(cache_name)
        service.ClientLogin(options.get_value('username'), options.get_value('password'), service=service_name, source=case_name)
        options.values[auth_token_key] = service.GetClientLoginToken()
        service.http_client.v2_http_client.close_session()
    if (auth_token_key in options.values):
        service.SetClientLoginToken(options.values[auth_token_key])
[ "def", "configure_service", "(", "service", ",", "case_name", ",", "service_name", ")", ":", "service", ".", "http_client", ".", "v2_http_client", "=", "atom", ".", "mock_http_core", ".", "MockHttpClient", "(", ")", "service", ".", "http_client", ".", "v2_http_c...
sets up a mock gdataservice v1 client to reuse recorded sessions .
train
false
25,653
def _stackcopy(a, b): if (a.ndim == 3): a[:] = b[:, :, np.newaxis] else: a[:] = b
[ "def", "_stackcopy", "(", "a", ",", "b", ")", ":", "if", "(", "a", ".", "ndim", "==", "3", ")", ":", "a", "[", ":", "]", "=", "b", "[", ":", ",", ":", ",", "np", ".", "newaxis", "]", "else", ":", "a", "[", ":", "]", "=", "b" ]
copy b into each color layer of a .
train
false
25,654
def linear_sum_assignment(cost_matrix):
    """Solve the linear sum assignment problem (Hungarian algorithm).

    Returns:
        (row_ind, col_ind) index arrays such that
        cost_matrix[row_ind, col_ind].sum() is minimised.

    Raises:
        ValueError: if cost_matrix is not 2-dimensional.
    """
    cost_matrix = np.asarray(cost_matrix)
    if (len(cost_matrix.shape) != 2):
        raise ValueError(('expected a matrix (2-d array), got a %r array' % (cost_matrix.shape,)))
    # The algorithm assumes at least as many columns as rows; transpose if not
    # and remember to transpose the result back.
    if (cost_matrix.shape[1] < cost_matrix.shape[0]):
        cost_matrix = cost_matrix.T
        transposed = True
    else:
        transposed = False
    state = _Hungary(cost_matrix)
    # Empty matrices need no work; otherwise run the step functions until one
    # of them returns None.
    step = (None if (0 in cost_matrix.shape) else _step1)
    while (step is not None):
        step = step(state)
    if transposed:
        marked = state.marked.T
    else:
        marked = state.marked
    # Starred (value 1) entries of the mark matrix form the optimal assignment.
    return np.where((marked == 1))
[ "def", "linear_sum_assignment", "(", "cost_matrix", ")", ":", "cost_matrix", "=", "np", ".", "asarray", "(", "cost_matrix", ")", "if", "(", "len", "(", "cost_matrix", ".", "shape", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "(", "'expected a mat...
solve the linear sum assignment problem .
train
false
25,656
def _CheckDate(date):
    """Validate that *date* lies within the supported [MIN_DATE, MAX_DATE] range.

    Accepts datetime.datetime or datetime.date (the bounds are converted to
    plain dates for the latter) and returns the value unchanged.

    Raises:
        TypeError: for out-of-range values or unsupported types.
    """
    # Check datetime first: datetime is a subclass of date, so order matters.
    if isinstance(date, datetime.datetime):
        lower, upper = MIN_DATE, MAX_DATE
    elif isinstance(date, datetime.date):
        lower, upper = MIN_DATE.date(), MAX_DATE.date()
    else:
        raise TypeError('date must be datetime.datetime or datetime.date')
    if date < lower or date > upper:
        raise TypeError(('date must be between %s and %s (got %s)' % (MIN_DATE, MAX_DATE, date)))
    return date
[ "def", "_CheckDate", "(", "date", ")", ":", "if", "isinstance", "(", "date", ",", "datetime", ".", "datetime", ")", ":", "if", "(", "(", "date", "<", "MIN_DATE", ")", "or", "(", "date", ">", "MAX_DATE", ")", ")", ":", "raise", "TypeError", "(", "("...
checks the date is in the correct range .
train
false
25,658
def create_user(name, password, *privileges):
    """Create (ensure) an iLO user with the given name, password and privileges.

    Privilege names not present in _priv are silently ignored; matching is
    case-insensitive.
    """
    _priv = ['ADMIN_PRIV', 'REMOTE_CONS_PRIV', 'RESET_SERVER_PRIV', 'VIRTUAL_MEDIA_PRIV', 'CONFIG_ILO_PRIV']
    # RIBCL payload: enforce the minimum password length, then add the user
    # with one "<PRIV value='Y'/>" element per recognised privilege.
    _xml = '<RIBCL version="2.2">\n <LOGIN USER_LOGIN="x" PASSWORD="y">\n <RIB_INFO mode="write">\n <MOD_GLOBAL_SETTINGS>\n <MIN_PASSWORD VALUE="7"/>\n </MOD_GLOBAL_SETTINGS>\n </RIB_INFO>\n\n <USER_INFO MODE="write">\n <ADD_USER USER_NAME="{0}" USER_LOGIN="{0}" PASSWORD="{1}">\n {2}\n </ADD_USER>\n </USER_INFO>\n </LOGIN>\n </RIBCL>'.format(name, password, '\n'.join(['<{0} value="Y" />'.format(i.upper()) for i in privileges if (i.upper() in _priv)]))
    return __execute_cmd('Create_user', _xml)
[ "def", "create_user", "(", "name", ",", "password", ",", "*", "privileges", ")", ":", "_priv", "=", "[", "'ADMIN_PRIV'", ",", "'REMOTE_CONS_PRIV'", ",", "'RESET_SERVER_PRIV'", ",", "'VIRTUAL_MEDIA_PRIV'", ",", "'CONFIG_ILO_PRIV'", "]", "_xml", "=", "'<RIBCL versio...
create/ensure a user is created with provided settings .
train
true
25,659
@utils.arg('--host', dest='host', metavar='<host>', help=_('Fetch migrations for the given host.'))
@utils.arg('--status', dest='status', metavar='<status>', help=_('Fetch migrations for the given status.'))
@utils.arg('--cell_name', dest='cell_name', metavar='<cell_name>', help=_('Fetch migrations for the given cell_name.'))
def do_migration_list(cs, args):
    """Print a list of migrations, optionally filtered by host, status or cell."""
    migrations = cs.migrations.list(args.host, args.status, args.cell_name)
    _print_migrations(cs, migrations)
[ "@", "utils", ".", "arg", "(", "'--host'", ",", "dest", "=", "'host'", ",", "metavar", "=", "'<host>'", ",", "help", "=", "_", "(", "'Fetch migrations for the given host.'", ")", ")", "@", "utils", ".", "arg", "(", "'--status'", ",", "dest", "=", "'statu...
print a list of migrations .
train
false
25,660
def test_jt_configuration(cluster):
    """Check JobTracker configuration: port validity, then plugin reachability.

    Returns:
        A list of (config, message) error tuples; empty means everything is OK.
    """
    err = validate_port(cluster.JT_THRIFT_PORT)
    if err:
        return err
    try:
        # A successful runtime_info() call proves the thrift plugin responds.
        jt = LiveJobTracker.from_conf(cluster)
        jt.runtime_info()
    except TTransport.TTransportException:
        msg = ('Failed to contact JobTracker plugin at %s:%s.' % (cluster.HOST.get(), cluster.JT_THRIFT_PORT.get()))
        return [(cluster, msg)]
    return []
[ "def", "test_jt_configuration", "(", "cluster", ")", ":", "err", "=", "validate_port", "(", "cluster", ".", "JT_THRIFT_PORT", ")", "if", "err", ":", "return", "err", "try", ":", "jt", "=", "LiveJobTracker", ".", "from_conf", "(", "cluster", ")", "jt", ".",...
test fs configuration .
train
false
25,662
def call_silently(*args, **kwargs):
    """Like subprocess.call(), but with stdout and stderr discarded.

    Returns:
        The command's exit code, or 1 when the command could not be launched
        at all (e.g. executable not found).
    """
    with open(os.devnull, 'w') as shutup:
        try:
            return subprocess.call(stdout=shutup, stderr=shutup, *args, **kwargs)
        except OSError:
            # BUGFIX: the original caught WindowsError, which is undefined on
            # POSIX Python 3 and raised NameError there. On Windows,
            # WindowsError is an alias of OSError, so this is equivalent.
            return 1
[ "def", "call_silently", "(", "*", "args", ",", "**", "kwargs", ")", ":", "with", "open", "(", "os", ".", "devnull", ",", "u'w'", ")", "as", "shutup", ":", "try", ":", "return", "subprocess", ".", "call", "(", "stdout", "=", "shutup", ",", "stderr", ...
like subprocess .
train
false
25,663
def auth_is_anon_user(context):
    """Return True when the context carries no authenticated user.

    E.g. not logged in (web request) or no user set in the context (logic
    functions called directly).
    """
    return not context.get('user')
[ "def", "auth_is_anon_user", "(", "context", ")", ":", "context_user", "=", "context", ".", "get", "(", "'user'", ")", "is_anon_user", "=", "(", "not", "bool", "(", "context_user", ")", ")", "return", "is_anon_user" ]
is this an anonymous user? e.g. not logged in (if a web request) or no user defined in context (if logic functions are called directly); see ckan/lib/base .
train
false
25,664
def pad_size(n, k):
    """Return the smallest amount to add to n so the sum is a multiple of k."""
    remainder = n % k
    return (k - remainder) if remainder else 0
[ "def", "pad_size", "(", "n", ",", "k", ")", ":", "if", "(", "n", "%", "k", ")", ":", "return", "(", "k", "-", "(", "n", "%", "k", ")", ")", "else", ":", "return", "0" ]
the smallest number that has to be added to n to equal a multiple of k .
train
false
25,667
def rarfile_set_path_sep(separator):
    """Set the path separator on the rarfile module (no-op when unavailable)."""
    if rarfile:
        rarfile.PATH_SEP = separator
[ "def", "rarfile_set_path_sep", "(", "separator", ")", ":", "if", "rarfile", ":", "rarfile", ".", "PATH_SEP", "=", "separator" ]
set the path separator on rarfile module .
train
false
25,668
def unused():
    """Reference tkMessageBox so the import is not flagged as unused."""
    tkMessageBox.showinfo(u'')
[ "def", "unused", "(", ")", ":", "tkMessageBox", ".", "showinfo", "(", "u''", ")" ]
just prevent unused warnings .
train
false
25,669
def ip4_interfaces():
    """Grain: map each connected interface to a list of its IPv4 addresses.

    Returns:
        {'ip4_interfaces': {iface_name: [address, ...], ...}}
    """
    if salt.utils.is_proxy():
        # Proxy minions have no local interfaces to report.
        return {}
    ret = {}
    ifaces = _get_interfaces()
    for face in ifaces:
        iface_ips = []
        for inet in ifaces[face].get('inet', []):
            if ('address' in inet):
                iface_ips.append(inet['address'])
        # Secondary (alias) addresses count toward the interface too.
        for secondary in ifaces[face].get('secondary', []):
            if ('address' in secondary):
                iface_ips.append(secondary['address'])
        ret[face] = iface_ips
    return {'ip4_interfaces': ret}
[ "def", "ip4_interfaces", "(", ")", ":", "if", "salt", ".", "utils", ".", "is_proxy", "(", ")", ":", "return", "{", "}", "ret", "=", "{", "}", "ifaces", "=", "_get_interfaces", "(", ")", "for", "face", "in", "ifaces", ":", "iface_ips", "=", "[", "]"...
provide a dict of the connected interfaces and their ip4 addresses the addresses will be passed as a list for each interface .
train
true
25,671
def variable_name_from_full_name(full_name):
    """Extract the variable name from a full Runtime Config resource name.

    Expects "projects/{proj}/configs/{cfg}/variables/<name>" where <name>
    may itself contain slashes.

    Raises:
        ValueError: when the name does not match the expected layout.
    """
    projects, _, configs, _, variables, variable_name = full_name.split('/', 5)
    if (projects, configs, variables) != ('projects', 'configs', 'variables'):
        raise ValueError('Unexpected format of resource', full_name, 'Expected "projects/{proj}/configs/{cfg}/variables/..."')
    return variable_name
[ "def", "variable_name_from_full_name", "(", "full_name", ")", ":", "(", "projects", ",", "_", ",", "configs", ",", "_", ",", "variables", ",", "result", ")", "=", "full_name", ".", "split", "(", "'/'", ",", "5", ")", "if", "(", "(", "projects", "!=", ...
extract the variable name from a full resource name .
train
true
25,672
@post('/option/<taskid>/get')
def option_get(taskid):
    """REST handler: return the value of one option for the given task id.

    Responds with {'success': False, ...} for unknown task ids or options.
    """
    if (taskid not in DataStore.tasks):
        logger.warning(('[%s] Invalid task ID provided to option_get()' % taskid))
        return jsonize({'success': False, 'message': 'Invalid task ID'})
    option = request.json.get('option', '')
    if (option in DataStore.tasks[taskid].options):
        logger.debug(('[%s] Retrieved value for option %s' % (taskid, option)))
        return jsonize({'success': True, option: DataStore.tasks[taskid].get_option(option)})
    else:
        logger.debug(('[%s] Requested value for unknown option %s' % (taskid, option)))
        return jsonize({'success': False, 'message': 'Unknown option', option: 'not set'})
[ "@", "post", "(", "'/option/<taskid>/get'", ")", "def", "option_get", "(", "taskid", ")", ":", "if", "(", "taskid", "not", "in", "DataStore", ".", "tasks", ")", ":", "logger", ".", "warning", "(", "(", "'[%s] Invalid task ID provided to option_get()'", "%", "t...
get the value of an option for a certain task id .
train
false
25,673
def del_before_definition(rec):
    """Regression test: reveals a compiler bug emitting DEL on an uninitialized variable.

    Progress is recorded via rec.mark(); the control flow is deliberately
    contorted (loop else-clauses, early returns) to trigger the bug, so the
    'FAILED' marks should be unreachable and the function returns 2 from the
    i == 2 branch.
    """
    n = 5
    for i in range(n):
        rec.mark(str(i))
        n = 0
        # n is now 0, so this inner loop body never runs and its else fires.
        for j in range(n):
            return 0
        else:
            if (i < 2):
                continue
            elif (i == 2):
                for j in range(i):
                    return i
            rec.mark('FAILED')
        rec.mark('FAILED')
    rec.mark('FAILED')
    rec.mark('OK')
    return (-1)
[ "def", "del_before_definition", "(", "rec", ")", ":", "n", "=", "5", "for", "i", "in", "range", "(", "n", ")", ":", "rec", ".", "mark", "(", "str", "(", "i", ")", ")", "n", "=", "0", "for", "j", "in", "range", "(", "n", ")", ":", "return", ...
this test reveal a bug that there is a del on uninitialized variable .
train
false
25,674
@require_POST
@login_required
@permitted
def un_flag_abuse_for_thread(request, course_id, thread_id):
    """Remove an abuse flag from the given thread in the given course.

    Moderators/staff remove every user's flag; other users remove only their own.
    Returns the updated thread content as a JsonResponse.
    """
    user = cc.User.from_django_user(request.user)
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    thread = cc.Thread.find(thread_id)
    # Staff and thread moderators clear all flags, not just the caller's.
    remove_all = bool((has_permission(request.user, 'openclose_thread', course_key) or has_access(request.user, 'staff', course)))
    thread.unFlagAbuse(user, thread, remove_all)
    return JsonResponse(prepare_content(thread.to_dict(), course_key))
[ "@", "require_POST", "@", "login_required", "@", "permitted", "def", "un_flag_abuse_for_thread", "(", "request", ",", "course_id", ",", "thread_id", ")", ":", "user", "=", "cc", ".", "User", ".", "from_django_user", "(", "request", ".", "user", ")", "course_ke...
given a course id and thread id .
train
false
25,675
def coalesce_table_labels(attributes, onrows, oncolumns):
    """Return an (onrows, oncolumns) pair with any missing side filled in.

    A missing/empty side is completed with every attribute not claimed by
    the other side; when both are given, they are returned unchanged.
    """
    if onrows and oncolumns:
        return (onrows, oncolumns)
    onrows = onrows or []
    oncolumns = oncolumns or []
    if not onrows:
        onrows = [attr for attr in attributes if attr not in oncolumns]
    if not oncolumns:
        oncolumns = [attr for attr in attributes if attr not in onrows]
    return (onrows, oncolumns)
[ "def", "coalesce_table_labels", "(", "attributes", ",", "onrows", ",", "oncolumns", ")", ":", "if", "(", "(", "not", "onrows", ")", "or", "(", "not", "oncolumns", ")", ")", ":", "onrows", "=", "(", "onrows", "or", "[", "]", ")", "oncolumns", "=", "("...
returns a tuple 9onrows .
train
false
25,676
def get_github_emoji():
    """Fetch GitHub's emoji map from its public API.

    Returns:
        The parsed dict of emoji names to image URLs, or None when the
        request fails for any reason.
    """
    try:
        resp = requests.get(u'https://api.github.com/emojis', timeout=30)
    except Exception:
        # Network failure: callers treat None as "no emoji available".
        return None
    return json.loads(resp.text)
[ "def", "get_github_emoji", "(", ")", ":", "try", ":", "resp", "=", "requests", ".", "get", "(", "u'https://api.github.com/emojis'", ",", "timeout", "=", "30", ")", "except", "Exception", ":", "return", "None", "return", "json", ".", "loads", "(", "resp", "...
get githubs usable emoji .
train
false
25,677
def http_form_post_message(message, location, relay_state='', typ='SAMLRequest'):
    """Build an HTTP-POST binding response for a SAML protocol message.

    The HTTP POST binding transmits SAML messages within the base64-encoded
    content of an auto-submitting HTML form.

    Args:
        message: the SAML message (object or string).
        location: form action URL.
        relay_state: opaque RelayState value echoed back by the peer.
        typ: 'SAMLRequest' or 'SAMLResponse' (anything else is passed raw).

    Returns:
        dict with 'headers' (WSGI header list) and 'data' (HTML fragments).
    """
    response = ['<head>', '<title>SAML 2.0 POST</title>', '</head><body>']
    # BUGFIX: `basestring` is Python-2-only; use str and stringify otherwise.
    if not isinstance(message, str):
        message = ('%s' % (message,))
    if (typ == 'SAMLRequest') or (typ == 'SAMLResponse'):
        # BUGFIX: b64encode requires bytes on Python 3; encode/decode so the
        # form field stays a text string.
        _msg = base64.b64encode(message.encode('utf-8')).decode('ascii')
    else:
        _msg = message
    response.append((FORM_SPEC % (location, typ, _msg, relay_state)))
    response.append('<script type="text/javascript">')
    response.append(' window.onload = function ()')
    response.append(' { document.forms[0].submit(); }')
    response.append('</script>')
    response.append('</body>')
    return {'headers': [('Content-type', 'text/html')], 'data': response}
[ "def", "http_form_post_message", "(", "message", ",", "location", ",", "relay_state", "=", "''", ",", "typ", "=", "'SAMLRequest'", ")", ":", "response", "=", "[", "'<head>'", ",", "'<title>SAML 2.0 POST</title>'", ",", "'</head><body>'", "]", "if", "(", "not", ...
the http post binding defines a mechanism by which saml protocol messages may be transmitted within the base64-encoded content of a html form control .
train
false
25,678
def register_generic_role(canonical_name, node_class):
    """Register a role that simply wraps node_class around the role's text."""
    role = GenericRole(canonical_name, node_class)
    register_canonical_role(canonical_name, role)
[ "def", "register_generic_role", "(", "canonical_name", ",", "node_class", ")", ":", "role", "=", "GenericRole", "(", "canonical_name", ",", "node_class", ")", "register_canonical_role", "(", "canonical_name", ",", "role", ")" ]
for roles which simply wrap a given node_class around the text .
train
false
25,679
def PrepareCmake():
    """Ensure a CMake 2.8.8 checkout exists and is built under CMAKE_DIR.

    Clobbers the checkout first when BUILDBOT_CLOBBER=1; skips all work when
    the directory already exists. (Python 2 code: uses print statements.)
    """
    if (os.environ['BUILDBOT_CLOBBER'] == '1'):
        print '@@@BUILD_STEP Clobber CMake checkout@@@'
        shutil.rmtree(CMAKE_DIR)
    # An existing directory is assumed to hold a previously built CMake.
    if os.path.isdir(CMAKE_DIR):
        return
    print '@@@BUILD_STEP Initialize CMake checkout@@@'
    os.mkdir(CMAKE_DIR)
    print '@@@BUILD_STEP Sync CMake@@@'
    CallSubProcess(['git', 'clone', '--depth', '1', '--single-branch', '--branch', 'v2.8.8', '--', 'git://cmake.org/cmake.git', CMAKE_DIR], cwd=CMAKE_DIR)
    print '@@@BUILD_STEP Build CMake@@@'
    CallSubProcess(['/bin/bash', 'bootstrap', ('--prefix=%s' % CMAKE_DIR)], cwd=CMAKE_DIR)
    CallSubProcess(['make', 'cmake'], cwd=CMAKE_DIR)
[ "def", "PrepareCmake", "(", ")", ":", "if", "(", "os", ".", "environ", "[", "'BUILDBOT_CLOBBER'", "]", "==", "'1'", ")", ":", "print", "'@@@BUILD_STEP Clobber CMake checkout@@@'", "shutil", ".", "rmtree", "(", "CMAKE_DIR", ")", "if", "os", ".", "path", ".", ...
build cmake 2 .
train
false
25,680
def get_reply_to_address(message):
    """Construct a Reply-To address encoding the conversation's first message id.

    The id is signed with an HMAC so inbound replies can be authenticated
    before being attached to the original modmail thread.
    """
    # Always key off the first message of the conversation, so all replies
    # land in the same thread.
    if message.first_message:
        first_message = Message._byID(message.first_message, data=True)
    else:
        first_message = message
    email_id = first_message._id36
    email_mac = hmac.new(g.secrets['modmail_email_secret'], email_id, hashlib.sha256).hexdigest()
    reply_id = 'zendeskreply+{email_id}-{email_mac}'.format(email_id=email_id, email_mac=email_mac)
    sr = Subreddit._byID(message.sr_id, data=True)
    return 'r/{subreddit} mail <{reply_id}@{domain}>'.format(subreddit=sr.name, reply_id=reply_id, domain=g.modmail_email_domain)
[ "def", "get_reply_to_address", "(", "message", ")", ":", "if", "message", ".", "first_message", ":", "first_message", "=", "Message", ".", "_byID", "(", "message", ".", "first_message", ",", "data", "=", "True", ")", "else", ":", "first_message", "=", "messa...
construct a reply-to address that encodes the message id .
train
false
25,681
def build_request_spec(ctxt, image, instances, instance_type=None):
    """Build a request_spec dict for the scheduler.

    The flavor is taken from the first instance when not supplied explicitly;
    instance and flavor objects are converted to primitives for RPC transport.
    """
    instance = instances[0]
    if (instance_type is None):
        if isinstance(instance, obj_instance.Instance):
            instance_type = instance.get_flavor()
        else:
            instance_type = flavors.extract_flavor(instance)
    if isinstance(instance, obj_instance.Instance):
        instance = obj_base.obj_to_primitive(instance)
        # system_metadata may be a special mapping type; force a plain dict.
        instance['system_metadata'] = dict(instance.get('system_metadata', {}))
    if isinstance(instance_type, objects.Flavor):
        instance_type = obj_base.obj_to_primitive(instance_type)
        try:
            # Persist flavor details into the instance's system metadata so the
            # scheduler sees them; best-effort when metadata keys are missing.
            flavors.save_flavor_info(instance.get('system_metadata', {}), instance_type)
        except KeyError:
            pass
    request_spec = {'image': (image or {}), 'instance_properties': instance, 'instance_type': instance_type, 'num_instances': len(instances)}
    return jsonutils.to_primitive(request_spec)
[ "def", "build_request_spec", "(", "ctxt", ",", "image", ",", "instances", ",", "instance_type", "=", "None", ")", ":", "instance", "=", "instances", "[", "0", "]", "if", "(", "instance_type", "is", "None", ")", ":", "if", "isinstance", "(", "instance", "...
build a request_spec for the scheduler .
train
false
25,682
def _parse_fmdump_verbose(output):
    """Parse verbose `fmdump` output into a list of fault dicts.

    Each fault starts with a two-line header beginning with 'TIME', parsed by
    _parse_fmdump; subsequent non-blank lines are accumulated into the
    fault's 'details' field.
    """
    result = []
    output = output.split('\n')
    fault = []
    verbose_fault = {}
    for line in output:
        if line.startswith('TIME'):
            fault.append(line)
            if (len(verbose_fault) > 0):
                # A new header terminates the previous fault record.
                result.append(verbose_fault)
                verbose_fault = {}
        elif (len(fault) == 1):
            # Second line of the header: hand both lines to _parse_fmdump.
            fault.append(line)
            verbose_fault = _parse_fmdump('\n'.join(fault))[0]
            fault = []
        elif (len(verbose_fault) > 0):
            if ('details' not in verbose_fault):
                verbose_fault['details'] = ''
            if (line.strip() == ''):
                continue
            verbose_fault['details'] = '{0}{1}\n'.format(verbose_fault['details'], line)
    # Flush the final fault, which no new header terminated.
    if (len(verbose_fault) > 0):
        result.append(verbose_fault)
    return result
[ "def", "_parse_fmdump_verbose", "(", "output", ")", ":", "result", "=", "[", "]", "output", "=", "output", ".", "split", "(", "'\\n'", ")", "fault", "=", "[", "]", "verbose_fault", "=", "{", "}", "for", "line", "in", "output", ":", "if", "line", ".",...
parses fmdump verbose output .
train
true
25,692
def list_folders(kwargs=None, call=None):
    """salt-cloud function: list all folders in this VMware environment.

    Must be invoked with -f/--function, e.g.:
        salt-cloud -f list_folders my-vmware-config
    """
    if (call != 'function'):
        raise SaltCloudSystemExit('The list_folders function must be called with -f or --function.')
    return {'Folders': salt.utils.vmware.list_folders(_get_si())}
[ "def", "list_folders", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_folders function must be called with -f or --function.'", ")", "return", "{", "'...
list all the folders for this vmware environment cli example: .
train
true
25,693
def xmlrpc_return(start_response, service, method, params, string_faultcode=False):
    """Dispatch an RPC call and return it as an XML-RPC WSGI response.

    On failure the exception is encoded as an XML-RPC fault, using a
    string faultcode when *string_faultcode* is true, an int otherwise.
    """
    try:
        result = odoo.http.dispatch_rpc(service, method, params)
        response = xmlrpclib.dumps((result,), methodresponse=1, allow_none=False, encoding=None)
    except Exception as exc:
        # Select the fault encoder matching the requested faultcode style.
        handler = xmlrpc_handle_exception_string if string_faultcode else xmlrpc_handle_exception_int
        response = handler(exc)
    headers = [('Content-Type', 'text/xml'), ('Content-Length', str(len(response)))]
    start_response('200 OK', headers)
    return [response]
[ "def", "xmlrpc_return", "(", "start_response", ",", "service", ",", "method", ",", "params", ",", "string_faultcode", "=", "False", ")", ":", "try", ":", "result", "=", "odoo", ".", "http", ".", "dispatch_rpc", "(", "service", ",", "method", ",", "params",...
Helper to call a service's method with some params and return the XML-RPC response.
train
false
25,694
def get_parameter_by_name(device, name):
    """Return the first parameter of *device* whose ``original_name``
    equals *name*, or ``None`` when no parameter matches."""
    return next(
        (param for param in device.parameters if param.original_name == name),
        None,
    )
[ "def", "get_parameter_by_name", "(", "device", ",", "name", ")", ":", "for", "i", "in", "device", ".", "parameters", ":", "if", "(", "i", ".", "original_name", "==", "name", ")", ":", "return", "i" ]
Find the given device's parameter that has the given name.
train
false
25,695
def test_correct_pip_version(script):
    """Check we are importing pip from the right place."""
    result = script.pip('--version')
    # The version banner ends with "from <dir> (python X.Y)"; group 4 is <dir>.
    banner = re.match('pip \\d(\\.[\\d])+(\\.?(rc|dev|pre|post)\\d+)? from (.*) \\(python \\d(.[\\d])+\\)$', result.stdout)
    reported_dir = banner.group(4)
    source_pip = join(SRC_DIR, 'pip')
    reported_pip = join(reported_dir, 'pip')
    # Any .py file present on only one side, or differing, is a mismatch.
    comparison = filecmp.dircmp(source_pip, reported_pip)
    suspects = comparison.left_only + comparison.right_only + comparison.diff_files
    mismatched = [name for name in suspects if name.endswith('.py')]
    assert not mismatched, ('mismatched source files in %r and %r: %r' % (source_pip, reported_pip, mismatched))
[ "def", "test_correct_pip_version", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'--version'", ")", "dir", "=", "re", ".", "match", "(", "'pip \\\\d(\\\\.[\\\\d])+(\\\\.?(rc|dev|pre|post)\\\\d+)? from (.*) \\\\(python \\\\d(.[\\\\d])+\\\\)$'", ",", "...
Check that we are importing pip from the right place.
train
false
25,696
def record_user_created_an_exploration(user_id):
    """Set last_created_an_exploration to the current UTC datetime for the
    user with the given user_id. No-op when no settings exist for the user."""
    settings_model = get_user_settings(user_id)
    if not settings_model:
        return
    settings_model.last_created_an_exploration = datetime.datetime.utcnow()
    _save_user_settings(settings_model)
[ "def", "record_user_created_an_exploration", "(", "user_id", ")", ":", "user_settings", "=", "get_user_settings", "(", "user_id", ")", "if", "user_settings", ":", "user_settings", ".", "last_created_an_exploration", "=", "datetime", ".", "datetime", ".", "utcnow", "("...
Updates last_created_an_exploration to the current datetime for the user with the given user_id.
train
false
25,697
def minimals(A, le=(lambda x, y: (x <= y))):
    """Find the minimal element(s) of a sequence of partially ordered
    elements.

    An element is minimal when no other element is strictly below it under
    *le*; among mutually equivalent minimal elements only the first
    encountered is kept.
    """
    found = []
    for candidate in A:
        # Skip candidates strictly dominated by some other element.
        dominated = any(
            le(other, candidate) and not le(candidate, other) for other in A
        )
        if dominated:
            continue
        # Skip candidates equivalent to an already-kept minimal element.
        if not any(le(kept, candidate) for kept in found):
            found.append(candidate)
    return found
[ "def", "minimals", "(", "A", ",", "le", "=", "(", "lambda", "x", ",", "y", ":", "(", "x", "<=", "y", ")", ")", ")", ":", "r", "=", "[", "]", "for", "x", "in", "A", ":", "for", "a", "in", "A", ":", "if", "(", "le", "(", "a", ",", "x", ...
Find the minimal element(s) of a sequence of partially ordered elements.
train
false
25,698
def formset_view(request):
    """A view that tests a simple formset.

    POST renders a validity message (with errors when invalid); GET renders
    the base formset bound to the query string.
    """
    if request.method == 'POST':
        formset = TestFormSet(request.POST)
        if formset.is_valid():
            template = Template('Valid POST data.', name='Valid POST Template')
            context = Context()
        else:
            template = Template('Invalid POST data. {{ my_formset.errors }}', name='Invalid POST Template')
            context = Context({'my_formset': formset})
    else:
        # NOTE(review): binds TestForm (not TestFormSet) on GET — presumably
        # intentional for this fixture; confirm against the original tests.
        formset = TestForm(request.GET)
        template = Template('Viewing base formset. {{ my_formset }}.', name='Formset GET Template')
        context = Context({'my_formset': formset})
    return HttpResponse(template.render(context))
[ "def", "formset_view", "(", "request", ")", ":", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "formset", "=", "TestFormSet", "(", "request", ".", "POST", ")", "if", "formset", ".", "is_valid", "(", ")", ":", "t", "=", "Template", "(...
A view that tests a simple formset.
train
false