id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
41,458 | def _incremental_fit_estimator(estimator, X, y, classes, train, test, train_sizes, scorer, verbose):
(train_scores, test_scores) = ([], [])
partitions = zip(train_sizes, np.split(train, train_sizes)[:(-1)])
for (n_train_samples, partial_train) in partitions:
train_subset = train[:n_train_samples]
(X_train, y_train) = _safe_split(estimator, X, y, train_subset)
(X_partial_train, y_partial_train) = _safe_split(estimator, X, y, partial_train)
(X_test, y_test) = _safe_split(estimator, X, y, test, train_subset)
if (y_partial_train is None):
estimator.partial_fit(X_partial_train, classes=classes)
else:
estimator.partial_fit(X_partial_train, y_partial_train, classes=classes)
train_scores.append(_score(estimator, X_train, y_train, scorer))
test_scores.append(_score(estimator, X_test, y_test, scorer))
return np.array((train_scores, test_scores)).T
| [
"def",
"_incremental_fit_estimator",
"(",
"estimator",
",",
"X",
",",
"y",
",",
"classes",
",",
"train",
",",
"test",
",",
"train_sizes",
",",
"scorer",
",",
"verbose",
")",
":",
"(",
"train_scores",
",",
"test_scores",
")",
"=",
"(",
"[",
"]",
",",
"[",
"]",
")",
"partitions",
"=",
"zip",
"(",
"train_sizes",
",",
"np",
".",
"split",
"(",
"train",
",",
"train_sizes",
")",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
"for",
"(",
"n_train_samples",
",",
"partial_train",
")",
"in",
"partitions",
":",
"train_subset",
"=",
"train",
"[",
":",
"n_train_samples",
"]",
"(",
"X_train",
",",
"y_train",
")",
"=",
"_safe_split",
"(",
"estimator",
",",
"X",
",",
"y",
",",
"train_subset",
")",
"(",
"X_partial_train",
",",
"y_partial_train",
")",
"=",
"_safe_split",
"(",
"estimator",
",",
"X",
",",
"y",
",",
"partial_train",
")",
"(",
"X_test",
",",
"y_test",
")",
"=",
"_safe_split",
"(",
"estimator",
",",
"X",
",",
"y",
",",
"test",
",",
"train_subset",
")",
"if",
"(",
"y_partial_train",
"is",
"None",
")",
":",
"estimator",
".",
"partial_fit",
"(",
"X_partial_train",
",",
"classes",
"=",
"classes",
")",
"else",
":",
"estimator",
".",
"partial_fit",
"(",
"X_partial_train",
",",
"y_partial_train",
",",
"classes",
"=",
"classes",
")",
"train_scores",
".",
"append",
"(",
"_score",
"(",
"estimator",
",",
"X_train",
",",
"y_train",
",",
"scorer",
")",
")",
"test_scores",
".",
"append",
"(",
"_score",
"(",
"estimator",
",",
"X_test",
",",
"y_test",
",",
"scorer",
")",
")",
"return",
"np",
".",
"array",
"(",
"(",
"train_scores",
",",
"test_scores",
")",
")",
".",
"T"
] | train estimator on training subsets incrementally and compute scores . | train | false |
41,459 | def get_english_lang():
try:
return Language.objects.get_default().id
except (Language.DoesNotExist, OperationalError):
return 65535
| [
"def",
"get_english_lang",
"(",
")",
":",
"try",
":",
"return",
"Language",
".",
"objects",
".",
"get_default",
"(",
")",
".",
"id",
"except",
"(",
"Language",
".",
"DoesNotExist",
",",
"OperationalError",
")",
":",
"return",
"65535"
] | returns object id for english language . | train | false |
41,460 | def _read_config(conf_file=None):
if (conf_file is None):
paths = ('/etc/supervisor/supervisord.conf', '/etc/supervisord.conf')
for path in paths:
if os.path.exists(path):
conf_file = path
break
if (conf_file is None):
raise CommandExecutionError('No suitable config file found')
config = configparser.ConfigParser()
try:
config.read(conf_file)
except (IOError, OSError) as exc:
raise CommandExecutionError('Unable to read from {0}: {1}'.format(conf_file, exc))
return config
| [
"def",
"_read_config",
"(",
"conf_file",
"=",
"None",
")",
":",
"if",
"(",
"conf_file",
"is",
"None",
")",
":",
"paths",
"=",
"(",
"'/etc/supervisor/supervisord.conf'",
",",
"'/etc/supervisord.conf'",
")",
"for",
"path",
"in",
"paths",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"conf_file",
"=",
"path",
"break",
"if",
"(",
"conf_file",
"is",
"None",
")",
":",
"raise",
"CommandExecutionError",
"(",
"'No suitable config file found'",
")",
"config",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"try",
":",
"config",
".",
"read",
"(",
"conf_file",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
"as",
"exc",
":",
"raise",
"CommandExecutionError",
"(",
"'Unable to read from {0}: {1}'",
".",
"format",
"(",
"conf_file",
",",
"exc",
")",
")",
"return",
"config"
] | reads the config file using configparser . | train | true |
41,461 | @require_context
@pick_context_manager_writer
def instance_info_cache_delete(context, instance_uuid):
model_query(context, models.InstanceInfoCache).filter_by(instance_uuid=instance_uuid).soft_delete()
| [
"@",
"require_context",
"@",
"pick_context_manager_writer",
"def",
"instance_info_cache_delete",
"(",
"context",
",",
"instance_uuid",
")",
":",
"model_query",
"(",
"context",
",",
"models",
".",
"InstanceInfoCache",
")",
".",
"filter_by",
"(",
"instance_uuid",
"=",
"instance_uuid",
")",
".",
"soft_delete",
"(",
")"
] | deletes an existing instance_info_cache record . | train | false |
41,462 | def getTransformedVector3(tetragrid, vector3):
if getIsIdentityTetragridOrNone(tetragrid):
return vector3.copy()
return getTransformedVector3Blindly(tetragrid, vector3)
| [
"def",
"getTransformedVector3",
"(",
"tetragrid",
",",
"vector3",
")",
":",
"if",
"getIsIdentityTetragridOrNone",
"(",
"tetragrid",
")",
":",
"return",
"vector3",
".",
"copy",
"(",
")",
"return",
"getTransformedVector3Blindly",
"(",
"tetragrid",
",",
"vector3",
")"
] | get the vector3 multiplied by a matrix . | train | false |
41,463 | def test_password_strength(password, user_inputs=None):
result = zxcvbn.password_strength(password, user_inputs)
result[u'feedback'] = get_feedback(result[u'score'], result[u'match_sequence'])
return result
| [
"def",
"test_password_strength",
"(",
"password",
",",
"user_inputs",
"=",
"None",
")",
":",
"result",
"=",
"zxcvbn",
".",
"password_strength",
"(",
"password",
",",
"user_inputs",
")",
"result",
"[",
"u'feedback'",
"]",
"=",
"get_feedback",
"(",
"result",
"[",
"u'score'",
"]",
",",
"result",
"[",
"u'match_sequence'",
"]",
")",
"return",
"result"
] | wrapper around zxcvbn . | train | false |
41,464 | def sigmaclip(a, low=4.0, high=4.0):
c = np.asarray(a).ravel()
delta = 1
while delta:
c_std = c.std()
c_mean = c.mean()
size = c.size
critlower = (c_mean - (c_std * low))
critupper = (c_mean + (c_std * high))
c = c[((c > critlower) & (c < critupper))]
delta = (size - c.size)
return SigmaclipResult(c, critlower, critupper)
| [
"def",
"sigmaclip",
"(",
"a",
",",
"low",
"=",
"4.0",
",",
"high",
"=",
"4.0",
")",
":",
"c",
"=",
"np",
".",
"asarray",
"(",
"a",
")",
".",
"ravel",
"(",
")",
"delta",
"=",
"1",
"while",
"delta",
":",
"c_std",
"=",
"c",
".",
"std",
"(",
")",
"c_mean",
"=",
"c",
".",
"mean",
"(",
")",
"size",
"=",
"c",
".",
"size",
"critlower",
"=",
"(",
"c_mean",
"-",
"(",
"c_std",
"*",
"low",
")",
")",
"critupper",
"=",
"(",
"c_mean",
"+",
"(",
"c_std",
"*",
"high",
")",
")",
"c",
"=",
"c",
"[",
"(",
"(",
"c",
">",
"critlower",
")",
"&",
"(",
"c",
"<",
"critupper",
")",
")",
"]",
"delta",
"=",
"(",
"size",
"-",
"c",
".",
"size",
")",
"return",
"SigmaclipResult",
"(",
"c",
",",
"critlower",
",",
"critupper",
")"
] | iterative sigma-clipping of array elements . | train | false |
41,466 | def output():
return s3_rest_controller()
| [
"def",
"output",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] | dont display data . | train | false |
41,468 | def _setHTTPReferer():
if conf.referer:
debugMsg = 'setting the HTTP Referer header'
logger.debug(debugMsg)
conf.httpHeaders[HTTP_HEADER.REFERER] = conf.referer
| [
"def",
"_setHTTPReferer",
"(",
")",
":",
"if",
"conf",
".",
"referer",
":",
"debugMsg",
"=",
"'setting the HTTP Referer header'",
"logger",
".",
"debug",
"(",
"debugMsg",
")",
"conf",
".",
"httpHeaders",
"[",
"HTTP_HEADER",
".",
"REFERER",
"]",
"=",
"conf",
".",
"referer"
] | set the http referer . | train | false |
41,469 | def register_class(class_):
manager = manager_of_class(class_)
if (manager is None):
manager = _instrumentation_factory.create_manager_for_cls(class_)
return manager
| [
"def",
"register_class",
"(",
"class_",
")",
":",
"manager",
"=",
"manager_of_class",
"(",
"class_",
")",
"if",
"(",
"manager",
"is",
"None",
")",
":",
"manager",
"=",
"_instrumentation_factory",
".",
"create_manager_for_cls",
"(",
"class_",
")",
"return",
"manager"
] | register class instrumentation . | train | false |
41,470 | def iterload_objects(import_paths):
return map(load_object, import_paths)
| [
"def",
"iterload_objects",
"(",
"import_paths",
")",
":",
"return",
"map",
"(",
"load_object",
",",
"import_paths",
")"
] | load a list of objects . | train | false |
41,471 | def corr_equi(k_vars, rho):
corr = np.empty((k_vars, k_vars))
corr.fill(rho)
corr[np.diag_indices_from(corr)] = 1
return corr
| [
"def",
"corr_equi",
"(",
"k_vars",
",",
"rho",
")",
":",
"corr",
"=",
"np",
".",
"empty",
"(",
"(",
"k_vars",
",",
"k_vars",
")",
")",
"corr",
".",
"fill",
"(",
"rho",
")",
"corr",
"[",
"np",
".",
"diag_indices_from",
"(",
"corr",
")",
"]",
"=",
"1",
"return",
"corr"
] | create equicorrelated correlation matrix with rho on off diagonal parameters k_vars : int number of variables . | train | false |
41,472 | def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, **col_name_col_instance):
if (migrate_engine.name in ['mysql', 'postgresql']):
meta = MetaData()
meta.bind = migrate_engine
t = Table(table_name, meta, autoload=True)
uc = UniqueConstraint(table=t, name=uc_name, *columns)
uc.drop()
else:
_drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name, **col_name_col_instance)
| [
"def",
"drop_unique_constraint",
"(",
"migrate_engine",
",",
"table_name",
",",
"uc_name",
",",
"*",
"columns",
",",
"**",
"col_name_col_instance",
")",
":",
"if",
"(",
"migrate_engine",
".",
"name",
"in",
"[",
"'mysql'",
",",
"'postgresql'",
"]",
")",
":",
"meta",
"=",
"MetaData",
"(",
")",
"meta",
".",
"bind",
"=",
"migrate_engine",
"t",
"=",
"Table",
"(",
"table_name",
",",
"meta",
",",
"autoload",
"=",
"True",
")",
"uc",
"=",
"UniqueConstraint",
"(",
"table",
"=",
"t",
",",
"name",
"=",
"uc_name",
",",
"*",
"columns",
")",
"uc",
".",
"drop",
"(",
")",
"else",
":",
"_drop_unique_constraint_in_sqlite",
"(",
"migrate_engine",
",",
"table_name",
",",
"uc_name",
",",
"**",
"col_name_col_instance",
")"
] | this method drops uc from table and works for mysql . | train | false |
41,473 | def make_compensator(info, from_, to, exclude_comp_chs=False):
if (from_ == to):
return None
if (from_ != 0):
C1 = _make_compensator(info, from_)
comp_from_0 = linalg.inv((np.eye(info['nchan']) - C1))
if (to != 0):
C2 = _make_compensator(info, to)
comp_0_to = (np.eye(info['nchan']) - C2)
if (from_ != 0):
if (to != 0):
comp = np.dot(comp_0_to, comp_from_0)
else:
comp = comp_from_0
else:
comp = comp_0_to
if exclude_comp_chs:
pick = [k for (k, c) in enumerate(info['chs']) if (c['kind'] != FIFF.FIFFV_REF_MEG_CH)]
if (len(pick) == 0):
raise ValueError('Nothing remains after excluding the compensation channels')
comp = comp[pick, :]
return comp
| [
"def",
"make_compensator",
"(",
"info",
",",
"from_",
",",
"to",
",",
"exclude_comp_chs",
"=",
"False",
")",
":",
"if",
"(",
"from_",
"==",
"to",
")",
":",
"return",
"None",
"if",
"(",
"from_",
"!=",
"0",
")",
":",
"C1",
"=",
"_make_compensator",
"(",
"info",
",",
"from_",
")",
"comp_from_0",
"=",
"linalg",
".",
"inv",
"(",
"(",
"np",
".",
"eye",
"(",
"info",
"[",
"'nchan'",
"]",
")",
"-",
"C1",
")",
")",
"if",
"(",
"to",
"!=",
"0",
")",
":",
"C2",
"=",
"_make_compensator",
"(",
"info",
",",
"to",
")",
"comp_0_to",
"=",
"(",
"np",
".",
"eye",
"(",
"info",
"[",
"'nchan'",
"]",
")",
"-",
"C2",
")",
"if",
"(",
"from_",
"!=",
"0",
")",
":",
"if",
"(",
"to",
"!=",
"0",
")",
":",
"comp",
"=",
"np",
".",
"dot",
"(",
"comp_0_to",
",",
"comp_from_0",
")",
"else",
":",
"comp",
"=",
"comp_from_0",
"else",
":",
"comp",
"=",
"comp_0_to",
"if",
"exclude_comp_chs",
":",
"pick",
"=",
"[",
"k",
"for",
"(",
"k",
",",
"c",
")",
"in",
"enumerate",
"(",
"info",
"[",
"'chs'",
"]",
")",
"if",
"(",
"c",
"[",
"'kind'",
"]",
"!=",
"FIFF",
".",
"FIFFV_REF_MEG_CH",
")",
"]",
"if",
"(",
"len",
"(",
"pick",
")",
"==",
"0",
")",
":",
"raise",
"ValueError",
"(",
"'Nothing remains after excluding the compensation channels'",
")",
"comp",
"=",
"comp",
"[",
"pick",
",",
":",
"]",
"return",
"comp"
] | return compensation matrix eg . | train | false |
41,475 | def get_work_dim(*args, **kargs):
raise _stub_error
| [
"def",
"get_work_dim",
"(",
"*",
"args",
",",
"**",
"kargs",
")",
":",
"raise",
"_stub_error"
] | opencl get_work_dim() . | train | false |
41,476 | def sub_pre(e):
reps = [a for a in e.atoms(Add) if a.could_extract_minus_sign()]
reps.sort(key=default_sort_key)
e = e.xreplace(dict(((a, Mul._from_args([S.NegativeOne, (- a)])) for a in reps)))
if isinstance(e, Basic):
negs = {}
for a in sorted(e.atoms(Add), key=default_sort_key):
if ((a in reps) or a.could_extract_minus_sign()):
negs[a] = Mul._from_args([S.One, S.NegativeOne, (- a)])
e = e.xreplace(negs)
return e
| [
"def",
"sub_pre",
"(",
"e",
")",
":",
"reps",
"=",
"[",
"a",
"for",
"a",
"in",
"e",
".",
"atoms",
"(",
"Add",
")",
"if",
"a",
".",
"could_extract_minus_sign",
"(",
")",
"]",
"reps",
".",
"sort",
"(",
"key",
"=",
"default_sort_key",
")",
"e",
"=",
"e",
".",
"xreplace",
"(",
"dict",
"(",
"(",
"(",
"a",
",",
"Mul",
".",
"_from_args",
"(",
"[",
"S",
".",
"NegativeOne",
",",
"(",
"-",
"a",
")",
"]",
")",
")",
"for",
"a",
"in",
"reps",
")",
")",
")",
"if",
"isinstance",
"(",
"e",
",",
"Basic",
")",
":",
"negs",
"=",
"{",
"}",
"for",
"a",
"in",
"sorted",
"(",
"e",
".",
"atoms",
"(",
"Add",
")",
",",
"key",
"=",
"default_sort_key",
")",
":",
"if",
"(",
"(",
"a",
"in",
"reps",
")",
"or",
"a",
".",
"could_extract_minus_sign",
"(",
")",
")",
":",
"negs",
"[",
"a",
"]",
"=",
"Mul",
".",
"_from_args",
"(",
"[",
"S",
".",
"One",
",",
"S",
".",
"NegativeOne",
",",
"(",
"-",
"a",
")",
"]",
")",
"e",
"=",
"e",
".",
"xreplace",
"(",
"negs",
")",
"return",
"e"
] | replace y - x with - if -1 can be extracted from y - x . | train | false |
41,477 | def get_all_access_keys(user_name, marker=None, max_items=None, region=None, key=None, keyid=None, profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
return conn.get_all_access_keys(user_name, marker, max_items)
except boto.exception.BotoServerError as e:
log.debug(e)
log.error("Failed to get user's {0} access keys.".format(user_name))
return str(e)
| [
"def",
"get_all_access_keys",
"(",
"user_name",
",",
"marker",
"=",
"None",
",",
"max_items",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"try",
":",
"return",
"conn",
".",
"get_all_access_keys",
"(",
"user_name",
",",
"marker",
",",
"max_items",
")",
"except",
"boto",
".",
"exception",
".",
"BotoServerError",
"as",
"e",
":",
"log",
".",
"debug",
"(",
"e",
")",
"log",
".",
"error",
"(",
"\"Failed to get user's {0} access keys.\"",
".",
"format",
"(",
"user_name",
")",
")",
"return",
"str",
"(",
"e",
")"
] | get all access keys from a user . | train | true |
41,478 | def react(main, argv=(), _reactor=None):
if (_reactor is None):
from twisted.internet import reactor as _reactor
finished = main(_reactor, *argv)
codes = [0]
stopping = []
_reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True)
def stop(result, stopReactor):
if stopReactor:
try:
_reactor.stop()
except ReactorNotRunning:
pass
if isinstance(result, Failure):
if (result.check(SystemExit) is not None):
code = result.value.code
else:
log.err(result, 'main function encountered error')
code = 1
codes[0] = code
def cbFinish(result):
if stopping:
stop(result, False)
else:
_reactor.callWhenRunning(stop, result, True)
finished.addBoth(cbFinish)
_reactor.run()
sys.exit(codes[0])
| [
"def",
"react",
"(",
"main",
",",
"argv",
"=",
"(",
")",
",",
"_reactor",
"=",
"None",
")",
":",
"if",
"(",
"_reactor",
"is",
"None",
")",
":",
"from",
"twisted",
".",
"internet",
"import",
"reactor",
"as",
"_reactor",
"finished",
"=",
"main",
"(",
"_reactor",
",",
"*",
"argv",
")",
"codes",
"=",
"[",
"0",
"]",
"stopping",
"=",
"[",
"]",
"_reactor",
".",
"addSystemEventTrigger",
"(",
"'before'",
",",
"'shutdown'",
",",
"stopping",
".",
"append",
",",
"True",
")",
"def",
"stop",
"(",
"result",
",",
"stopReactor",
")",
":",
"if",
"stopReactor",
":",
"try",
":",
"_reactor",
".",
"stop",
"(",
")",
"except",
"ReactorNotRunning",
":",
"pass",
"if",
"isinstance",
"(",
"result",
",",
"Failure",
")",
":",
"if",
"(",
"result",
".",
"check",
"(",
"SystemExit",
")",
"is",
"not",
"None",
")",
":",
"code",
"=",
"result",
".",
"value",
".",
"code",
"else",
":",
"log",
".",
"err",
"(",
"result",
",",
"'main function encountered error'",
")",
"code",
"=",
"1",
"codes",
"[",
"0",
"]",
"=",
"code",
"def",
"cbFinish",
"(",
"result",
")",
":",
"if",
"stopping",
":",
"stop",
"(",
"result",
",",
"False",
")",
"else",
":",
"_reactor",
".",
"callWhenRunning",
"(",
"stop",
",",
"result",
",",
"True",
")",
"finished",
".",
"addBoth",
"(",
"cbFinish",
")",
"_reactor",
".",
"run",
"(",
")",
"sys",
".",
"exit",
"(",
"codes",
"[",
"0",
"]",
")"
] | call c{main} and run the reactor until the l{deferred} it returns fires . | train | false |
41,479 | def noHint(str):
return re.sub('(^.*?)( ?\\(.+?\\))?$', '\\1', str)
| [
"def",
"noHint",
"(",
"str",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'(^.*?)( ?\\\\(.+?\\\\))?$'",
",",
"'\\\\1'",
",",
"str",
")"
] | remove translation hint from end of string . | train | false |
41,480 | def theme_data():
field = s3db.gis_layer_theme_id()
field.requires = IS_EMPTY_OR(field.requires)
output = s3_rest_controller(csv_extra_fields=[dict(label='Layer', field=field)])
return output
| [
"def",
"theme_data",
"(",
")",
":",
"field",
"=",
"s3db",
".",
"gis_layer_theme_id",
"(",
")",
"field",
".",
"requires",
"=",
"IS_EMPTY_OR",
"(",
"field",
".",
"requires",
")",
"output",
"=",
"s3_rest_controller",
"(",
"csv_extra_fields",
"=",
"[",
"dict",
"(",
"label",
"=",
"'Layer'",
",",
"field",
"=",
"field",
")",
"]",
")",
"return",
"output"
] | restful crud controller . | train | false |
41,482 | def recursive_set_rng_kwarg(expr, rng=None):
if (rng is None):
rng = np.random.RandomState()
lrng = as_apply(rng)
for node in dfs(expr):
if (node.name in implicit_stochastic_symbols):
for (ii, (name, arg)) in enumerate(list(node.named_args)):
if (name == 'rng'):
node.named_args[ii] = ('rng', lrng)
break
else:
node.named_args.append(('rng', lrng))
return expr
| [
"def",
"recursive_set_rng_kwarg",
"(",
"expr",
",",
"rng",
"=",
"None",
")",
":",
"if",
"(",
"rng",
"is",
"None",
")",
":",
"rng",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
")",
"lrng",
"=",
"as_apply",
"(",
"rng",
")",
"for",
"node",
"in",
"dfs",
"(",
"expr",
")",
":",
"if",
"(",
"node",
".",
"name",
"in",
"implicit_stochastic_symbols",
")",
":",
"for",
"(",
"ii",
",",
"(",
"name",
",",
"arg",
")",
")",
"in",
"enumerate",
"(",
"list",
"(",
"node",
".",
"named_args",
")",
")",
":",
"if",
"(",
"name",
"==",
"'rng'",
")",
":",
"node",
".",
"named_args",
"[",
"ii",
"]",
"=",
"(",
"'rng'",
",",
"lrng",
")",
"break",
"else",
":",
"node",
".",
"named_args",
".",
"append",
"(",
"(",
"'rng'",
",",
"lrng",
")",
")",
"return",
"expr"
] | make all of the stochastic nodes in expr use the rng uniform -> uniform . | train | false |
41,483 | def getSpacedPortionDirections(interpolationDictionary):
portionDirections = []
for interpolationDictionaryValue in interpolationDictionary.values():
portionDirections += interpolationDictionaryValue.portionDirections
portionDirections.sort(comparePortionDirection)
if (len(portionDirections) < 1):
return []
spacedPortionDirections = [portionDirections[0]]
for portionDirection in portionDirections[1:]:
addSpacedPortionDirection(portionDirection, spacedPortionDirections)
return spacedPortionDirections
| [
"def",
"getSpacedPortionDirections",
"(",
"interpolationDictionary",
")",
":",
"portionDirections",
"=",
"[",
"]",
"for",
"interpolationDictionaryValue",
"in",
"interpolationDictionary",
".",
"values",
"(",
")",
":",
"portionDirections",
"+=",
"interpolationDictionaryValue",
".",
"portionDirections",
"portionDirections",
".",
"sort",
"(",
"comparePortionDirection",
")",
"if",
"(",
"len",
"(",
"portionDirections",
")",
"<",
"1",
")",
":",
"return",
"[",
"]",
"spacedPortionDirections",
"=",
"[",
"portionDirections",
"[",
"0",
"]",
"]",
"for",
"portionDirection",
"in",
"portionDirections",
"[",
"1",
":",
"]",
":",
"addSpacedPortionDirection",
"(",
"portionDirection",
",",
"spacedPortionDirections",
")",
"return",
"spacedPortionDirections"
] | get sorted portion directions . | train | false |
41,484 | def get_url_field():
from django.forms import URLField
field = URLField()
try:
field = URLField(verify_exists=False)
except TypeError as e:
pass
return field
| [
"def",
"get_url_field",
"(",
")",
":",
"from",
"django",
".",
"forms",
"import",
"URLField",
"field",
"=",
"URLField",
"(",
")",
"try",
":",
"field",
"=",
"URLField",
"(",
"verify_exists",
"=",
"False",
")",
"except",
"TypeError",
"as",
"e",
":",
"pass",
"return",
"field"
] | this should be compatible with both django 1 . | train | false |
41,485 | def list_to_local(acs, attrlist, allow_unknown_attributes=False):
if (not acs):
acs = [AttributeConverter()]
acsd = {'': acs}
else:
acsd = dict([(a.name_format, a) for a in acs])
ava = {}
for attr in attrlist:
try:
_func = acsd[attr.name_format].ava_from
except KeyError:
if ((attr.name_format == NAME_FORMAT_UNSPECIFIED) or allow_unknown_attributes):
_func = acs[0].lcd_ava_from
else:
logger.info(('Unsupported attribute name format: %s' % (attr.name_format,)))
continue
try:
(key, val) = _func(attr)
except KeyError:
if allow_unknown_attributes:
(key, val) = acs[0].lcd_ava_from(attr)
else:
logger.info(('Unknown attribute name: %s' % (attr,)))
continue
except AttributeError:
continue
try:
ava[key].extend(val)
except KeyError:
ava[key] = val
return ava
| [
"def",
"list_to_local",
"(",
"acs",
",",
"attrlist",
",",
"allow_unknown_attributes",
"=",
"False",
")",
":",
"if",
"(",
"not",
"acs",
")",
":",
"acs",
"=",
"[",
"AttributeConverter",
"(",
")",
"]",
"acsd",
"=",
"{",
"''",
":",
"acs",
"}",
"else",
":",
"acsd",
"=",
"dict",
"(",
"[",
"(",
"a",
".",
"name_format",
",",
"a",
")",
"for",
"a",
"in",
"acs",
"]",
")",
"ava",
"=",
"{",
"}",
"for",
"attr",
"in",
"attrlist",
":",
"try",
":",
"_func",
"=",
"acsd",
"[",
"attr",
".",
"name_format",
"]",
".",
"ava_from",
"except",
"KeyError",
":",
"if",
"(",
"(",
"attr",
".",
"name_format",
"==",
"NAME_FORMAT_UNSPECIFIED",
")",
"or",
"allow_unknown_attributes",
")",
":",
"_func",
"=",
"acs",
"[",
"0",
"]",
".",
"lcd_ava_from",
"else",
":",
"logger",
".",
"info",
"(",
"(",
"'Unsupported attribute name format: %s'",
"%",
"(",
"attr",
".",
"name_format",
",",
")",
")",
")",
"continue",
"try",
":",
"(",
"key",
",",
"val",
")",
"=",
"_func",
"(",
"attr",
")",
"except",
"KeyError",
":",
"if",
"allow_unknown_attributes",
":",
"(",
"key",
",",
"val",
")",
"=",
"acs",
"[",
"0",
"]",
".",
"lcd_ava_from",
"(",
"attr",
")",
"else",
":",
"logger",
".",
"info",
"(",
"(",
"'Unknown attribute name: %s'",
"%",
"(",
"attr",
",",
")",
")",
")",
"continue",
"except",
"AttributeError",
":",
"continue",
"try",
":",
"ava",
"[",
"key",
"]",
".",
"extend",
"(",
"val",
")",
"except",
"KeyError",
":",
"ava",
"[",
"key",
"]",
"=",
"val",
"return",
"ava"
] | replaces the attribute names in a attribute value assertion with the equivalent name from a local name format . | train | true |
41,486 | def get_default_support_url(request=None, force_is_admin=False):
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get(u'send_support_usage_stats'):
support_data = serialize_support_data(request, force_is_admin)
else:
support_data = u''
return (settings.DEFAULT_SUPPORT_URL % {u'support_data': support_data})
| [
"def",
"get_default_support_url",
"(",
"request",
"=",
"None",
",",
"force_is_admin",
"=",
"False",
")",
":",
"siteconfig",
"=",
"SiteConfiguration",
".",
"objects",
".",
"get_current",
"(",
")",
"if",
"siteconfig",
".",
"get",
"(",
"u'send_support_usage_stats'",
")",
":",
"support_data",
"=",
"serialize_support_data",
"(",
"request",
",",
"force_is_admin",
")",
"else",
":",
"support_data",
"=",
"u''",
"return",
"(",
"settings",
".",
"DEFAULT_SUPPORT_URL",
"%",
"{",
"u'support_data'",
":",
"support_data",
"}",
")"
] | return the url for the default review board support page . | train | false |
41,487 | def parse_request_data(request):
try:
data = json.loads((request.body or '{}'))
except ValueError:
raise ValueError(_('The record is not in the correct format. Please add a valid username or email address.'))
return data
| [
"def",
"parse_request_data",
"(",
"request",
")",
":",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"(",
"request",
".",
"body",
"or",
"'{}'",
")",
")",
"except",
"ValueError",
":",
"raise",
"ValueError",
"(",
"_",
"(",
"'The record is not in the correct format. Please add a valid username or email address.'",
")",
")",
"return",
"data"
] | parse and return request data . | train | false |
41,488 | def _extract_files_to_lint(file_diffs):
if (not file_diffs):
return []
lint_files = [f.name for f in file_diffs if (f.status.upper() in 'ACMRT')]
return lint_files
| [
"def",
"_extract_files_to_lint",
"(",
"file_diffs",
")",
":",
"if",
"(",
"not",
"file_diffs",
")",
":",
"return",
"[",
"]",
"lint_files",
"=",
"[",
"f",
".",
"name",
"for",
"f",
"in",
"file_diffs",
"if",
"(",
"f",
".",
"status",
".",
"upper",
"(",
")",
"in",
"'ACMRT'",
")",
"]",
"return",
"lint_files"
] | grab only files out of a list of filediffs that have a acmrt status . | train | false |
41,490 | def test_future_altaz():
from ...utils.exceptions import AstropyWarning
from ..builtin_frames import utils
if hasattr(utils, u'__warningregistry__'):
utils.__warningregistry__.clear()
with catch_warnings() as found_warnings:
location = EarthLocation(lat=(0 * u.deg), lon=(0 * u.deg))
t = Time(u'J2161')
SkyCoord((1 * u.deg), (2 * u.deg)).transform_to(AltAz(location=location, obstime=t))
messages_to_find = [u'Tried to get polar motions for times after IERS data is valid.']
if isinstance(iers.IERS_Auto.iers_table, iers.IERS_B):
messages_to_find.append(u'(some) times are outside of range covered by IERS table.')
messages_found = [False for _ in messages_to_find]
for w in found_warnings:
if issubclass(w.category, AstropyWarning):
for (i, message_to_find) in enumerate(messages_to_find):
if (message_to_find in str(w.message)):
messages_found[i] = True
assert all(messages_found)
| [
"def",
"test_future_altaz",
"(",
")",
":",
"from",
"...",
"utils",
".",
"exceptions",
"import",
"AstropyWarning",
"from",
".",
".",
"builtin_frames",
"import",
"utils",
"if",
"hasattr",
"(",
"utils",
",",
"u'__warningregistry__'",
")",
":",
"utils",
".",
"__warningregistry__",
".",
"clear",
"(",
")",
"with",
"catch_warnings",
"(",
")",
"as",
"found_warnings",
":",
"location",
"=",
"EarthLocation",
"(",
"lat",
"=",
"(",
"0",
"*",
"u",
".",
"deg",
")",
",",
"lon",
"=",
"(",
"0",
"*",
"u",
".",
"deg",
")",
")",
"t",
"=",
"Time",
"(",
"u'J2161'",
")",
"SkyCoord",
"(",
"(",
"1",
"*",
"u",
".",
"deg",
")",
",",
"(",
"2",
"*",
"u",
".",
"deg",
")",
")",
".",
"transform_to",
"(",
"AltAz",
"(",
"location",
"=",
"location",
",",
"obstime",
"=",
"t",
")",
")",
"messages_to_find",
"=",
"[",
"u'Tried to get polar motions for times after IERS data is valid.'",
"]",
"if",
"isinstance",
"(",
"iers",
".",
"IERS_Auto",
".",
"iers_table",
",",
"iers",
".",
"IERS_B",
")",
":",
"messages_to_find",
".",
"append",
"(",
"u'(some) times are outside of range covered by IERS table.'",
")",
"messages_found",
"=",
"[",
"False",
"for",
"_",
"in",
"messages_to_find",
"]",
"for",
"w",
"in",
"found_warnings",
":",
"if",
"issubclass",
"(",
"w",
".",
"category",
",",
"AstropyWarning",
")",
":",
"for",
"(",
"i",
",",
"message_to_find",
")",
"in",
"enumerate",
"(",
"messages_to_find",
")",
":",
"if",
"(",
"message_to_find",
"in",
"str",
"(",
"w",
".",
"message",
")",
")",
":",
"messages_found",
"[",
"i",
"]",
"=",
"True",
"assert",
"all",
"(",
"messages_found",
")"
] | while this does test the full stack . | train | false |
41,492 | def readmailcapfile(fp):
caps = {}
while 1:
line = fp.readline()
if (not line):
break
if ((line[0] == '#') or (line.strip() == '')):
continue
nextline = line
while (nextline[(-2):] == '\\\n'):
nextline = fp.readline()
if (not nextline):
nextline = '\n'
line = (line[:(-2)] + nextline)
(key, fields) = parseline(line)
if (not (key and fields)):
continue
types = key.split('/')
for j in range(len(types)):
types[j] = types[j].strip()
key = '/'.join(types).lower()
if (key in caps):
caps[key].append(fields)
else:
caps[key] = [fields]
return caps
| [
"def",
"readmailcapfile",
"(",
"fp",
")",
":",
"caps",
"=",
"{",
"}",
"while",
"1",
":",
"line",
"=",
"fp",
".",
"readline",
"(",
")",
"if",
"(",
"not",
"line",
")",
":",
"break",
"if",
"(",
"(",
"line",
"[",
"0",
"]",
"==",
"'#'",
")",
"or",
"(",
"line",
".",
"strip",
"(",
")",
"==",
"''",
")",
")",
":",
"continue",
"nextline",
"=",
"line",
"while",
"(",
"nextline",
"[",
"(",
"-",
"2",
")",
":",
"]",
"==",
"'\\\\\\n'",
")",
":",
"nextline",
"=",
"fp",
".",
"readline",
"(",
")",
"if",
"(",
"not",
"nextline",
")",
":",
"nextline",
"=",
"'\\n'",
"line",
"=",
"(",
"line",
"[",
":",
"(",
"-",
"2",
")",
"]",
"+",
"nextline",
")",
"(",
"key",
",",
"fields",
")",
"=",
"parseline",
"(",
"line",
")",
"if",
"(",
"not",
"(",
"key",
"and",
"fields",
")",
")",
":",
"continue",
"types",
"=",
"key",
".",
"split",
"(",
"'/'",
")",
"for",
"j",
"in",
"range",
"(",
"len",
"(",
"types",
")",
")",
":",
"types",
"[",
"j",
"]",
"=",
"types",
"[",
"j",
"]",
".",
"strip",
"(",
")",
"key",
"=",
"'/'",
".",
"join",
"(",
"types",
")",
".",
"lower",
"(",
")",
"if",
"(",
"key",
"in",
"caps",
")",
":",
"caps",
"[",
"key",
"]",
".",
"append",
"(",
"fields",
")",
"else",
":",
"caps",
"[",
"key",
"]",
"=",
"[",
"fields",
"]",
"return",
"caps"
] | read a mailcap file and return a dictionary keyed by mime type . | train | false |
41,493 | @transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def problem_grade_report(request, course_id):
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
lms.djangoapps.instructor_task.api.submit_problem_grade_report(request, course_key)
success_status = _('The problem grade report is being created. To view the status of the report, see Pending Tasks below.')
return JsonResponse({'status': success_status})
except AlreadyRunningError:
already_running_status = _('A problem grade report is already being generated. To view the status of the report, see Pending Tasks below. You will be able to download the report when it is complete.')
return JsonResponse({'status': already_running_status})
| [
"@",
"transaction",
".",
"non_atomic_requests",
"@",
"require_POST",
"@",
"ensure_csrf_cookie",
"@",
"cache_control",
"(",
"no_cache",
"=",
"True",
",",
"no_store",
"=",
"True",
",",
"must_revalidate",
"=",
"True",
")",
"@",
"require_level",
"(",
"'staff'",
")",
"def",
"problem_grade_report",
"(",
"request",
",",
"course_id",
")",
":",
"course_key",
"=",
"SlashSeparatedCourseKey",
".",
"from_deprecated_string",
"(",
"course_id",
")",
"try",
":",
"lms",
".",
"djangoapps",
".",
"instructor_task",
".",
"api",
".",
"submit_problem_grade_report",
"(",
"request",
",",
"course_key",
")",
"success_status",
"=",
"_",
"(",
"'The problem grade report is being created. To view the status of the report, see Pending Tasks below.'",
")",
"return",
"JsonResponse",
"(",
"{",
"'status'",
":",
"success_status",
"}",
")",
"except",
"AlreadyRunningError",
":",
"already_running_status",
"=",
"_",
"(",
"'A problem grade report is already being generated. To view the status of the report, see Pending Tasks below. You will be able to download the report when it is complete.'",
")",
"return",
"JsonResponse",
"(",
"{",
"'status'",
":",
"already_running_status",
"}",
")"
] | request a csv showing students grades for all problems in the course . | train | false |
41,494 | def synchronized(lock=None):
if (lock is None):
lock = threading.Lock()
def wrapper(function):
def new_function(*args, **kwargs):
lock.acquire()
try:
return function(*args, **kwargs)
finally:
lock.release()
return new_function
return wrapper
| [
"def",
"synchronized",
"(",
"lock",
"=",
"None",
")",
":",
"if",
"(",
"lock",
"is",
"None",
")",
":",
"lock",
"=",
"threading",
".",
"Lock",
"(",
")",
"def",
"wrapper",
"(",
"function",
")",
":",
"def",
"new_function",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"lock",
".",
"acquire",
"(",
")",
"try",
":",
"return",
"function",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"finally",
":",
"lock",
".",
"release",
"(",
")",
"return",
"new_function",
"return",
"wrapper"
] | this wrapper will serialize access to func to a single thread . | train | false |
41,495 | def CreateUserSecret(user):
secret = base64.b32encode(os.urandom(_SECRET_BYTES))
secrets.PutSecret(_SecretName(user), secret)
DisplayUserSecret(user)
| [
"def",
"CreateUserSecret",
"(",
"user",
")",
":",
"secret",
"=",
"base64",
".",
"b32encode",
"(",
"os",
".",
"urandom",
"(",
"_SECRET_BYTES",
")",
")",
"secrets",
".",
"PutSecret",
"(",
"_SecretName",
"(",
"user",
")",
",",
"secret",
")",
"DisplayUserSecret",
"(",
"user",
")"
] | generates a random user secret and stores it to the secrets database . | train | false |
41,496 | def LoadSingleQueue(queue_info, open_fn=None):
builder = yaml_object.ObjectBuilder(QueueInfoExternal)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(queue_info)
queue_info = handler.GetResults()
if (len(queue_info) < 1):
raise MalformedQueueConfiguration('Empty queue configuration.')
if (len(queue_info) > 1):
raise MalformedQueueConfiguration('Multiple queue: sections in configuration.')
return queue_info[0]
| [
"def",
"LoadSingleQueue",
"(",
"queue_info",
",",
"open_fn",
"=",
"None",
")",
":",
"builder",
"=",
"yaml_object",
".",
"ObjectBuilder",
"(",
"QueueInfoExternal",
")",
"handler",
"=",
"yaml_builder",
".",
"BuilderHandler",
"(",
"builder",
")",
"listener",
"=",
"yaml_listener",
".",
"EventListener",
"(",
"handler",
")",
"listener",
".",
"Parse",
"(",
"queue_info",
")",
"queue_info",
"=",
"handler",
".",
"GetResults",
"(",
")",
"if",
"(",
"len",
"(",
"queue_info",
")",
"<",
"1",
")",
":",
"raise",
"MalformedQueueConfiguration",
"(",
"'Empty queue configuration.'",
")",
"if",
"(",
"len",
"(",
"queue_info",
")",
">",
"1",
")",
":",
"raise",
"MalformedQueueConfiguration",
"(",
"'Multiple queue: sections in configuration.'",
")",
"return",
"queue_info",
"[",
"0",
"]"
] | load a queue . | train | false |
41,497 | def get_source_index():
try:
exists = STORAGE.index_exists('source')
except OSError:
create_index()
exists = False
if (not exists):
create_source_index()
index = STORAGE.open_index('source')
if ('location' not in index.schema):
index.add_field('location', TEXT())
if ('pk' not in index.schema):
index.add_field('pk', NUMERIC(stored=True, unique=True))
if ('checksum' in index.schema):
index.remove_field('checksum')
return index
| [
"def",
"get_source_index",
"(",
")",
":",
"try",
":",
"exists",
"=",
"STORAGE",
".",
"index_exists",
"(",
"'source'",
")",
"except",
"OSError",
":",
"create_index",
"(",
")",
"exists",
"=",
"False",
"if",
"(",
"not",
"exists",
")",
":",
"create_source_index",
"(",
")",
"index",
"=",
"STORAGE",
".",
"open_index",
"(",
"'source'",
")",
"if",
"(",
"'location'",
"not",
"in",
"index",
".",
"schema",
")",
":",
"index",
".",
"add_field",
"(",
"'location'",
",",
"TEXT",
"(",
")",
")",
"if",
"(",
"'pk'",
"not",
"in",
"index",
".",
"schema",
")",
":",
"index",
".",
"add_field",
"(",
"'pk'",
",",
"NUMERIC",
"(",
"stored",
"=",
"True",
",",
"unique",
"=",
"True",
")",
")",
"if",
"(",
"'checksum'",
"in",
"index",
".",
"schema",
")",
":",
"index",
".",
"remove_field",
"(",
"'checksum'",
")",
"return",
"index"
] | returns source index object . | train | false |
41,498 | def _keys_match(attribute):
def key_match_invariant(pmap):
if (pmap is None):
return (True, '')
for (key, value) in pmap.items():
if (key != getattr(value, attribute)):
return (False, '{} is not correct key for {}'.format(key, value))
return (True, '')
return key_match_invariant
| [
"def",
"_keys_match",
"(",
"attribute",
")",
":",
"def",
"key_match_invariant",
"(",
"pmap",
")",
":",
"if",
"(",
"pmap",
"is",
"None",
")",
":",
"return",
"(",
"True",
",",
"''",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"pmap",
".",
"items",
"(",
")",
":",
"if",
"(",
"key",
"!=",
"getattr",
"(",
"value",
",",
"attribute",
")",
")",
":",
"return",
"(",
"False",
",",
"'{} is not correct key for {}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"return",
"(",
"True",
",",
"''",
")",
"return",
"key_match_invariant"
] | create an invariant for a field holding a pmap . | train | false |
41,499 | def GetRequestCpuUsage():
return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
| [
"def",
"GetRequestCpuUsage",
"(",
")",
":",
"return",
"_apphosting_runtime___python__apiproxy",
".",
"get_request_cpu_usage",
"(",
")"
] | returns the number of megacycles used so far by this request . | train | false |
41,500 | def _to_const(cf):
if isinstance(cf, string_types):
if (cf not in _str_to_frame):
raise ValueError(('Unknown cf %s' % cf))
cf = _str_to_frame[cf]
elif (not isinstance(cf, (int, np.integer))):
raise TypeError(('cf must be str or int, not %s' % type(cf)))
return int(cf)
| [
"def",
"_to_const",
"(",
"cf",
")",
":",
"if",
"isinstance",
"(",
"cf",
",",
"string_types",
")",
":",
"if",
"(",
"cf",
"not",
"in",
"_str_to_frame",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Unknown cf %s'",
"%",
"cf",
")",
")",
"cf",
"=",
"_str_to_frame",
"[",
"cf",
"]",
"elif",
"(",
"not",
"isinstance",
"(",
"cf",
",",
"(",
"int",
",",
"np",
".",
"integer",
")",
")",
")",
":",
"raise",
"TypeError",
"(",
"(",
"'cf must be str or int, not %s'",
"%",
"type",
"(",
"cf",
")",
")",
")",
"return",
"int",
"(",
"cf",
")"
] | convert string or int coord frame into int . | train | false |
41,501 | def at_server_cold_start():
pass
| [
"def",
"at_server_cold_start",
"(",
")",
":",
"pass"
] | this is called only when the server starts "cold" . | train | false |
41,502 | def getConnectError(e):
if isinstance(e, Exception):
args = e.args
else:
args = e
try:
(number, string) = args
except ValueError:
return ConnectError(string=e)
if (hasattr(socket, 'gaierror') and isinstance(e, socket.gaierror)):
klass = UnknownHostError
else:
klass = errnoMapping.get(number, ConnectError)
return klass(number, string)
| [
"def",
"getConnectError",
"(",
"e",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"Exception",
")",
":",
"args",
"=",
"e",
".",
"args",
"else",
":",
"args",
"=",
"e",
"try",
":",
"(",
"number",
",",
"string",
")",
"=",
"args",
"except",
"ValueError",
":",
"return",
"ConnectError",
"(",
"string",
"=",
"e",
")",
"if",
"(",
"hasattr",
"(",
"socket",
",",
"'gaierror'",
")",
"and",
"isinstance",
"(",
"e",
",",
"socket",
".",
"gaierror",
")",
")",
":",
"klass",
"=",
"UnknownHostError",
"else",
":",
"klass",
"=",
"errnoMapping",
".",
"get",
"(",
"number",
",",
"ConnectError",
")",
"return",
"klass",
"(",
"number",
",",
"string",
")"
] | given a socket exception . | train | false |
41,503 | def safe_rmpath(path):
try:
st = os.stat(path)
if stat.S_ISDIR(st.st_mode):
os.rmdir(path)
else:
os.remove(path)
except OSError as err:
if (err.errno != errno.ENOENT):
raise
| [
"def",
"safe_rmpath",
"(",
"path",
")",
":",
"try",
":",
"st",
"=",
"os",
".",
"stat",
"(",
"path",
")",
"if",
"stat",
".",
"S_ISDIR",
"(",
"st",
".",
"st_mode",
")",
":",
"os",
".",
"rmdir",
"(",
"path",
")",
"else",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"OSError",
"as",
"err",
":",
"if",
"(",
"err",
".",
"errno",
"!=",
"errno",
".",
"ENOENT",
")",
":",
"raise"
] | convenience function for removing temporary test files or dirs . | train | false |
41,504 | def promotions(request):
promotions = get_request_promotions(request)
context = {'url_path': request.path}
split_by_position(promotions, context)
return context
| [
"def",
"promotions",
"(",
"request",
")",
":",
"promotions",
"=",
"get_request_promotions",
"(",
"request",
")",
"context",
"=",
"{",
"'url_path'",
":",
"request",
".",
"path",
"}",
"split_by_position",
"(",
"promotions",
",",
"context",
")",
"return",
"context"
] | for adding bindings for banners and pods to the template context . | train | false |
41,505 | def proxy_functions(proxy):
if proxy:
return {'proxy_functions': proxy['rest_sample.fns']()}
| [
"def",
"proxy_functions",
"(",
"proxy",
")",
":",
"if",
"proxy",
":",
"return",
"{",
"'proxy_functions'",
":",
"proxy",
"[",
"'rest_sample.fns'",
"]",
"(",
")",
"}"
] | the loader will execute functions with one argument and pass a reference to the proxymodules lazyloader object . | train | false |
41,506 | def change_PROCESS_AUTOMATICALLY(process_automatically):
process_automatically = checkbox_to_value(process_automatically)
if (sickbeard.PROCESS_AUTOMATICALLY == process_automatically):
return
sickbeard.PROCESS_AUTOMATICALLY = process_automatically
if sickbeard.PROCESS_AUTOMATICALLY:
if (not sickbeard.autoPostProcessorScheduler.enable):
logger.log(u'Starting POSTPROCESSOR thread', logger.INFO)
sickbeard.autoPostProcessorScheduler.silent = False
sickbeard.autoPostProcessorScheduler.enable = True
else:
logger.log(u'Unable to start POSTPROCESSOR thread. Already running', logger.INFO)
else:
logger.log(u'Stopping POSTPROCESSOR thread', logger.INFO)
sickbeard.autoPostProcessorScheduler.enable = False
sickbeard.autoPostProcessorScheduler.silent = True
| [
"def",
"change_PROCESS_AUTOMATICALLY",
"(",
"process_automatically",
")",
":",
"process_automatically",
"=",
"checkbox_to_value",
"(",
"process_automatically",
")",
"if",
"(",
"sickbeard",
".",
"PROCESS_AUTOMATICALLY",
"==",
"process_automatically",
")",
":",
"return",
"sickbeard",
".",
"PROCESS_AUTOMATICALLY",
"=",
"process_automatically",
"if",
"sickbeard",
".",
"PROCESS_AUTOMATICALLY",
":",
"if",
"(",
"not",
"sickbeard",
".",
"autoPostProcessorScheduler",
".",
"enable",
")",
":",
"logger",
".",
"log",
"(",
"u'Starting POSTPROCESSOR thread'",
",",
"logger",
".",
"INFO",
")",
"sickbeard",
".",
"autoPostProcessorScheduler",
".",
"silent",
"=",
"False",
"sickbeard",
".",
"autoPostProcessorScheduler",
".",
"enable",
"=",
"True",
"else",
":",
"logger",
".",
"log",
"(",
"u'Unable to start POSTPROCESSOR thread. Already running'",
",",
"logger",
".",
"INFO",
")",
"else",
":",
"logger",
".",
"log",
"(",
"u'Stopping POSTPROCESSOR thread'",
",",
"logger",
".",
"INFO",
")",
"sickbeard",
".",
"autoPostProcessorScheduler",
".",
"enable",
"=",
"False",
"sickbeard",
".",
"autoPostProcessorScheduler",
".",
"silent",
"=",
"True"
] | enable/disable postprocessor thread todo: make this return true/false on success/failure . | train | false |
41,507 | def get_city_code(request):
if ('X-AppEngine-City' in request.headers):
return request.headers['X-AppEngine-City']
return None
| [
"def",
"get_city_code",
"(",
"request",
")",
":",
"if",
"(",
"'X-AppEngine-City'",
"in",
"request",
".",
"headers",
")",
":",
"return",
"request",
".",
"headers",
"[",
"'X-AppEngine-City'",
"]",
"return",
"None"
] | city code based on iso 3166-1 . | train | false |
41,508 | def get_file_name(blob_key):
if (not blob_key):
raise files.InvalidArgumentError('Empty blob key')
if (not isinstance(blob_key, (blobstore.BlobKey, basestring))):
raise files.InvalidArgumentError('Expected string or blobstore.BlobKey')
return ('%s%s' % (_BLOBSTORE_DIRECTORY, blob_key))
| [
"def",
"get_file_name",
"(",
"blob_key",
")",
":",
"if",
"(",
"not",
"blob_key",
")",
":",
"raise",
"files",
".",
"InvalidArgumentError",
"(",
"'Empty blob key'",
")",
"if",
"(",
"not",
"isinstance",
"(",
"blob_key",
",",
"(",
"blobstore",
".",
"BlobKey",
",",
"basestring",
")",
")",
")",
":",
"raise",
"files",
".",
"InvalidArgumentError",
"(",
"'Expected string or blobstore.BlobKey'",
")",
"return",
"(",
"'%s%s'",
"%",
"(",
"_BLOBSTORE_DIRECTORY",
",",
"blob_key",
")",
")"
] | get a filename to read from the blob . | train | false |
41,509 | def multivariateNormalPdf(z, x, sigma):
assert ((len(z.shape) == 1) and (len(x.shape) == 1) and (len(x) == len(z)) and (sigma.shape == (len(x), len(z))))
tmp = ((-0.5) * dot(dot((z - x), inv(sigma)), (z - x)))
res = (((1.0 / power((2.0 * pi), (len(z) / 2.0))) * (1.0 / sqrt(det(sigma)))) * exp(tmp))
return res
| [
"def",
"multivariateNormalPdf",
"(",
"z",
",",
"x",
",",
"sigma",
")",
":",
"assert",
"(",
"(",
"len",
"(",
"z",
".",
"shape",
")",
"==",
"1",
")",
"and",
"(",
"len",
"(",
"x",
".",
"shape",
")",
"==",
"1",
")",
"and",
"(",
"len",
"(",
"x",
")",
"==",
"len",
"(",
"z",
")",
")",
"and",
"(",
"sigma",
".",
"shape",
"==",
"(",
"len",
"(",
"x",
")",
",",
"len",
"(",
"z",
")",
")",
")",
")",
"tmp",
"=",
"(",
"(",
"-",
"0.5",
")",
"*",
"dot",
"(",
"dot",
"(",
"(",
"z",
"-",
"x",
")",
",",
"inv",
"(",
"sigma",
")",
")",
",",
"(",
"z",
"-",
"x",
")",
")",
")",
"res",
"=",
"(",
"(",
"(",
"1.0",
"/",
"power",
"(",
"(",
"2.0",
"*",
"pi",
")",
",",
"(",
"len",
"(",
"z",
")",
"/",
"2.0",
")",
")",
")",
"*",
"(",
"1.0",
"/",
"sqrt",
"(",
"det",
"(",
"sigma",
")",
")",
")",
")",
"*",
"exp",
"(",
"tmp",
")",
")",
"return",
"res"
] | the pdf of a multivariate normal distribution . | train | false |
41,510 | def pivot(self, index=None, columns=None, values=None):
if (values is None):
cols = ([columns] if (index is None) else [index, columns])
append = (index is None)
indexed = self.set_index(cols, append=append)
return indexed.unstack(columns)
else:
if (index is None):
index = self.index
else:
index = self[index]
indexed = Series(self[values].values, index=MultiIndex.from_arrays([index, self[columns]]))
return indexed.unstack(columns)
| [
"def",
"pivot",
"(",
"self",
",",
"index",
"=",
"None",
",",
"columns",
"=",
"None",
",",
"values",
"=",
"None",
")",
":",
"if",
"(",
"values",
"is",
"None",
")",
":",
"cols",
"=",
"(",
"[",
"columns",
"]",
"if",
"(",
"index",
"is",
"None",
")",
"else",
"[",
"index",
",",
"columns",
"]",
")",
"append",
"=",
"(",
"index",
"is",
"None",
")",
"indexed",
"=",
"self",
".",
"set_index",
"(",
"cols",
",",
"append",
"=",
"append",
")",
"return",
"indexed",
".",
"unstack",
"(",
"columns",
")",
"else",
":",
"if",
"(",
"index",
"is",
"None",
")",
":",
"index",
"=",
"self",
".",
"index",
"else",
":",
"index",
"=",
"self",
"[",
"index",
"]",
"indexed",
"=",
"Series",
"(",
"self",
"[",
"values",
"]",
".",
"values",
",",
"index",
"=",
"MultiIndex",
".",
"from_arrays",
"(",
"[",
"index",
",",
"self",
"[",
"columns",
"]",
"]",
")",
")",
"return",
"indexed",
".",
"unstack",
"(",
"columns",
")"
] | see dataframe . | train | false |
41,511 | def convert_ReferenceProperty(model, prop, kwargs):
kwargs['reference_class'] = prop.reference_class
kwargs.setdefault('allow_blank', (not prop.required))
return ReferencePropertyField(**kwargs)
| [
"def",
"convert_ReferenceProperty",
"(",
"model",
",",
"prop",
",",
"kwargs",
")",
":",
"kwargs",
"[",
"'reference_class'",
"]",
"=",
"prop",
".",
"reference_class",
"kwargs",
".",
"setdefault",
"(",
"'allow_blank'",
",",
"(",
"not",
"prop",
".",
"required",
")",
")",
"return",
"ReferencePropertyField",
"(",
"**",
"kwargs",
")"
] | returns a form field for a db . | train | false |
41,512 | @intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def update_user_preferences(requesting_user, update, user=None):
if ((not user) or isinstance(user, basestring)):
user = _get_authorized_user(requesting_user, user)
else:
_check_authorized(requesting_user, user.username)
errors = {}
serializers = {}
for preference_key in update.keys():
preference_value = update[preference_key]
if (preference_value is not None):
try:
serializer = create_user_preference_serializer(user, preference_key, preference_value)
validate_user_preference_serializer(serializer, preference_key, preference_value)
serializers[preference_key] = serializer
except PreferenceValidationError as error:
preference_error = error.preference_errors[preference_key]
errors[preference_key] = {'developer_message': preference_error['developer_message'], 'user_message': preference_error['user_message']}
if errors:
raise PreferenceValidationError(errors)
for preference_key in update.keys():
preference_value = update[preference_key]
if (preference_value is not None):
try:
serializer = serializers[preference_key]
serializer.save()
except Exception as error:
raise _create_preference_update_error(preference_key, preference_value, error)
else:
delete_user_preference(requesting_user, preference_key)
| [
"@",
"intercept_errors",
"(",
"UserAPIInternalError",
",",
"ignore_errors",
"=",
"[",
"UserAPIRequestError",
"]",
")",
"def",
"update_user_preferences",
"(",
"requesting_user",
",",
"update",
",",
"user",
"=",
"None",
")",
":",
"if",
"(",
"(",
"not",
"user",
")",
"or",
"isinstance",
"(",
"user",
",",
"basestring",
")",
")",
":",
"user",
"=",
"_get_authorized_user",
"(",
"requesting_user",
",",
"user",
")",
"else",
":",
"_check_authorized",
"(",
"requesting_user",
",",
"user",
".",
"username",
")",
"errors",
"=",
"{",
"}",
"serializers",
"=",
"{",
"}",
"for",
"preference_key",
"in",
"update",
".",
"keys",
"(",
")",
":",
"preference_value",
"=",
"update",
"[",
"preference_key",
"]",
"if",
"(",
"preference_value",
"is",
"not",
"None",
")",
":",
"try",
":",
"serializer",
"=",
"create_user_preference_serializer",
"(",
"user",
",",
"preference_key",
",",
"preference_value",
")",
"validate_user_preference_serializer",
"(",
"serializer",
",",
"preference_key",
",",
"preference_value",
")",
"serializers",
"[",
"preference_key",
"]",
"=",
"serializer",
"except",
"PreferenceValidationError",
"as",
"error",
":",
"preference_error",
"=",
"error",
".",
"preference_errors",
"[",
"preference_key",
"]",
"errors",
"[",
"preference_key",
"]",
"=",
"{",
"'developer_message'",
":",
"preference_error",
"[",
"'developer_message'",
"]",
",",
"'user_message'",
":",
"preference_error",
"[",
"'user_message'",
"]",
"}",
"if",
"errors",
":",
"raise",
"PreferenceValidationError",
"(",
"errors",
")",
"for",
"preference_key",
"in",
"update",
".",
"keys",
"(",
")",
":",
"preference_value",
"=",
"update",
"[",
"preference_key",
"]",
"if",
"(",
"preference_value",
"is",
"not",
"None",
")",
":",
"try",
":",
"serializer",
"=",
"serializers",
"[",
"preference_key",
"]",
"serializer",
".",
"save",
"(",
")",
"except",
"Exception",
"as",
"error",
":",
"raise",
"_create_preference_update_error",
"(",
"preference_key",
",",
"preference_value",
",",
"error",
")",
"else",
":",
"delete_user_preference",
"(",
"requesting_user",
",",
"preference_key",
")"
] | update the user preferences for the given user . | train | false |
41,513 | def with_text(no_text=False, text=False, utext=False):
values = []
if no_text:
values.append(0)
if text:
values.append(1)
if utext:
values.append(2)
def set_value(function):
try:
function.TEXT.add(values)
except AttributeError:
function.TEXT = set(values)
return function
return set_value
| [
"def",
"with_text",
"(",
"no_text",
"=",
"False",
",",
"text",
"=",
"False",
",",
"utext",
"=",
"False",
")",
":",
"values",
"=",
"[",
"]",
"if",
"no_text",
":",
"values",
".",
"append",
"(",
"0",
")",
"if",
"text",
":",
"values",
".",
"append",
"(",
"1",
")",
"if",
"utext",
":",
"values",
".",
"append",
"(",
"2",
")",
"def",
"set_value",
"(",
"function",
")",
":",
"try",
":",
"function",
".",
"TEXT",
".",
"add",
"(",
"values",
")",
"except",
"AttributeError",
":",
"function",
".",
"TEXT",
"=",
"set",
"(",
"values",
")",
"return",
"function",
"return",
"set_value"
] | decorator for benchmarks that use text . | train | false |
41,514 | def _GetValidMain(module):
if (not hasattr(module, 'main')):
return None
main = module.main
if (not hasattr(main, '__call__')):
return None
defaults = main.__defaults__
if defaults:
default_argcount = len(defaults)
else:
default_argcount = 0
if ((main.__code__.co_argcount - default_argcount) == 0):
return main
else:
return None
| [
"def",
"_GetValidMain",
"(",
"module",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"module",
",",
"'main'",
")",
")",
":",
"return",
"None",
"main",
"=",
"module",
".",
"main",
"if",
"(",
"not",
"hasattr",
"(",
"main",
",",
"'__call__'",
")",
")",
":",
"return",
"None",
"defaults",
"=",
"main",
".",
"__defaults__",
"if",
"defaults",
":",
"default_argcount",
"=",
"len",
"(",
"defaults",
")",
"else",
":",
"default_argcount",
"=",
"0",
"if",
"(",
"(",
"main",
".",
"__code__",
".",
"co_argcount",
"-",
"default_argcount",
")",
"==",
"0",
")",
":",
"return",
"main",
"else",
":",
"return",
"None"
] | returns a main function in module if it exists and is valid or none . | train | false |
41,515 | def _apps():
def _in_exclusions(module_name):
settings_exclusions = getattr(settings, 'RAPIDSMS_HANDLERS_EXCLUDE_APPS', [])
return ((module_name == 'rapidsms.contrib.handlers') or module_name.startswith('django.contrib.') or (module_name in settings_exclusions))
return [module_name for module_name in settings.INSTALLED_APPS if (not _in_exclusions(module_name))]
| [
"def",
"_apps",
"(",
")",
":",
"def",
"_in_exclusions",
"(",
"module_name",
")",
":",
"settings_exclusions",
"=",
"getattr",
"(",
"settings",
",",
"'RAPIDSMS_HANDLERS_EXCLUDE_APPS'",
",",
"[",
"]",
")",
"return",
"(",
"(",
"module_name",
"==",
"'rapidsms.contrib.handlers'",
")",
"or",
"module_name",
".",
"startswith",
"(",
"'django.contrib.'",
")",
"or",
"(",
"module_name",
"in",
"settings_exclusions",
")",
")",
"return",
"[",
"module_name",
"for",
"module_name",
"in",
"settings",
".",
"INSTALLED_APPS",
"if",
"(",
"not",
"_in_exclusions",
"(",
"module_name",
")",
")",
"]"
] | return a list of the apps which may contain handlers . | train | false |
41,516 | @require_context
@pick_context_manager_reader
def flavor_get(context, id):
result = _flavor_get_query(context).filter_by(id=id).first()
if (not result):
raise exception.FlavorNotFound(flavor_id=id)
return _dict_with_extra_specs(result)
| [
"@",
"require_context",
"@",
"pick_context_manager_reader",
"def",
"flavor_get",
"(",
"context",
",",
"id",
")",
":",
"result",
"=",
"_flavor_get_query",
"(",
"context",
")",
".",
"filter_by",
"(",
"id",
"=",
"id",
")",
".",
"first",
"(",
")",
"if",
"(",
"not",
"result",
")",
":",
"raise",
"exception",
".",
"FlavorNotFound",
"(",
"flavor_id",
"=",
"id",
")",
"return",
"_dict_with_extra_specs",
"(",
"result",
")"
] | returns a dict describing specific flavor . | train | false |
41,517 | def get_best_encoding(stream):
rv = (getattr(stream, 'encoding', None) or sys.getdefaultencoding())
if is_ascii_encoding(rv):
return 'utf-8'
return rv
| [
"def",
"get_best_encoding",
"(",
"stream",
")",
":",
"rv",
"=",
"(",
"getattr",
"(",
"stream",
",",
"'encoding'",
",",
"None",
")",
"or",
"sys",
".",
"getdefaultencoding",
"(",
")",
")",
"if",
"is_ascii_encoding",
"(",
"rv",
")",
":",
"return",
"'utf-8'",
"return",
"rv"
] | returns the default stream encoding if not found . | train | true |
41,518 | def init_siteconfig(app, created_models, verbosity, db=None, **kwargs):
try:
site = Site.objects.get_current()
except Site.DoesNotExist:
from django.contrib.sites.management import create_default_site
create_default_site(app, created_models, verbosity, db=db)
site = Site.objects.get_current()
(siteconfig, is_new) = SiteConfiguration.objects.get_or_create(site=site)
new_version = get_version_string()
if is_new:
if (Site not in created_models):
print(u'*** Migrating settings from settings_local.py to the database.')
migrate_settings(siteconfig)
if (Site not in created_models):
print(u'*** If you have previously configured Review Board through a ')
print(u'*** settings_local.py file, please ensure that the migration ')
print(u'*** was successful by verifying your settings at')
print((u'*** %s://%s%sadmin/settings/' % (siteconfig.get(u'site_domain_method'), site.domain, settings.SITE_ROOT)))
siteconfig.version = new_version
siteconfig.save()
elif (siteconfig.version != new_version):
print((u'Upgrading Review Board from %s to %s' % (siteconfig.version, new_version)))
siteconfig.version = new_version
siteconfig.save()
| [
"def",
"init_siteconfig",
"(",
"app",
",",
"created_models",
",",
"verbosity",
",",
"db",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"try",
":",
"site",
"=",
"Site",
".",
"objects",
".",
"get_current",
"(",
")",
"except",
"Site",
".",
"DoesNotExist",
":",
"from",
"django",
".",
"contrib",
".",
"sites",
".",
"management",
"import",
"create_default_site",
"create_default_site",
"(",
"app",
",",
"created_models",
",",
"verbosity",
",",
"db",
"=",
"db",
")",
"site",
"=",
"Site",
".",
"objects",
".",
"get_current",
"(",
")",
"(",
"siteconfig",
",",
"is_new",
")",
"=",
"SiteConfiguration",
".",
"objects",
".",
"get_or_create",
"(",
"site",
"=",
"site",
")",
"new_version",
"=",
"get_version_string",
"(",
")",
"if",
"is_new",
":",
"if",
"(",
"Site",
"not",
"in",
"created_models",
")",
":",
"print",
"(",
"u'*** Migrating settings from settings_local.py to the database.'",
")",
"migrate_settings",
"(",
"siteconfig",
")",
"if",
"(",
"Site",
"not",
"in",
"created_models",
")",
":",
"print",
"(",
"u'*** If you have previously configured Review Board through a '",
")",
"print",
"(",
"u'*** settings_local.py file, please ensure that the migration '",
")",
"print",
"(",
"u'*** was successful by verifying your settings at'",
")",
"print",
"(",
"(",
"u'*** %s://%s%sadmin/settings/'",
"%",
"(",
"siteconfig",
".",
"get",
"(",
"u'site_domain_method'",
")",
",",
"site",
".",
"domain",
",",
"settings",
".",
"SITE_ROOT",
")",
")",
")",
"siteconfig",
".",
"version",
"=",
"new_version",
"siteconfig",
".",
"save",
"(",
")",
"elif",
"(",
"siteconfig",
".",
"version",
"!=",
"new_version",
")",
":",
"print",
"(",
"(",
"u'Upgrading Review Board from %s to %s'",
"%",
"(",
"siteconfig",
".",
"version",
",",
"new_version",
")",
")",
")",
"siteconfig",
".",
"version",
"=",
"new_version",
"siteconfig",
".",
"save",
"(",
")"
] | initialize the site configuration . | train | false |
41,520 | def pull_external(ctx, filename):
hashed_filename = hash_func(filename)
rel_path = path.join('webassets-external', ('%s_%s' % (hashed_filename, path.basename(filename))))
full_path = path.join(ctx.directory, rel_path)
if path.isfile(full_path):
gs = (lambda p: os.stat(p).st_mtime)
if (gs(full_path) > gs(filename)):
return full_path
directory = path.dirname(full_path)
if (not path.exists(directory)):
os.makedirs(directory)
FileHunk(filename).save(full_path)
return full_path
| [
"def",
"pull_external",
"(",
"ctx",
",",
"filename",
")",
":",
"hashed_filename",
"=",
"hash_func",
"(",
"filename",
")",
"rel_path",
"=",
"path",
".",
"join",
"(",
"'webassets-external'",
",",
"(",
"'%s_%s'",
"%",
"(",
"hashed_filename",
",",
"path",
".",
"basename",
"(",
"filename",
")",
")",
")",
")",
"full_path",
"=",
"path",
".",
"join",
"(",
"ctx",
".",
"directory",
",",
"rel_path",
")",
"if",
"path",
".",
"isfile",
"(",
"full_path",
")",
":",
"gs",
"=",
"(",
"lambda",
"p",
":",
"os",
".",
"stat",
"(",
"p",
")",
".",
"st_mtime",
")",
"if",
"(",
"gs",
"(",
"full_path",
")",
">",
"gs",
"(",
"filename",
")",
")",
":",
"return",
"full_path",
"directory",
"=",
"path",
".",
"dirname",
"(",
"full_path",
")",
"if",
"(",
"not",
"path",
".",
"exists",
"(",
"directory",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"directory",
")",
"FileHunk",
"(",
"filename",
")",
".",
"save",
"(",
"full_path",
")",
"return",
"full_path"
] | helper which will pull filename into :attr:environment . | train | false |
41,521 | def retry_request(n_retries=5, **kwargs):
exc = resp = None
for n_try in range(n_retries):
try:
exc = None
resp = requests.request(**kwargs)
if (resp.status_code < 500):
return resp
except requests.RequestException as exc:
pass
time.sleep(((2 ** (n_try + 1)) * 0.5))
if exc:
raise exc
if resp:
resp.raise_for_status()
raise Exception('An unknown problem occurred with a request.')
| [
"def",
"retry_request",
"(",
"n_retries",
"=",
"5",
",",
"**",
"kwargs",
")",
":",
"exc",
"=",
"resp",
"=",
"None",
"for",
"n_try",
"in",
"range",
"(",
"n_retries",
")",
":",
"try",
":",
"exc",
"=",
"None",
"resp",
"=",
"requests",
".",
"request",
"(",
"**",
"kwargs",
")",
"if",
"(",
"resp",
".",
"status_code",
"<",
"500",
")",
":",
"return",
"resp",
"except",
"requests",
".",
"RequestException",
"as",
"exc",
":",
"pass",
"time",
".",
"sleep",
"(",
"(",
"(",
"2",
"**",
"(",
"n_try",
"+",
"1",
")",
")",
"*",
"0.5",
")",
")",
"if",
"exc",
":",
"raise",
"exc",
"if",
"resp",
":",
"resp",
".",
"raise_for_status",
"(",
")",
"raise",
"Exception",
"(",
"'An unknown problem occurred with a request.'",
")"
] | retry a requests request with exponential backoff up to n_retries times . | train | false |
41,524 | @release.command()
def publish():
version = get_version(1)
with chdir(BASE):
subprocess.check_call(['git', 'push'])
subprocess.check_call(['git', 'push', '--tags'])
path = os.path.join(BASE, 'dist', 'beets-{}.tar.gz'.format(version))
subprocess.check_call(['twine', 'upload', path])
| [
"@",
"release",
".",
"command",
"(",
")",
"def",
"publish",
"(",
")",
":",
"version",
"=",
"get_version",
"(",
"1",
")",
"with",
"chdir",
"(",
"BASE",
")",
":",
"subprocess",
".",
"check_call",
"(",
"[",
"'git'",
",",
"'push'",
"]",
")",
"subprocess",
".",
"check_call",
"(",
"[",
"'git'",
",",
"'push'",
",",
"'--tags'",
"]",
")",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"BASE",
",",
"'dist'",
",",
"'beets-{}.tar.gz'",
".",
"format",
"(",
"version",
")",
")",
"subprocess",
".",
"check_call",
"(",
"[",
"'twine'",
",",
"'upload'",
",",
"path",
"]",
")"
] | publish message to an mqtt topic . | train | false |
def test_url_utf8():
    """Yield nose generator tests checking linkify on URLs with UTF-8 bytes."""
    # Expected anchor markup linkify should produce for a bare URL.
    out = u'<a href="%(url)s" rel="nofollow">%(url)s</a>'
    # (UTF-8 byte-string input, expected unicode output) pairs covering a
    # non-ASCII host, path, and query string (both ASCII and non-ASCII values).
    tests = (('http://\xc3\xa9x\xc3\xa1mpl\xc3\xa9.com/', (out % {'url': u'http://\xe9x\xe1mpl\xe9.com/'})), ('http://\xc3\xa9x\xc3\xa1mpl\xc3\xa9.com/\xc3\xad\xc3\xa0\xc3\xb1\xc3\xa1/', (out % {'url': u'http://\xe9x\xe1mpl\xe9.com/\xed\xe0\xf1\xe1/'})), ('http://\xc3\xa9x\xc3\xa1mpl\xc3\xa9.com/\xc3\xad\xc3\xa0\xc3\xb1\xc3\xa1/?foo=bar', (out % {'url': u'http://\xe9x\xe1mpl\xe9.com/\xed\xe0\xf1\xe1/?foo=bar'})), ('http://\xc3\xa9x\xc3\xa1mpl\xc3\xa9.com/\xc3\xad\xc3\xa0\xc3\xb1\xc3\xa1/?f\xc3\xb3o=b\xc3\xa1r', (out % {'url': u'http://\xe9x\xe1mpl\xe9.com/\xed\xe0\xf1\xe1/?f\xf3o=b\xe1r'})))
    def check(test, expected_output):
        # Each yielded case compares linkify's output against the expectation.
        eq_(expected_output, linkify(test))
    for (test, expected_output) in tests:
        # Nose generator-test protocol: yield (callable, *args).
        (yield (check, test, expected_output))
| [
"def",
"test_url_utf8",
"(",
")",
":",
"out",
"=",
"u'<a href=\"%(url)s\" rel=\"nofollow\">%(url)s</a>'",
"tests",
"=",
"(",
"(",
"'http://\\xc3\\xa9x\\xc3\\xa1mpl\\xc3\\xa9.com/'",
",",
"(",
"out",
"%",
"{",
"'url'",
":",
"u'http://\\xe9x\\xe1mpl\\xe9.com/'",
"}",
")",
")",
",",
"(",
"'http://\\xc3\\xa9x\\xc3\\xa1mpl\\xc3\\xa9.com/\\xc3\\xad\\xc3\\xa0\\xc3\\xb1\\xc3\\xa1/'",
",",
"(",
"out",
"%",
"{",
"'url'",
":",
"u'http://\\xe9x\\xe1mpl\\xe9.com/\\xed\\xe0\\xf1\\xe1/'",
"}",
")",
")",
",",
"(",
"'http://\\xc3\\xa9x\\xc3\\xa1mpl\\xc3\\xa9.com/\\xc3\\xad\\xc3\\xa0\\xc3\\xb1\\xc3\\xa1/?foo=bar'",
",",
"(",
"out",
"%",
"{",
"'url'",
":",
"u'http://\\xe9x\\xe1mpl\\xe9.com/\\xed\\xe0\\xf1\\xe1/?foo=bar'",
"}",
")",
")",
",",
"(",
"'http://\\xc3\\xa9x\\xc3\\xa1mpl\\xc3\\xa9.com/\\xc3\\xad\\xc3\\xa0\\xc3\\xb1\\xc3\\xa1/?f\\xc3\\xb3o=b\\xc3\\xa1r'",
",",
"(",
"out",
"%",
"{",
"'url'",
":",
"u'http://\\xe9x\\xe1mpl\\xe9.com/\\xed\\xe0\\xf1\\xe1/?f\\xf3o=b\\xe1r'",
"}",
")",
")",
")",
"def",
"check",
"(",
"test",
",",
"expected_output",
")",
":",
"eq_",
"(",
"expected_output",
",",
"linkify",
"(",
"test",
")",
")",
"for",
"(",
"test",
",",
"expected_output",
")",
"in",
"tests",
":",
"(",
"yield",
"(",
"check",
",",
"test",
",",
"expected_output",
")",
")"
] | allow utf8 characters in urls themselves . | train | false |
def orthogonal_procrustes(A, B, check_finite=True):
    """Solve the orthogonal Procrustes problem for 2-D matrices A and B.

    Returns ``(R, scale)`` where ``R`` is the orthogonal matrix (from the SVD
    of ``B.T.dot(A).T``) mapping A onto B, and ``scale`` is the sum of the
    singular values.  Raises ``ValueError`` when A is not 2-D or when the two
    shapes differ.
    """
    if check_finite:
        # Reject NaN/inf inputs with an explicit error.
        A = np.asarray_chkfinite(A)
        B = np.asarray_chkfinite(B)
    else:
        A = np.asanyarray(A)
        B = np.asanyarray(B)
    if A.ndim != 2:
        raise ValueError('expected ndim to be 2, but observed %s' % A.ndim)
    if A.shape != B.shape:
        raise ValueError('the shapes of A and B differ (%s vs %s)' % (A.shape, B.shape))
    # R is the product of the two orthogonal SVD factors; the singular
    # values give the optimal scale.
    left, singular_values, right = svd(B.T.dot(A).T)
    return left.dot(right), singular_values.sum()
| [
"def",
"orthogonal_procrustes",
"(",
"A",
",",
"B",
",",
"check_finite",
"=",
"True",
")",
":",
"if",
"check_finite",
":",
"A",
"=",
"np",
".",
"asarray_chkfinite",
"(",
"A",
")",
"B",
"=",
"np",
".",
"asarray_chkfinite",
"(",
"B",
")",
"else",
":",
"A",
"=",
"np",
".",
"asanyarray",
"(",
"A",
")",
"B",
"=",
"np",
".",
"asanyarray",
"(",
"B",
")",
"if",
"(",
"A",
".",
"ndim",
"!=",
"2",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'expected ndim to be 2, but observed %s'",
"%",
"A",
".",
"ndim",
")",
")",
"if",
"(",
"A",
".",
"shape",
"!=",
"B",
".",
"shape",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'the shapes of A and B differ (%s vs %s)'",
"%",
"(",
"A",
".",
"shape",
",",
"B",
".",
"shape",
")",
")",
")",
"(",
"u",
",",
"w",
",",
"vt",
")",
"=",
"svd",
"(",
"B",
".",
"T",
".",
"dot",
"(",
"A",
")",
".",
"T",
")",
"R",
"=",
"u",
".",
"dot",
"(",
"vt",
")",
"scale",
"=",
"w",
".",
"sum",
"(",
")",
"return",
"(",
"R",
",",
"scale",
")"
] | compute the matrix solution of the orthogonal procrustes problem . | train | false |
def clearcache():
    """Empty the module-level cache completely."""
    global cache
    # Rebind to a fresh dict (rather than mutating) as the original did, so
    # any outstanding aliases keep their old contents.
    cache = dict()
| [
"def",
"clearcache",
"(",
")",
":",
"global",
"cache",
"cache",
"=",
"{",
"}"
] | clear the cache entirely . | train | false |
def _get_persistent_modules():
    """Return the set of module names listed in the loader configuration."""
    with salt.utils.fopen(_LOADER_CONF, 'r') as loader_conf:
        # Parse each stripped line; keep only those that yield a module name.
        return {name
                for name in (_get_module_name(line.strip()) for line in loader_conf)
                if name}
| [
"def",
"_get_persistent_modules",
"(",
")",
":",
"mods",
"=",
"set",
"(",
")",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"_LOADER_CONF",
",",
"'r'",
")",
"as",
"loader_conf",
":",
"for",
"line",
"in",
"loader_conf",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"mod_name",
"=",
"_get_module_name",
"(",
"line",
")",
"if",
"mod_name",
":",
"mods",
".",
"add",
"(",
"mod_name",
")",
"return",
"mods"
] | returns a list of modules in loader . | train | true |
def build_tables():
    """Generate xonsh's lexer and parser tables into the xonsh/ directory."""
    print(u'Building lexer and parser tables.')
    # Make the in-tree package importable for the duration of the build.
    package_dir = os.path.dirname(__file__)
    sys.path.insert(0, package_dir)
    from xonsh.parser import Parser
    Parser(lexer_table=u'lexer_table',
           yacc_table=u'parser_table',
           outputdir=u'xonsh',
           yacc_debug=True)
    sys.path.pop(0)
| [
"def",
"build_tables",
"(",
")",
":",
"print",
"(",
"u'Building lexer and parser tables.'",
")",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"from",
"xonsh",
".",
"parser",
"import",
"Parser",
"Parser",
"(",
"lexer_table",
"=",
"u'lexer_table'",
",",
"yacc_table",
"=",
"u'parser_table'",
",",
"outputdir",
"=",
"u'xonsh'",
",",
"yacc_debug",
"=",
"True",
")",
"sys",
".",
"path",
".",
"pop",
"(",
"0",
")"
] | build the lexer/parser modules . | train | false |
@handle_response_format
@treeio_login_required
def stream_checkmail(request, stream_id, response_format='html'):
    """Fetch new e-mail for a MessageStream, then redirect to its view page.

    Adds an INFO flash message on success and a best-effort ERROR message on
    failure; denies access when the user lacks permission on the stream.
    """
    user = request.user.profile
    stream = get_object_or_404(MessageStream, pk=stream_id)
    if not user.has_permission(stream):
        return user_denied(request, message="You don't have access to this Stream",
                           response_format=response_format)
    try:
        stream.process_email()
        messages.add_message(request, messages.INFO,
                             _('E-mails fetched successfully.'), fail_silently=True)
    except Exception:
        # Fix: the original used bare ``except:`` twice, which also swallowed
        # SystemExit/KeyboardInterrupt.  Keep the best-effort semantics but
        # only for ordinary exceptions.
        try:
            messages.add_message(request, messages.ERROR,
                                 _('Failed to retrieve messages for this stream. Please check stream settings'),
                                 fail_silently=True)
        except Exception:
            pass
    return HttpResponseRedirect(reverse('messaging_stream_view', args=[stream.id]))
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"stream_checkmail",
"(",
"request",
",",
"stream_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"user",
"=",
"request",
".",
"user",
".",
"profile",
"stream",
"=",
"get_object_or_404",
"(",
"MessageStream",
",",
"pk",
"=",
"stream_id",
")",
"if",
"(",
"not",
"user",
".",
"has_permission",
"(",
"stream",
")",
")",
":",
"return",
"user_denied",
"(",
"request",
",",
"message",
"=",
"\"You don't have access to this Stream\"",
",",
"response_format",
"=",
"response_format",
")",
"try",
":",
"stream",
".",
"process_email",
"(",
")",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"INFO",
",",
"_",
"(",
"'E-mails fetched successfully.'",
")",
",",
"fail_silently",
"=",
"True",
")",
"except",
":",
"try",
":",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"ERROR",
",",
"_",
"(",
"'Failed to retrieve messages for this stream. Please check stream settings'",
")",
",",
"fail_silently",
"=",
"True",
")",
"except",
":",
"pass",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'messaging_stream_view'",
",",
"args",
"=",
"[",
"stream",
".",
"id",
"]",
")",
")"
] | stream check mail . | train | false |
def get_schema_view(title=None, url=None, urlconf=None, renderer_classes=None):
    """Build and return a DRF view callable that serves the generated API schema.

    When ``renderer_classes`` is None, the renderer list mirrors the project's
    global default: the browsable API renderer is offered only if it is in
    ``DEFAULT_RENDERER_CLASSES``.
    """
    generator = SchemaGenerator(title=title, url=url, urlconf=urlconf)
    if (renderer_classes is None):
        if (renderers.BrowsableAPIRenderer in api_settings.DEFAULT_RENDERER_CLASSES):
            rclasses = [renderers.CoreJSONRenderer, renderers.BrowsableAPIRenderer]
        else:
            rclasses = [renderers.CoreJSONRenderer]
    else:
        rclasses = renderer_classes
    class SchemaView(APIView, ):
        # The schema endpoint is metadata: skip model-level permissions and
        # keep the endpoint itself out of the schema it describes.
        _ignore_model_permissions = True
        exclude_from_schema = True
        renderer_classes = rclasses
        def get(self, request, *args, **kwargs):
            # ``get_schema`` returns None when no endpoints are visible to
            # this request's user.
            schema = generator.get_schema(request)
            if (schema is None):
                raise exceptions.PermissionDenied()
            return Response(schema)
    return SchemaView.as_view()
| [
"def",
"get_schema_view",
"(",
"title",
"=",
"None",
",",
"url",
"=",
"None",
",",
"urlconf",
"=",
"None",
",",
"renderer_classes",
"=",
"None",
")",
":",
"generator",
"=",
"SchemaGenerator",
"(",
"title",
"=",
"title",
",",
"url",
"=",
"url",
",",
"urlconf",
"=",
"urlconf",
")",
"if",
"(",
"renderer_classes",
"is",
"None",
")",
":",
"if",
"(",
"renderers",
".",
"BrowsableAPIRenderer",
"in",
"api_settings",
".",
"DEFAULT_RENDERER_CLASSES",
")",
":",
"rclasses",
"=",
"[",
"renderers",
".",
"CoreJSONRenderer",
",",
"renderers",
".",
"BrowsableAPIRenderer",
"]",
"else",
":",
"rclasses",
"=",
"[",
"renderers",
".",
"CoreJSONRenderer",
"]",
"else",
":",
"rclasses",
"=",
"renderer_classes",
"class",
"SchemaView",
"(",
"APIView",
",",
")",
":",
"_ignore_model_permissions",
"=",
"True",
"exclude_from_schema",
"=",
"True",
"renderer_classes",
"=",
"rclasses",
"def",
"get",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"schema",
"=",
"generator",
".",
"get_schema",
"(",
"request",
")",
"if",
"(",
"schema",
"is",
"None",
")",
":",
"raise",
"exceptions",
".",
"PermissionDenied",
"(",
")",
"return",
"Response",
"(",
"schema",
")",
"return",
"SchemaView",
".",
"as_view",
"(",
")"
] | return a schema view . | train | false |
def test_disk_store_alt_name_relpath():
    """Verify ``alt_name`` cannot escape the store via ``..`` path traversal."""
    with TestConfig(DISK_TEST_CONFIG) as (directory, object_store):
        dataset = MockDataset(1)
        directory.write('', 'files1/000/dataset_1.dat')
        directory.write('foo', 'foo.txt')
        # Acceptable outcomes: the traversal is rejected outright
        # (ObjectInvalid), or at least the escaped file is not served back.
        try:
            fetched = object_store.get_data(dataset,
                                            extra_dir='dataset_1_files',
                                            alt_name='../../../foo.txt')
        except ObjectInvalid:
            return
        assert fetched != 'foo'
| [
"def",
"test_disk_store_alt_name_relpath",
"(",
")",
":",
"with",
"TestConfig",
"(",
"DISK_TEST_CONFIG",
")",
"as",
"(",
"directory",
",",
"object_store",
")",
":",
"empty_dataset",
"=",
"MockDataset",
"(",
"1",
")",
"directory",
".",
"write",
"(",
"''",
",",
"'files1/000/dataset_1.dat'",
")",
"directory",
".",
"write",
"(",
"'foo'",
",",
"'foo.txt'",
")",
"try",
":",
"assert",
"(",
"object_store",
".",
"get_data",
"(",
"empty_dataset",
",",
"extra_dir",
"=",
"'dataset_1_files'",
",",
"alt_name",
"=",
"'../../../foo.txt'",
")",
"!=",
"'foo'",
")",
"except",
"ObjectInvalid",
":",
"pass"
] | test that alt_name cannot be used to access arbitrary paths using a relative path . | train | false |
def get_NSString(string):
    """Return *string* bridged to an autoreleased NSString via CFSTR."""
    ns_string = CFSTR(string)
    return ns_string.autorelease()
| [
"def",
"get_NSString",
"(",
"string",
")",
":",
"return",
"CFSTR",
"(",
"string",
")",
".",
"autorelease",
"(",
")"
] | autoreleased version of cfstr . | train | false |
def get_project_stats(project):
    """Return a list of per-language translation statistics dicts for *project*."""
    stats = []
    for tup in get_per_language_stats(project):
        # Tuple layout: (language, translated, total, translated_words, total_words).
        language, translated, total = tup[0], tup[1], tup[2]
        translated_words, total_words = tup[3], tup[4]
        stats.append({
            u'language': force_text(language),
            u'code': language.code,
            u'total': total,
            u'translated': translated,
            u'translated_percent': translation_percent(translated, total),
            u'total_words': total_words,
            u'translated_words': translated_words,
            u'words_percent': translation_percent(translated_words, total_words),
        })
    return stats
| [
"def",
"get_project_stats",
"(",
"project",
")",
":",
"return",
"[",
"{",
"u'language'",
":",
"force_text",
"(",
"tup",
"[",
"0",
"]",
")",
",",
"u'code'",
":",
"tup",
"[",
"0",
"]",
".",
"code",
",",
"u'total'",
":",
"tup",
"[",
"2",
"]",
",",
"u'translated'",
":",
"tup",
"[",
"1",
"]",
",",
"u'translated_percent'",
":",
"translation_percent",
"(",
"tup",
"[",
"1",
"]",
",",
"tup",
"[",
"2",
"]",
")",
",",
"u'total_words'",
":",
"tup",
"[",
"4",
"]",
",",
"u'translated_words'",
":",
"tup",
"[",
"3",
"]",
",",
"u'words_percent'",
":",
"translation_percent",
"(",
"tup",
"[",
"3",
"]",
",",
"tup",
"[",
"4",
"]",
")",
"}",
"for",
"tup",
"in",
"get_per_language_stats",
"(",
"project",
")",
"]"
] | returns stats for project . | train | false |
def import_files(lib, paths, query):
    """Import the files at *paths* (or items matching *query*) into *lib*.

    Validates paths and option combinations, optionally opens a log file,
    runs a TerminalImportSession, and finally fires the ``import`` plugin
    event.  Raises ``ui.UserError`` on bad paths, conflicting options, or an
    unwritable log file.
    """
    # Fail fast on any nonexistent path before starting the session.
    for path in paths:
        if (not os.path.exists(syspath(normpath(path)))):
            raise ui.UserError(u'no such file or directory: {0}'.format(displayable_path(path)))
    # 'quiet' (no prompts) and 'timid' (prompt for everything) are mutually
    # exclusive.
    if (config['import']['quiet'] and config['import']['timid']):
        raise ui.UserError(u"can't be both quiet and timid")
    # Open the optional import log; a failure here is a user error, not a crash.
    if (config['import']['log'].get() is not None):
        logpath = syspath(config['import']['log'].as_filename())
        try:
            loghandler = logging.FileHandler(logpath)
        except IOError:
            raise ui.UserError(u'could not open log file for writing: {0}'.format(displayable_path(logpath)))
    else:
        loghandler = None
    # 'resume: ask' needs a prompt, which quiet mode forbids; disable resume.
    if ((config['import']['resume'].get() == 'ask') and config['import']['quiet']):
        config['import']['resume'] = False
    session = TerminalImportSession(lib, loghandler, paths, query)
    session.run()
    # Notify plugins that an import has completed.
    plugins.send('import', lib=lib, paths=paths)
| [
"def",
"import_files",
"(",
"lib",
",",
"paths",
",",
"query",
")",
":",
"for",
"path",
"in",
"paths",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"syspath",
"(",
"normpath",
"(",
"path",
")",
")",
")",
")",
":",
"raise",
"ui",
".",
"UserError",
"(",
"u'no such file or directory: {0}'",
".",
"format",
"(",
"displayable_path",
"(",
"path",
")",
")",
")",
"if",
"(",
"config",
"[",
"'import'",
"]",
"[",
"'quiet'",
"]",
"and",
"config",
"[",
"'import'",
"]",
"[",
"'timid'",
"]",
")",
":",
"raise",
"ui",
".",
"UserError",
"(",
"u\"can't be both quiet and timid\"",
")",
"if",
"(",
"config",
"[",
"'import'",
"]",
"[",
"'log'",
"]",
".",
"get",
"(",
")",
"is",
"not",
"None",
")",
":",
"logpath",
"=",
"syspath",
"(",
"config",
"[",
"'import'",
"]",
"[",
"'log'",
"]",
".",
"as_filename",
"(",
")",
")",
"try",
":",
"loghandler",
"=",
"logging",
".",
"FileHandler",
"(",
"logpath",
")",
"except",
"IOError",
":",
"raise",
"ui",
".",
"UserError",
"(",
"u'could not open log file for writing: {0}'",
".",
"format",
"(",
"displayable_path",
"(",
"logpath",
")",
")",
")",
"else",
":",
"loghandler",
"=",
"None",
"if",
"(",
"(",
"config",
"[",
"'import'",
"]",
"[",
"'resume'",
"]",
".",
"get",
"(",
")",
"==",
"'ask'",
")",
"and",
"config",
"[",
"'import'",
"]",
"[",
"'quiet'",
"]",
")",
":",
"config",
"[",
"'import'",
"]",
"[",
"'resume'",
"]",
"=",
"False",
"session",
"=",
"TerminalImportSession",
"(",
"lib",
",",
"loghandler",
",",
"paths",
",",
"query",
")",
"session",
".",
"run",
"(",
")",
"plugins",
".",
"send",
"(",
"'import'",
",",
"lib",
"=",
"lib",
",",
"paths",
"=",
"paths",
")"
] | import the files in the given list of paths or matching the query . | train | false |
@decorator
def needs_sqlite(f, self, *a, **kw):
    """Call *f* only when sqlite3 is available and history is enabled;
    otherwise degrade gracefully to an empty list."""
    if sqlite3 is not None and self.enabled:
        return f(self, *a, **kw)
    return []
| [
"@",
"decorator",
"def",
"needs_sqlite",
"(",
"f",
",",
"self",
",",
"*",
"a",
",",
"**",
"kw",
")",
":",
"if",
"(",
"(",
"sqlite3",
"is",
"None",
")",
"or",
"(",
"not",
"self",
".",
"enabled",
")",
")",
":",
"return",
"[",
"]",
"else",
":",
"return",
"f",
"(",
"self",
",",
"*",
"a",
",",
"**",
"kw",
")"
] | decorator: return an empty list in the absence of sqlite . | train | false |
def month_by_name(name, lang=u'en'):
    """Return the 1-based month number for *name* in *lang*, or None.

    Unknown languages fall back to the English month names.
    """
    month_names = MONTH_NAMES.get(lang, MONTH_NAMES[u'en'])
    if name in month_names:
        return month_names.index(name) + 1
    return None
| [
"def",
"month_by_name",
"(",
"name",
",",
"lang",
"=",
"u'en'",
")",
":",
"month_names",
"=",
"MONTH_NAMES",
".",
"get",
"(",
"lang",
",",
"MONTH_NAMES",
"[",
"u'en'",
"]",
")",
"try",
":",
"return",
"(",
"month_names",
".",
"index",
"(",
"name",
")",
"+",
"1",
")",
"except",
"ValueError",
":",
"return",
"None"
] | return the number of a month by english name . | train | false |
def assert_equal_mod_whitespace(first, second, msg=None):
    """Assert that two strings are equal after collapsing their whitespace."""
    normalized_first = compact_whitespace(first)
    normalized_second = compact_whitespace(second)
    nose.tools.assert_equal(normalized_first, normalized_second, msg)
| [
"def",
"assert_equal_mod_whitespace",
"(",
"first",
",",
"second",
",",
"msg",
"=",
"None",
")",
":",
"nose",
".",
"tools",
".",
"assert_equal",
"(",
"compact_whitespace",
"(",
"first",
")",
",",
"compact_whitespace",
"(",
"second",
")",
",",
"msg",
")"
] | asserts that two strings are equal . | train | false |
def get_site():
    """Return the analytics name of the current site, or '' when there is none."""
    if c.site:
        return c.site.analytics_name
    return ''
| [
"def",
"get_site",
"(",
")",
":",
"return",
"(",
"c",
".",
"site",
".",
"analytics_name",
"if",
"c",
".",
"site",
"else",
"''",
")"
] | return the name of the current "site" . | train | false |
def get_minibatch(doc_iter, size, pos_class=positive_class):
    """Take up to *size* labelled documents from *doc_iter*.

    Returns ``(texts, y)`` where ``y`` is a binary int array marking whether
    *pos_class* appears in each document's topics; unlabelled documents are
    skipped.  Returns a pair of empty int arrays when nothing qualifies.
    """
    batch = itertools.islice(doc_iter, size)
    data = [(u'{title}\n\n{body}'.format(**doc), pos_class in doc['topics'])
            for doc in batch if doc['topics']]
    if not data:
        return (np.asarray([], dtype=int), np.asarray([], dtype=int))
    X_text, y = zip(*data)
    return (X_text, np.asarray(y, dtype=int))
| [
"def",
"get_minibatch",
"(",
"doc_iter",
",",
"size",
",",
"pos_class",
"=",
"positive_class",
")",
":",
"data",
"=",
"[",
"(",
"u'{title}\\n\\n{body}'",
".",
"format",
"(",
"**",
"doc",
")",
",",
"(",
"pos_class",
"in",
"doc",
"[",
"'topics'",
"]",
")",
")",
"for",
"doc",
"in",
"itertools",
".",
"islice",
"(",
"doc_iter",
",",
"size",
")",
"if",
"doc",
"[",
"'topics'",
"]",
"]",
"if",
"(",
"not",
"len",
"(",
"data",
")",
")",
":",
"return",
"(",
"np",
".",
"asarray",
"(",
"[",
"]",
",",
"dtype",
"=",
"int",
")",
",",
"np",
".",
"asarray",
"(",
"[",
"]",
",",
"dtype",
"=",
"int",
")",
")",
"(",
"X_text",
",",
"y",
")",
"=",
"zip",
"(",
"*",
"data",
")",
"return",
"(",
"X_text",
",",
"np",
".",
"asarray",
"(",
"y",
",",
"dtype",
"=",
"int",
")",
")"
] | extract a minibatch of examples . | train | false |
def getLatestVersionInfo():
    """Fetch the latest-version info page and parse it into a dict.

    Each non-empty line is expected to look like ``key: value``.  Returns the
    resulting dict, or -1 when the page cannot be fetched or a line has no
    colon at all.
    """
    try:
        page = urllib.request.urlopen(versionURL)
    except urllib.error.URLError:
        return (-1)
    latest = {}
    for line in page.readlines():
        if (line.find(':') == (-1)):
            return (-1)
        # Fix: split on the first colon only, so values that themselves
        # contain ':' (URLs, timestamps) no longer raise ValueError on unpack.
        (key, keyInfo) = line.split(':', 1)
        latest[key] = keyInfo.replace('\n', '').replace('\r', '')
    return latest
| [
"def",
"getLatestVersionInfo",
"(",
")",
":",
"try",
":",
"page",
"=",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"versionURL",
")",
"except",
"urllib",
".",
"error",
".",
"URLError",
":",
"return",
"(",
"-",
"1",
")",
"latest",
"=",
"{",
"}",
"for",
"line",
"in",
"page",
".",
"readlines",
"(",
")",
":",
"if",
"(",
"line",
".",
"find",
"(",
"':'",
")",
"==",
"(",
"-",
"1",
")",
")",
":",
"return",
"(",
"-",
"1",
")",
"(",
"key",
",",
"keyInfo",
")",
"=",
"line",
".",
"split",
"(",
"':'",
")",
"latest",
"[",
"key",
"]",
"=",
"keyInfo",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
".",
"replace",
"(",
"'\\r'",
",",
"''",
")",
"return",
"latest"
] | fetch info about the latest availiable version . | train | false |
def make_event():
    """Create an event object and return it together with its caller."""
    evt = event()
    caller = evt.make_caller()
    return (evt, caller)
| [
"def",
"make_event",
"(",
")",
":",
"res",
"=",
"event",
"(",
")",
"return",
"(",
"res",
",",
"res",
".",
"make_caller",
"(",
")",
")"
] | creates an event object tuple . | train | false |
def validate_ok_for_replace(replacement):
    """Validate that *replacement* is a mapping without top-level $ operators.

    Raises ``ValueError`` when the first key starts with '$'; raw BSON
    documents and empty mappings are accepted as-is.
    """
    validate_is_mapping('replacement', replacement)
    if not replacement or isinstance(replacement, RawBSONDocument):
        return
    first_key = next(iter(replacement))
    if first_key.startswith('$'):
        raise ValueError('replacement can not include $ operators')
| [
"def",
"validate_ok_for_replace",
"(",
"replacement",
")",
":",
"validate_is_mapping",
"(",
"'replacement'",
",",
"replacement",
")",
"if",
"(",
"replacement",
"and",
"(",
"not",
"isinstance",
"(",
"replacement",
",",
"RawBSONDocument",
")",
")",
")",
":",
"first",
"=",
"next",
"(",
"iter",
"(",
"replacement",
")",
")",
"if",
"first",
".",
"startswith",
"(",
"'$'",
")",
":",
"raise",
"ValueError",
"(",
"'replacement can not include $ operators'",
")"
] | validate a replacement document . | train | true |
def versioned_bucket_lister(bucket, prefix='', delimiter='', marker='', generation_marker='', headers=None):
    """Generator yielding every object version in *bucket*, one page at a time."""
    truncated = True
    while truncated:
        page = bucket.get_all_versions(prefix=prefix, marker=marker,
                                       generation_marker=generation_marker,
                                       delimiter=delimiter, headers=headers,
                                       max_keys=999)
        for version in page:
            yield version
        # Advance the pagination cursors for the next request; stop once the
        # service reports the listing is no longer truncated.
        marker = page.next_marker
        generation_marker = page.next_generation_marker
        truncated = page.is_truncated
| [
"def",
"versioned_bucket_lister",
"(",
"bucket",
",",
"prefix",
"=",
"''",
",",
"delimiter",
"=",
"''",
",",
"marker",
"=",
"''",
",",
"generation_marker",
"=",
"''",
",",
"headers",
"=",
"None",
")",
":",
"more_results",
"=",
"True",
"k",
"=",
"None",
"while",
"more_results",
":",
"rs",
"=",
"bucket",
".",
"get_all_versions",
"(",
"prefix",
"=",
"prefix",
",",
"marker",
"=",
"marker",
",",
"generation_marker",
"=",
"generation_marker",
",",
"delimiter",
"=",
"delimiter",
",",
"headers",
"=",
"headers",
",",
"max_keys",
"=",
"999",
")",
"for",
"k",
"in",
"rs",
":",
"(",
"yield",
"k",
")",
"marker",
"=",
"rs",
".",
"next_marker",
"generation_marker",
"=",
"rs",
".",
"next_generation_marker",
"more_results",
"=",
"rs",
".",
"is_truncated"
] | a generator function for listing versioned objects . | train | false |
def get_redirect():
    """Return the validated ``return_to`` target as UTF-8 bytes, or None.

    The target is accepted only when its accompanying ``hash`` parameter
    matches the expected message hash.
    """
    return_to = request.params.get('return_to')
    hash_given = request.params.get('hash', '')
    if return_to and hash_given and hash_given == get_message_hash(return_to):
        return return_to.encode('utf-8')
    return None
| [
"def",
"get_redirect",
"(",
")",
":",
"return_to",
"=",
"request",
".",
"params",
".",
"get",
"(",
"'return_to'",
")",
"hash_given",
"=",
"request",
".",
"params",
".",
"get",
"(",
"'hash'",
",",
"''",
")",
"if",
"(",
"not",
"(",
"return_to",
"and",
"hash_given",
")",
")",
":",
"return",
"None",
"hash_expected",
"=",
"get_message_hash",
"(",
"return_to",
")",
"if",
"(",
"hash_given",
"==",
"hash_expected",
")",
":",
"return",
"return_to",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"None"
] | checks the return_to value against the hash . | train | false |
def _read_overrides(overrides_file):
    """Parse pattern/replacement pairs from *overrides_file* into a dict.

    Records are triples of lines: pattern, replacement, blank separator;
    '#'-prefixed lines are comments.  Returns {} for a falsy file object.
    """
    if not overrides_file:
        return {}
    lines = [line for line in overrides_file.readlines()
             if not line.startswith('#')]
    # Every third line must be blank — it separates the records.
    _assert_is_all_blank_lines(lines[2::3], overrides_file)
    return dict(zip(lines[0::3], lines[1::3]))
| [
"def",
"_read_overrides",
"(",
"overrides_file",
")",
":",
"if",
"(",
"not",
"overrides_file",
")",
":",
"return",
"{",
"}",
"overrides_lines",
"=",
"[",
"line",
"for",
"line",
"in",
"overrides_file",
".",
"readlines",
"(",
")",
"if",
"(",
"not",
"line",
".",
"startswith",
"(",
"'#'",
")",
")",
"]",
"overrides_pairs",
"=",
"zip",
"(",
"overrides_lines",
"[",
"0",
":",
":",
"3",
"]",
",",
"overrides_lines",
"[",
"1",
":",
":",
"3",
"]",
")",
"_assert_is_all_blank_lines",
"(",
"overrides_lines",
"[",
"2",
":",
":",
"3",
"]",
",",
"overrides_file",
")",
"return",
"dict",
"(",
"overrides_pairs",
")"
] | read pattern overrides from overrides_file . | train | false |
def is_possible_number(numobj):
    """True when the detailed check classifies *numobj* as IS_POSSIBLE."""
    reason = is_possible_number_with_reason(numobj)
    return reason == ValidationResult.IS_POSSIBLE
| [
"def",
"is_possible_number",
"(",
"numobj",
")",
":",
"return",
"(",
"is_possible_number_with_reason",
"(",
"numobj",
")",
"==",
"ValidationResult",
".",
"IS_POSSIBLE",
")"
] | convenience wrapper around is_possible_number_with_reason . | train | false |
def push_status_message(message, kind='warning', dismissible=True, trust=True, jumbotron=False):
    """Queue a flash/status banner in the session for the next page load.

    Outside a request context, an 'error' message is re-raised as a DRF
    ValidationError (API code path); other kinds are silently dropped.
    """
    try:
        statuses = session.data.get('status')
    except RuntimeError as e:
        # Flask-style sessions raise RuntimeError with this exact message
        # when there is no active request (e.g. called from the API or a task).
        exception_message = getattr(e, 'message', None)
        if (exception_message == 'working outside of request context'):
            if (kind == 'error'):
                # Local import avoids a hard dependency for the common path.
                from rest_framework.exceptions import ValidationError
                raise ValidationError(message)
            # Non-error banners are meaningless without a session; drop them.
            return
        else:
            raise
    if (not statuses):
        statuses = []
    # Map the message kind to a CSS class, defaulting to 'warning'.
    css_class = TYPE_MAP.get(kind, 'warning')
    statuses.append(Status(message=message, jumbotron=jumbotron, css_class=css_class, dismissible=dismissible, trust=trust))
    # Write back so the session layer persists the updated list.
    session.data['status'] = statuses
| [
"def",
"push_status_message",
"(",
"message",
",",
"kind",
"=",
"'warning'",
",",
"dismissible",
"=",
"True",
",",
"trust",
"=",
"True",
",",
"jumbotron",
"=",
"False",
")",
":",
"try",
":",
"statuses",
"=",
"session",
".",
"data",
".",
"get",
"(",
"'status'",
")",
"except",
"RuntimeError",
"as",
"e",
":",
"exception_message",
"=",
"getattr",
"(",
"e",
",",
"'message'",
",",
"None",
")",
"if",
"(",
"exception_message",
"==",
"'working outside of request context'",
")",
":",
"if",
"(",
"kind",
"==",
"'error'",
")",
":",
"from",
"rest_framework",
".",
"exceptions",
"import",
"ValidationError",
"raise",
"ValidationError",
"(",
"message",
")",
"return",
"else",
":",
"raise",
"if",
"(",
"not",
"statuses",
")",
":",
"statuses",
"=",
"[",
"]",
"css_class",
"=",
"TYPE_MAP",
".",
"get",
"(",
"kind",
",",
"'warning'",
")",
"statuses",
".",
"append",
"(",
"Status",
"(",
"message",
"=",
"message",
",",
"jumbotron",
"=",
"jumbotron",
",",
"css_class",
"=",
"css_class",
",",
"dismissible",
"=",
"dismissible",
",",
"trust",
"=",
"trust",
")",
")",
"session",
".",
"data",
"[",
"'status'",
"]",
"=",
"statuses"
] | push a status message that will be displayed as a banner on the next page loaded by the user . | train | false |
def migRing(populations, k, selection, replacement=None, migarray=None):
    """Perform a ring migration of *k* individuals between demes, in place.

    *selection* picks the emigrants of each deme; *replacement* (when given)
    picks which individuals the incoming emigrants overwrite — otherwise the
    emigrants themselves are replaced.  *migarray* maps source deme index to
    destination deme index; default is the ring i -> i+1, last -> 0.
    """
    nbr_demes = len(populations)
    if (migarray is None):
        # Python 2: range() returns a list, so list concatenation builds the
        # default ring topology directly.
        migarray = (range(1, nbr_demes) + [0])
    immigrants = [[] for i in xrange(nbr_demes)]
    emigrants = [[] for i in xrange(nbr_demes)]
    for from_deme in xrange(nbr_demes):
        emigrants[from_deme].extend(selection(populations[from_deme], k))
        if (replacement is None):
            # NOTE: this deliberately makes immigrants[from_deme] an ALIAS of
            # emigrants[from_deme] — the selected emigrants are the slots the
            # incoming individuals will occupy.  Do not "deep copy" this.
            immigrants[from_deme] = emigrants[from_deme]
        else:
            immigrants[from_deme].extend(replacement(populations[from_deme], k))
    for (from_deme, to_deme) in enumerate(migarray):
        for (i, immigrant) in enumerate(immigrants[to_deme]):
            # Overwrite the chosen individual in the destination deme with
            # the i-th emigrant of the source deme.  list.index finds the
            # FIRST equal individual — duplicates resolve to the earliest slot.
            indx = populations[to_deme].index(immigrant)
            populations[to_deme][indx] = emigrants[from_deme][i]
| [
"def",
"migRing",
"(",
"populations",
",",
"k",
",",
"selection",
",",
"replacement",
"=",
"None",
",",
"migarray",
"=",
"None",
")",
":",
"nbr_demes",
"=",
"len",
"(",
"populations",
")",
"if",
"(",
"migarray",
"is",
"None",
")",
":",
"migarray",
"=",
"(",
"range",
"(",
"1",
",",
"nbr_demes",
")",
"+",
"[",
"0",
"]",
")",
"immigrants",
"=",
"[",
"[",
"]",
"for",
"i",
"in",
"xrange",
"(",
"nbr_demes",
")",
"]",
"emigrants",
"=",
"[",
"[",
"]",
"for",
"i",
"in",
"xrange",
"(",
"nbr_demes",
")",
"]",
"for",
"from_deme",
"in",
"xrange",
"(",
"nbr_demes",
")",
":",
"emigrants",
"[",
"from_deme",
"]",
".",
"extend",
"(",
"selection",
"(",
"populations",
"[",
"from_deme",
"]",
",",
"k",
")",
")",
"if",
"(",
"replacement",
"is",
"None",
")",
":",
"immigrants",
"[",
"from_deme",
"]",
"=",
"emigrants",
"[",
"from_deme",
"]",
"else",
":",
"immigrants",
"[",
"from_deme",
"]",
".",
"extend",
"(",
"replacement",
"(",
"populations",
"[",
"from_deme",
"]",
",",
"k",
")",
")",
"for",
"(",
"from_deme",
",",
"to_deme",
")",
"in",
"enumerate",
"(",
"migarray",
")",
":",
"for",
"(",
"i",
",",
"immigrant",
")",
"in",
"enumerate",
"(",
"immigrants",
"[",
"to_deme",
"]",
")",
":",
"indx",
"=",
"populations",
"[",
"to_deme",
"]",
".",
"index",
"(",
"immigrant",
")",
"populations",
"[",
"to_deme",
"]",
"[",
"indx",
"]",
"=",
"emigrants",
"[",
"from_deme",
"]",
"[",
"i",
"]"
] | perform a ring migration between the *populations* . | train | false |
def denoise_tv_chambolle(im, weight=0.1, eps=0.0002, n_iter_max=200, multichannel=False):
    """Total-variation (Chambolle) denoising of an n-dimensional image.

    With ``multichannel=True`` the last axis is treated as channels and each
    channel is denoised independently.
    """
    if im.dtype.kind != 'f':
        # Work in floating point; non-float images are converted first.
        im = img_as_float(im)
    if not multichannel:
        return _denoise_tv_chambolle_nd(im, weight, eps, n_iter_max)
    out = np.zeros_like(im)
    for channel in range(im.shape[-1]):
        out[..., channel] = _denoise_tv_chambolle_nd(im[..., channel],
                                                     weight, eps, n_iter_max)
    return out
| [
"def",
"denoise_tv_chambolle",
"(",
"im",
",",
"weight",
"=",
"0.1",
",",
"eps",
"=",
"0.0002",
",",
"n_iter_max",
"=",
"200",
",",
"multichannel",
"=",
"False",
")",
":",
"im_type",
"=",
"im",
".",
"dtype",
"if",
"(",
"not",
"(",
"im_type",
".",
"kind",
"==",
"'f'",
")",
")",
":",
"im",
"=",
"img_as_float",
"(",
"im",
")",
"if",
"multichannel",
":",
"out",
"=",
"np",
".",
"zeros_like",
"(",
"im",
")",
"for",
"c",
"in",
"range",
"(",
"im",
".",
"shape",
"[",
"(",
"-",
"1",
")",
"]",
")",
":",
"out",
"[",
"...",
",",
"c",
"]",
"=",
"_denoise_tv_chambolle_nd",
"(",
"im",
"[",
"...",
",",
"c",
"]",
",",
"weight",
",",
"eps",
",",
"n_iter_max",
")",
"else",
":",
"out",
"=",
"_denoise_tv_chambolle_nd",
"(",
"im",
",",
"weight",
",",
"eps",
",",
"n_iter_max",
")",
"return",
"out"
] | perform total-variation denoising on n-dimensional images . | train | false |
def _get_featurestate(name):
    """Return the cached FeatureState for *name*, creating it on first use."""
    cached = _featurestate_cache.get(name)
    if cached is not None:
        return cached
    state = FeatureState(name, _world)
    _featurestate_cache[name] = state
    return state
| [
"def",
"_get_featurestate",
"(",
"name",
")",
":",
"featurestate",
"=",
"_featurestate_cache",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"(",
"featurestate",
"is",
"None",
")",
":",
"featurestate",
"=",
"FeatureState",
"(",
"name",
",",
"_world",
")",
"_featurestate_cache",
"[",
"name",
"]",
"=",
"featurestate",
"return",
"featurestate"
] | get a featurestate object for this feature . | train | false |
def sorted_proposals(proposals, scopepref=None, typepref=None):
    """Return the given code-assist proposals sorted by scope/type preference."""
    return _ProposalSorter(proposals, scopepref, typepref).get_sorted_proposal_list()
| [
"def",
"sorted_proposals",
"(",
"proposals",
",",
"scopepref",
"=",
"None",
",",
"typepref",
"=",
"None",
")",
":",
"sorter",
"=",
"_ProposalSorter",
"(",
"proposals",
",",
"scopepref",
",",
"typepref",
")",
"return",
"sorter",
".",
"get_sorted_proposal_list",
"(",
")"
] | sort a list of proposals return a sorted list of the given codeassistproposals . | train | true |
def displayhosts(recordsgen, out=sys.stdout, **kargs):
    """Display each scan record from *recordsgen* on *out*.

    On an interactive terminal, wait for Enter between records; otherwise
    separate them with a blank line.
    """
    for record in recordsgen:
        displayhost(record, out=out, **kargs)
        if os.isatty(out.fileno()):
            raw_input()
        else:
            out.write('\n')
| [
"def",
"displayhosts",
"(",
"recordsgen",
",",
"out",
"=",
"sys",
".",
"stdout",
",",
"**",
"kargs",
")",
":",
"for",
"record",
"in",
"recordsgen",
":",
"displayhost",
"(",
"record",
",",
"out",
"=",
"out",
",",
"**",
"kargs",
")",
"if",
"os",
".",
"isatty",
"(",
"out",
".",
"fileno",
"(",
")",
")",
":",
"raw_input",
"(",
")",
"else",
":",
"out",
".",
"write",
"(",
"'\\n'",
")"
] | displays the nmap scan results generated by recordsgen . | train | false |
def _extract_possible_number(number):
    """Trim *number* down to the span most likely to be a phone number.

    Returns the empty string when no plausible starting character is found.
    """
    start = _VALID_START_CHAR_PATTERN.search(number)
    if not start:
        return U_EMPTY_STRING
    number = number[start.start():]
    # Strip characters that cannot end a phone number.
    trailing = _UNWANTED_END_CHAR_PATTERN.search(number)
    if trailing:
        number = number[:trailing.start()]
    # If a second number appears to begin, keep only the first.
    second = _SECOND_NUMBER_START_PATTERN.search(number)
    if second:
        number = number[:second.start()]
    return number
| [
"def",
"_extract_possible_number",
"(",
"number",
")",
":",
"match",
"=",
"_VALID_START_CHAR_PATTERN",
".",
"search",
"(",
"number",
")",
"if",
"match",
":",
"number",
"=",
"number",
"[",
"match",
".",
"start",
"(",
")",
":",
"]",
"trailing_chars_match",
"=",
"_UNWANTED_END_CHAR_PATTERN",
".",
"search",
"(",
"number",
")",
"if",
"trailing_chars_match",
":",
"number",
"=",
"number",
"[",
":",
"trailing_chars_match",
".",
"start",
"(",
")",
"]",
"second_number_match",
"=",
"_SECOND_NUMBER_START_PATTERN",
".",
"search",
"(",
"number",
")",
"if",
"second_number_match",
":",
"number",
"=",
"number",
"[",
":",
"second_number_match",
".",
"start",
"(",
")",
"]",
"return",
"number",
"else",
":",
"return",
"U_EMPTY_STRING"
] | attempt to extract a possible number from the string passed in . | train | true |
def set_user_agent(http, user_agent):
    """Patch *http* so every request carries *user_agent*; returns *http*."""
    request_orig = http.request
    def new_request(uri, method='GET', body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
        'Modify the request headers to add the user-agent.'
        if headers is None:
            headers = {}
        # Prepend our agent to any caller-supplied value; mutates the
        # caller's dict in place, matching the original behaviour.
        if 'user-agent' in headers:
            headers['user-agent'] = user_agent + ' ' + headers['user-agent']
        else:
            headers['user-agent'] = user_agent
        return request_orig(uri, method, body, headers,
                            redirections, connection_type)
    http.request = new_request
    return http
| [
"def",
"set_user_agent",
"(",
"http",
",",
"user_agent",
")",
":",
"request_orig",
"=",
"http",
".",
"request",
"def",
"new_request",
"(",
"uri",
",",
"method",
"=",
"'GET'",
",",
"body",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"redirections",
"=",
"httplib2",
".",
"DEFAULT_MAX_REDIRECTS",
",",
"connection_type",
"=",
"None",
")",
":",
"if",
"(",
"headers",
"is",
"None",
")",
":",
"headers",
"=",
"{",
"}",
"if",
"(",
"'user-agent'",
"in",
"headers",
")",
":",
"headers",
"[",
"'user-agent'",
"]",
"=",
"(",
"(",
"user_agent",
"+",
"' '",
")",
"+",
"headers",
"[",
"'user-agent'",
"]",
")",
"else",
":",
"headers",
"[",
"'user-agent'",
"]",
"=",
"user_agent",
"(",
"resp",
",",
"content",
")",
"=",
"request_orig",
"(",
"uri",
",",
"method",
",",
"body",
",",
"headers",
",",
"redirections",
",",
"connection_type",
")",
"return",
"(",
"resp",
",",
"content",
")",
"http",
".",
"request",
"=",
"new_request",
"return",
"http"
] | set the user-agent on every request . | train | false |
41,567 | def select_url_from_video_api(html):
c = json.loads(html)
video_dic = {}
for i in c['mp4']:
video_dic[i['code']] = i['http']
quality_preference_list = ['sdvd', 'hd', 'dvd', 'sd']
url = [video_dic[quality] for quality in quality_preference_list if (quality in video_dic)][0]
html = get_html(url)
c = json.loads(html)
return [i['urls'][0] for i in c['playlist']]
| [
"def",
"select_url_from_video_api",
"(",
"html",
")",
":",
"c",
"=",
"json",
".",
"loads",
"(",
"html",
")",
"video_dic",
"=",
"{",
"}",
"for",
"i",
"in",
"c",
"[",
"'mp4'",
"]",
":",
"video_dic",
"[",
"i",
"[",
"'code'",
"]",
"]",
"=",
"i",
"[",
"'http'",
"]",
"quality_preference_list",
"=",
"[",
"'sdvd'",
",",
"'hd'",
",",
"'dvd'",
",",
"'sd'",
"]",
"url",
"=",
"[",
"video_dic",
"[",
"quality",
"]",
"for",
"quality",
"in",
"quality_preference_list",
"if",
"(",
"quality",
"in",
"video_dic",
")",
"]",
"[",
"0",
"]",
"html",
"=",
"get_html",
"(",
"url",
")",
"c",
"=",
"json",
".",
"loads",
"(",
"html",
")",
"return",
"[",
"i",
"[",
"'urls'",
"]",
"[",
"0",
"]",
"for",
"i",
"in",
"c",
"[",
"'playlist'",
"]",
"]"
] | str->str choose the best one . | train | false |
41,568 | def retry_all_jobs():
history_db = sabnzbd.connect_db()
return NzbQueue.do.retry_all_jobs(history_db)
| [
"def",
"retry_all_jobs",
"(",
")",
":",
"history_db",
"=",
"sabnzbd",
".",
"connect_db",
"(",
")",
"return",
"NzbQueue",
".",
"do",
".",
"retry_all_jobs",
"(",
"history_db",
")"
] | re enter all failed jobs in the download queue . | train | false |
41,569 | def listfy(data):
if isinstance(data, basestring):
data = data.split(',')
elif (not isinstance(data, list)):
data = [data]
return data
| [
"def",
"listfy",
"(",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"basestring",
")",
":",
"data",
"=",
"data",
".",
"split",
"(",
"','",
")",
"elif",
"(",
"not",
"isinstance",
"(",
"data",
",",
"list",
")",
")",
":",
"data",
"=",
"[",
"data",
"]",
"return",
"data"
] | check and convert data to list . | train | false |
41,570 | def Opt(re):
result = Alt(re, Empty)
result.str = ('Opt(%s)' % re)
return result
| [
"def",
"Opt",
"(",
"re",
")",
":",
"result",
"=",
"Alt",
"(",
"re",
",",
"Empty",
")",
"result",
".",
"str",
"=",
"(",
"'Opt(%s)'",
"%",
"re",
")",
"return",
"result"
] | opt is an re which matches either |re| or the empty string . | train | false |
41,571 | def _valid_str(value):
return (isinstance(value, six.string_types) and (len(value) > 0))
| [
"def",
"_valid_str",
"(",
"value",
")",
":",
"return",
"(",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
"and",
"(",
"len",
"(",
"value",
")",
">",
"0",
")",
")"
] | valid str? . | train | false |
41,572 | def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None):
(info, new_pointer) = _parse(encoded_data, len(encoded_data), pointer)
return (_build(spec=spec, spec_params=spec_params, *info), new_pointer)
| [
"def",
"_parse_build",
"(",
"encoded_data",
",",
"pointer",
"=",
"0",
",",
"spec",
"=",
"None",
",",
"spec_params",
"=",
"None",
")",
":",
"(",
"info",
",",
"new_pointer",
")",
"=",
"_parse",
"(",
"encoded_data",
",",
"len",
"(",
"encoded_data",
")",
",",
"pointer",
")",
"return",
"(",
"_build",
"(",
"spec",
"=",
"spec",
",",
"spec_params",
"=",
"spec_params",
",",
"*",
"info",
")",
",",
"new_pointer",
")"
] | parses a byte string generically . | train | false |
41,573 | def select_node(nodes):
if nodes:
return random.choice(nodes)
else:
raise EmptyClusterError('Cluster contains no nodes.')
| [
"def",
"select_node",
"(",
"nodes",
")",
":",
"if",
"nodes",
":",
"return",
"random",
".",
"choice",
"(",
"nodes",
")",
"else",
":",
"raise",
"EmptyClusterError",
"(",
"'Cluster contains no nodes.'",
")"
] | select a node from a list of nodes . | train | false |
41,574 | def pportInPaperOut():
if (port.DlPortReadPortUchar(statusRegAdrs) & 32):
return 1
else:
return 0
| [
"def",
"pportInPaperOut",
"(",
")",
":",
"if",
"(",
"port",
".",
"DlPortReadPortUchar",
"(",
"statusRegAdrs",
")",
"&",
"32",
")",
":",
"return",
"1",
"else",
":",
"return",
"0"
] | input from paper out pin . | train | false |
41,575 | def set_startup_disk(path):
if (path not in list_startup_disks()):
msg = 'Invalid value passed for path.\nMust be a valid startup disk as found in system.list_startup_disks.\nPassed: {0}'.format(path)
raise SaltInvocationError(msg)
cmd = 'systemsetup -setstartupdisk {0}'.format(path)
salt.utils.mac_utils.execute_return_result(cmd)
return salt.utils.mac_utils.confirm_updated(path, get_startup_disk)
| [
"def",
"set_startup_disk",
"(",
"path",
")",
":",
"if",
"(",
"path",
"not",
"in",
"list_startup_disks",
"(",
")",
")",
":",
"msg",
"=",
"'Invalid value passed for path.\\nMust be a valid startup disk as found in system.list_startup_disks.\\nPassed: {0}'",
".",
"format",
"(",
"path",
")",
"raise",
"SaltInvocationError",
"(",
"msg",
")",
"cmd",
"=",
"'systemsetup -setstartupdisk {0}'",
".",
"format",
"(",
"path",
")",
"salt",
".",
"utils",
".",
"mac_utils",
".",
"execute_return_result",
"(",
"cmd",
")",
"return",
"salt",
".",
"utils",
".",
"mac_utils",
".",
"confirm_updated",
"(",
"path",
",",
"get_startup_disk",
")"
] | set the current startup disk to the indicated path . | train | false |
41,577 | def get_texpath():
def _get_texpath():
try:
texpath = get_setting(sublime.platform(), {}).get('texpath')
except AttributeError:
exc_info = sys.exc_info
try:
reload(sys.modules[get_texpath.__module__])
texpath = get_setting(sublime.platform(), {}).get('texpath')
except:
reraise(*exc_info)
return (expand_vars(texpath) if (texpath is not None) else None)
return run_on_main_thread(_get_texpath, default_value=None)
| [
"def",
"get_texpath",
"(",
")",
":",
"def",
"_get_texpath",
"(",
")",
":",
"try",
":",
"texpath",
"=",
"get_setting",
"(",
"sublime",
".",
"platform",
"(",
")",
",",
"{",
"}",
")",
".",
"get",
"(",
"'texpath'",
")",
"except",
"AttributeError",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"try",
":",
"reload",
"(",
"sys",
".",
"modules",
"[",
"get_texpath",
".",
"__module__",
"]",
")",
"texpath",
"=",
"get_setting",
"(",
"sublime",
".",
"platform",
"(",
")",
",",
"{",
"}",
")",
".",
"get",
"(",
"'texpath'",
")",
"except",
":",
"reraise",
"(",
"*",
"exc_info",
")",
"return",
"(",
"expand_vars",
"(",
"texpath",
")",
"if",
"(",
"texpath",
"is",
"not",
"None",
")",
"else",
"None",
")",
"return",
"run_on_main_thread",
"(",
"_get_texpath",
",",
"default_value",
"=",
"None",
")"
] | returns the texpath setting with any environment variables expanded . | train | false |
41,580 | @receiver(post_save, sender=CourseCreator)
def post_save_callback(sender, **kwargs):
instance = kwargs['instance']
if (instance.state != instance.orig_state):
granted_state_change = ((instance.state == CourseCreator.GRANTED) or (instance.orig_state == CourseCreator.GRANTED))
if granted_state_change:
assert hasattr(instance, 'admin'), 'Must have stored staff user to change course creator group'
update_creator_state.send(sender=sender, caller=instance.admin, user=instance.user, state=instance.state)
if ((instance.state == CourseCreator.DENIED) or granted_state_change):
send_user_notification.send(sender=sender, user=instance.user, state=instance.state)
if (instance.state == CourseCreator.PENDING):
send_admin_notification.send(sender=sender, user=instance.user)
instance.state_changed = timezone.now()
instance.orig_state = instance.state
instance.save()
| [
"@",
"receiver",
"(",
"post_save",
",",
"sender",
"=",
"CourseCreator",
")",
"def",
"post_save_callback",
"(",
"sender",
",",
"**",
"kwargs",
")",
":",
"instance",
"=",
"kwargs",
"[",
"'instance'",
"]",
"if",
"(",
"instance",
".",
"state",
"!=",
"instance",
".",
"orig_state",
")",
":",
"granted_state_change",
"=",
"(",
"(",
"instance",
".",
"state",
"==",
"CourseCreator",
".",
"GRANTED",
")",
"or",
"(",
"instance",
".",
"orig_state",
"==",
"CourseCreator",
".",
"GRANTED",
")",
")",
"if",
"granted_state_change",
":",
"assert",
"hasattr",
"(",
"instance",
",",
"'admin'",
")",
",",
"'Must have stored staff user to change course creator group'",
"update_creator_state",
".",
"send",
"(",
"sender",
"=",
"sender",
",",
"caller",
"=",
"instance",
".",
"admin",
",",
"user",
"=",
"instance",
".",
"user",
",",
"state",
"=",
"instance",
".",
"state",
")",
"if",
"(",
"(",
"instance",
".",
"state",
"==",
"CourseCreator",
".",
"DENIED",
")",
"or",
"granted_state_change",
")",
":",
"send_user_notification",
".",
"send",
"(",
"sender",
"=",
"sender",
",",
"user",
"=",
"instance",
".",
"user",
",",
"state",
"=",
"instance",
".",
"state",
")",
"if",
"(",
"instance",
".",
"state",
"==",
"CourseCreator",
".",
"PENDING",
")",
":",
"send_admin_notification",
".",
"send",
"(",
"sender",
"=",
"sender",
",",
"user",
"=",
"instance",
".",
"user",
")",
"instance",
".",
"state_changed",
"=",
"timezone",
".",
"now",
"(",
")",
"instance",
".",
"orig_state",
"=",
"instance",
".",
"state",
"instance",
".",
"save",
"(",
")"
] | event changes to user preferences . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.