id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
18,210 | def print_bad():
print BAD_OUTPUT
print >>sys.stdout, BAD_OUTPUT
print >>sys.stderr, BAD_OUTPUT
sys.stdout.write(BAD_OUTPUT)
sys.stdout.writelines([BAD_OUTPUT])
sys.stderr.write(BAD_OUTPUT)
sys.stderr.writelines([BAD_OUTPUT])
| [
"def",
"print_bad",
"(",
")",
":",
"print",
"BAD_OUTPUT",
"print",
">>",
"sys",
".",
"stdout",
",",
"BAD_OUTPUT",
"print",
">>",
"sys",
".",
"stderr",
",",
"BAD_OUTPUT",
"sys",
".",
"stdout",
".",
"write",
"(",
"BAD_OUTPUT",
")",
"sys",
".",
"stdout",
".",
"writelines",
"(",
"[",
"BAD_OUTPUT",
"]",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"BAD_OUTPUT",
")",
"sys",
".",
"stderr",
".",
"writelines",
"(",
"[",
"BAD_OUTPUT",
"]",
")"
] | if any of this makes it to stdout/stderr . | train | false |
18,211 | def vectorized_is_element(array, choices):
return vectorize(choices.__contains__, otypes=[bool])(array)
| [
"def",
"vectorized_is_element",
"(",
"array",
",",
"choices",
")",
":",
"return",
"vectorize",
"(",
"choices",
".",
"__contains__",
",",
"otypes",
"=",
"[",
"bool",
"]",
")",
"(",
"array",
")"
] | check if each element of array is in choices . | train | false |
18,212 | def fun_takes_kwargs(fun, kwlist=[]):
argspec = getattr(fun, 'argspec', getargspec(fun))
(args, _varargs, keywords, _defaults) = argspec
if (keywords != None):
return kwlist
return filter(partial(operator.contains, args), kwlist)
| [
"def",
"fun_takes_kwargs",
"(",
"fun",
",",
"kwlist",
"=",
"[",
"]",
")",
":",
"argspec",
"=",
"getattr",
"(",
"fun",
",",
"'argspec'",
",",
"getargspec",
"(",
"fun",
")",
")",
"(",
"args",
",",
"_varargs",
",",
"keywords",
",",
"_defaults",
")",
"=",
"argspec",
"if",
"(",
"keywords",
"!=",
"None",
")",
":",
"return",
"kwlist",
"return",
"filter",
"(",
"partial",
"(",
"operator",
".",
"contains",
",",
"args",
")",
",",
"kwlist",
")"
] | with a function . | train | false |
18,213 | @pytest.mark.multithread
def test_num_reads_threads():
import threading
def count_reads(rparser):
for _ in rparser:
pass
n_threads = 4
threads = []
rparser = ReadParser(utils.get_test_data('100-reads.fq.gz'))
for _ in range(n_threads):
thr = threading.Thread(target=count_reads, args=[rparser])
threads.append(thr)
thr.start()
for thr in threads:
thr.join()
assert (rparser.num_reads == 100)
| [
"@",
"pytest",
".",
"mark",
".",
"multithread",
"def",
"test_num_reads_threads",
"(",
")",
":",
"import",
"threading",
"def",
"count_reads",
"(",
"rparser",
")",
":",
"for",
"_",
"in",
"rparser",
":",
"pass",
"n_threads",
"=",
"4",
"threads",
"=",
"[",
"]",
"rparser",
"=",
"ReadParser",
"(",
"utils",
".",
"get_test_data",
"(",
"'100-reads.fq.gz'",
")",
")",
"for",
"_",
"in",
"range",
"(",
"n_threads",
")",
":",
"thr",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"count_reads",
",",
"args",
"=",
"[",
"rparser",
"]",
")",
"threads",
".",
"append",
"(",
"thr",
")",
"thr",
".",
"start",
"(",
")",
"for",
"thr",
"in",
"threads",
":",
"thr",
".",
"join",
"(",
")",
"assert",
"(",
"rparser",
".",
"num_reads",
"==",
"100",
")"
] | test threadsaftey of readparsers read counting . | train | false |
18,216 | def encodeBase58(num, alphabet=ALPHABET):
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = (num % base)
num = (num // base)
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
| [
"def",
"encodeBase58",
"(",
"num",
",",
"alphabet",
"=",
"ALPHABET",
")",
":",
"if",
"(",
"num",
"==",
"0",
")",
":",
"return",
"alphabet",
"[",
"0",
"]",
"arr",
"=",
"[",
"]",
"base",
"=",
"len",
"(",
"alphabet",
")",
"while",
"num",
":",
"rem",
"=",
"(",
"num",
"%",
"base",
")",
"num",
"=",
"(",
"num",
"//",
"base",
")",
"arr",
".",
"append",
"(",
"alphabet",
"[",
"rem",
"]",
")",
"arr",
".",
"reverse",
"(",
")",
"return",
"''",
".",
"join",
"(",
"arr",
")"
] | encode a number in base x num: the number to encode alphabet: the alphabet to use for encoding . | train | true |
18,217 | def test_refresh_called_twice(refresher):
callbacks = Mock()
pgexecute = Mock()
special = Mock()
def dummy_bg_refresh(*args):
time.sleep(3)
refresher._bg_refresh = dummy_bg_refresh
actual1 = refresher.refresh(pgexecute, special, callbacks)
time.sleep(1)
assert (len(actual1) == 1)
assert (len(actual1[0]) == 4)
assert (actual1[0][3] == 'Auto-completion refresh started in the background.')
actual2 = refresher.refresh(pgexecute, special, callbacks)
time.sleep(1)
assert (len(actual2) == 1)
assert (len(actual2[0]) == 4)
assert (actual2[0][3] == 'Auto-completion refresh restarted.')
| [
"def",
"test_refresh_called_twice",
"(",
"refresher",
")",
":",
"callbacks",
"=",
"Mock",
"(",
")",
"pgexecute",
"=",
"Mock",
"(",
")",
"special",
"=",
"Mock",
"(",
")",
"def",
"dummy_bg_refresh",
"(",
"*",
"args",
")",
":",
"time",
".",
"sleep",
"(",
"3",
")",
"refresher",
".",
"_bg_refresh",
"=",
"dummy_bg_refresh",
"actual1",
"=",
"refresher",
".",
"refresh",
"(",
"pgexecute",
",",
"special",
",",
"callbacks",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"assert",
"(",
"len",
"(",
"actual1",
")",
"==",
"1",
")",
"assert",
"(",
"len",
"(",
"actual1",
"[",
"0",
"]",
")",
"==",
"4",
")",
"assert",
"(",
"actual1",
"[",
"0",
"]",
"[",
"3",
"]",
"==",
"'Auto-completion refresh started in the background.'",
")",
"actual2",
"=",
"refresher",
".",
"refresh",
"(",
"pgexecute",
",",
"special",
",",
"callbacks",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"assert",
"(",
"len",
"(",
"actual2",
")",
"==",
"1",
")",
"assert",
"(",
"len",
"(",
"actual2",
"[",
"0",
"]",
")",
"==",
"4",
")",
"assert",
"(",
"actual2",
"[",
"0",
"]",
"[",
"3",
"]",
"==",
"'Auto-completion refresh restarted.'",
")"
] | if refresh is called a second time . | train | false |
18,221 | def premetadata_create_container_info_table(self, conn, put_timestamp, _spi=None):
if (put_timestamp is None):
put_timestamp = Timestamp(0).internal
conn.executescript("\n CREATE TABLE container_stat (\n account TEXT,\n container TEXT,\n created_at TEXT,\n put_timestamp TEXT DEFAULT '0',\n delete_timestamp TEXT DEFAULT '0',\n object_count INTEGER,\n bytes_used INTEGER,\n reported_put_timestamp TEXT DEFAULT '0',\n reported_delete_timestamp TEXT DEFAULT '0',\n reported_object_count INTEGER DEFAULT 0,\n reported_bytes_used INTEGER DEFAULT 0,\n hash TEXT default '00000000000000000000000000000000',\n id TEXT,\n status TEXT DEFAULT '',\n status_changed_at TEXT DEFAULT '0'\n );\n\n INSERT INTO container_stat (object_count, bytes_used)\n VALUES (0, 0);\n ")
conn.execute('\n UPDATE container_stat\n SET account = ?, container = ?, created_at = ?, id = ?,\n put_timestamp = ?\n ', (self.account, self.container, Timestamp(time()).internal, str(uuid4()), put_timestamp))
| [
"def",
"premetadata_create_container_info_table",
"(",
"self",
",",
"conn",
",",
"put_timestamp",
",",
"_spi",
"=",
"None",
")",
":",
"if",
"(",
"put_timestamp",
"is",
"None",
")",
":",
"put_timestamp",
"=",
"Timestamp",
"(",
"0",
")",
".",
"internal",
"conn",
".",
"executescript",
"(",
"\"\\n CREATE TABLE container_stat (\\n account TEXT,\\n container TEXT,\\n created_at TEXT,\\n put_timestamp TEXT DEFAULT '0',\\n delete_timestamp TEXT DEFAULT '0',\\n object_count INTEGER,\\n bytes_used INTEGER,\\n reported_put_timestamp TEXT DEFAULT '0',\\n reported_delete_timestamp TEXT DEFAULT '0',\\n reported_object_count INTEGER DEFAULT 0,\\n reported_bytes_used INTEGER DEFAULT 0,\\n hash TEXT default '00000000000000000000000000000000',\\n id TEXT,\\n status TEXT DEFAULT '',\\n status_changed_at TEXT DEFAULT '0'\\n );\\n\\n INSERT INTO container_stat (object_count, bytes_used)\\n VALUES (0, 0);\\n \"",
")",
"conn",
".",
"execute",
"(",
"'\\n UPDATE container_stat\\n SET account = ?, container = ?, created_at = ?, id = ?,\\n put_timestamp = ?\\n '",
",",
"(",
"self",
".",
"account",
",",
"self",
".",
"container",
",",
"Timestamp",
"(",
"time",
"(",
")",
")",
".",
"internal",
",",
"str",
"(",
"uuid4",
"(",
")",
")",
",",
"put_timestamp",
")",
")"
] | copied from containerbroker before the metadata column was added; used for testing with testcontainerbrokerbeforemetadata . | train | false |
18,222 | def GetRequestFormatMode(request, method_metadata):
if request.path.startswith('/api/v2/'):
return JsonMode.PROTO3_JSON_MODE
if (hasattr(request, 'GET') and request.GET.get('strip_type_info', '')):
return JsonMode.GRR_TYPE_STRIPPED_JSON_MODE
for (http_method, unused_url, options) in method_metadata.http_methods:
if ((http_method == request.method) and options.get('strip_root_types', False)):
return JsonMode.GRR_ROOT_TYPES_STRIPPED_JSON_MODE
return JsonMode.GRR_JSON_MODE
| [
"def",
"GetRequestFormatMode",
"(",
"request",
",",
"method_metadata",
")",
":",
"if",
"request",
".",
"path",
".",
"startswith",
"(",
"'/api/v2/'",
")",
":",
"return",
"JsonMode",
".",
"PROTO3_JSON_MODE",
"if",
"(",
"hasattr",
"(",
"request",
",",
"'GET'",
")",
"and",
"request",
".",
"GET",
".",
"get",
"(",
"'strip_type_info'",
",",
"''",
")",
")",
":",
"return",
"JsonMode",
".",
"GRR_TYPE_STRIPPED_JSON_MODE",
"for",
"(",
"http_method",
",",
"unused_url",
",",
"options",
")",
"in",
"method_metadata",
".",
"http_methods",
":",
"if",
"(",
"(",
"http_method",
"==",
"request",
".",
"method",
")",
"and",
"options",
".",
"get",
"(",
"'strip_root_types'",
",",
"False",
")",
")",
":",
"return",
"JsonMode",
".",
"GRR_ROOT_TYPES_STRIPPED_JSON_MODE",
"return",
"JsonMode",
".",
"GRR_JSON_MODE"
] | returns json format mode corresponding to a given request and method . | train | true |
18,224 | def hash_from_ndarray(data):
if (not data.flags['C_CONTIGUOUS']):
data = numpy.ascontiguousarray(data)
return hash_from_code((((hash_from_code(data) + hash_from_code(str(data.shape))) + hash_from_code(str(data.strides))) + hash_from_code(str(data.dtype))))
| [
"def",
"hash_from_ndarray",
"(",
"data",
")",
":",
"if",
"(",
"not",
"data",
".",
"flags",
"[",
"'C_CONTIGUOUS'",
"]",
")",
":",
"data",
"=",
"numpy",
".",
"ascontiguousarray",
"(",
"data",
")",
"return",
"hash_from_code",
"(",
"(",
"(",
"(",
"hash_from_code",
"(",
"data",
")",
"+",
"hash_from_code",
"(",
"str",
"(",
"data",
".",
"shape",
")",
")",
")",
"+",
"hash_from_code",
"(",
"str",
"(",
"data",
".",
"strides",
")",
")",
")",
"+",
"hash_from_code",
"(",
"str",
"(",
"data",
".",
"dtype",
")",
")",
")",
")"
] | return a hash from an ndarray . | train | false |
18,226 | def _fix_tagging(value, params):
if (u'tag_type' in params):
required_tag_type = params[u'tag_type']
retag = False
if (required_tag_type != value.tag_type):
retag = True
elif ((required_tag_type == u'explicit') and (value.explicit_tag != params[u'tag'])):
retag = True
elif ((required_tag_type == u'implicit') and (value.tag != params[u'tag'])):
retag = True
if retag:
return value.retag(params[u'tag_type'], params[u'tag'])
return value
if value.tag_type:
return value.untag()
return value
| [
"def",
"_fix_tagging",
"(",
"value",
",",
"params",
")",
":",
"if",
"(",
"u'tag_type'",
"in",
"params",
")",
":",
"required_tag_type",
"=",
"params",
"[",
"u'tag_type'",
"]",
"retag",
"=",
"False",
"if",
"(",
"required_tag_type",
"!=",
"value",
".",
"tag_type",
")",
":",
"retag",
"=",
"True",
"elif",
"(",
"(",
"required_tag_type",
"==",
"u'explicit'",
")",
"and",
"(",
"value",
".",
"explicit_tag",
"!=",
"params",
"[",
"u'tag'",
"]",
")",
")",
":",
"retag",
"=",
"True",
"elif",
"(",
"(",
"required_tag_type",
"==",
"u'implicit'",
")",
"and",
"(",
"value",
".",
"tag",
"!=",
"params",
"[",
"u'tag'",
"]",
")",
")",
":",
"retag",
"=",
"True",
"if",
"retag",
":",
"return",
"value",
".",
"retag",
"(",
"params",
"[",
"u'tag_type'",
"]",
",",
"params",
"[",
"u'tag'",
"]",
")",
"return",
"value",
"if",
"value",
".",
"tag_type",
":",
"return",
"value",
".",
"untag",
"(",
")",
"return",
"value"
] | checks if a value is properly tagged based on the spec . | train | false |
18,227 | def find_orphaned_instances(xenapi):
ctxt = context.get_admin_context(read_deleted='only')
orphaned_instances = []
for (vm_ref, vm_rec) in _get_applicable_vm_recs(xenapi):
try:
uuid = vm_rec['other_config']['nova_uuid']
instance = db.api.instance_get_by_uuid(ctxt, uuid)
except (KeyError, exception.InstanceNotFound):
print_xen_object('INFO: Ignoring VM', vm_rec, indent_level=0)
continue
is_active_and_deleting = ((instance.vm_state == 'active') and (instance.task_state == 'deleting'))
is_zombie_vm = ((instance.vm_state != 'active') and timeutils.is_older_than(instance.updated_at, CONF.zombie_instance_updated_at_window))
if (is_active_and_deleting or is_zombie_vm):
orphaned_instances.append((vm_ref, vm_rec, instance))
return orphaned_instances
| [
"def",
"find_orphaned_instances",
"(",
"xenapi",
")",
":",
"ctxt",
"=",
"context",
".",
"get_admin_context",
"(",
"read_deleted",
"=",
"'only'",
")",
"orphaned_instances",
"=",
"[",
"]",
"for",
"(",
"vm_ref",
",",
"vm_rec",
")",
"in",
"_get_applicable_vm_recs",
"(",
"xenapi",
")",
":",
"try",
":",
"uuid",
"=",
"vm_rec",
"[",
"'other_config'",
"]",
"[",
"'nova_uuid'",
"]",
"instance",
"=",
"db",
".",
"api",
".",
"instance_get_by_uuid",
"(",
"ctxt",
",",
"uuid",
")",
"except",
"(",
"KeyError",
",",
"exception",
".",
"InstanceNotFound",
")",
":",
"print_xen_object",
"(",
"'INFO: Ignoring VM'",
",",
"vm_rec",
",",
"indent_level",
"=",
"0",
")",
"continue",
"is_active_and_deleting",
"=",
"(",
"(",
"instance",
".",
"vm_state",
"==",
"'active'",
")",
"and",
"(",
"instance",
".",
"task_state",
"==",
"'deleting'",
")",
")",
"is_zombie_vm",
"=",
"(",
"(",
"instance",
".",
"vm_state",
"!=",
"'active'",
")",
"and",
"timeutils",
".",
"is_older_than",
"(",
"instance",
".",
"updated_at",
",",
"CONF",
".",
"zombie_instance_updated_at_window",
")",
")",
"if",
"(",
"is_active_and_deleting",
"or",
"is_zombie_vm",
")",
":",
"orphaned_instances",
".",
"append",
"(",
"(",
"vm_ref",
",",
"vm_rec",
",",
"instance",
")",
")",
"return",
"orphaned_instances"
] | find and return a list of orphaned instances . | train | false |
18,228 | def tamper(payload, **kwargs):
retVal = payload
warnMsg = "you should consider usage of switch '--no-cast' along with "
warnMsg += ("tamper script '%s'" % os.path.basename(__file__).split('.')[0])
singleTimeWarnMessage(warnMsg)
match = re.search('(?i)MID\\((.+?)\\s*,\\s*(\\d+)\\s*\\,\\s*(\\d+)\\s*\\)', (payload or ''))
if match:
retVal = retVal.replace(match.group(0), ('MID(%s FROM %s FOR %s)' % (match.group(1), match.group(2), match.group(3))))
return retVal
| [
"def",
"tamper",
"(",
"payload",
",",
"**",
"kwargs",
")",
":",
"retVal",
"=",
"payload",
"warnMsg",
"=",
"\"you should consider usage of switch '--no-cast' along with \"",
"warnMsg",
"+=",
"(",
"\"tamper script '%s'\"",
"%",
"os",
".",
"path",
".",
"basename",
"(",
"__file__",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
"singleTimeWarnMessage",
"(",
"warnMsg",
")",
"match",
"=",
"re",
".",
"search",
"(",
"'(?i)MID\\\\((.+?)\\\\s*,\\\\s*(\\\\d+)\\\\s*\\\\,\\\\s*(\\\\d+)\\\\s*\\\\)'",
",",
"(",
"payload",
"or",
"''",
")",
")",
"if",
"match",
":",
"retVal",
"=",
"retVal",
".",
"replace",
"(",
"match",
".",
"group",
"(",
"0",
")",
",",
"(",
"'MID(%s FROM %s FOR %s)'",
"%",
"(",
"match",
".",
"group",
"(",
"1",
")",
",",
"match",
".",
"group",
"(",
"2",
")",
",",
"match",
".",
"group",
"(",
"3",
")",
")",
")",
")",
"return",
"retVal"
] | unicode-url-encodes non-encoded characters in a given payload requirement: * asp * asp . | train | false |
18,229 | def penntreebank2universal(token, tag):
if ((tag == 'IN') and (token.lower() in _subordinating_conjunctions)):
return CONJ
return _penntreebank2universal(token, tag)
| [
"def",
"penntreebank2universal",
"(",
"token",
",",
"tag",
")",
":",
"if",
"(",
"(",
"tag",
"==",
"'IN'",
")",
"and",
"(",
"token",
".",
"lower",
"(",
")",
"in",
"_subordinating_conjunctions",
")",
")",
":",
"return",
"CONJ",
"return",
"_penntreebank2universal",
"(",
"token",
",",
"tag",
")"
] | converts a penn treebank ii tag to a universal tag . | train | false |
18,230 | @dispatch(Expr, Mapping)
def compute(expr, d, return_type=no_default, **kwargs):
_reset_leaves()
optimize_ = kwargs.get('optimize', optimize)
pre_compute_ = kwargs.get('pre_compute', pre_compute)
post_compute_ = kwargs.get('post_compute', post_compute)
(expr2, d2) = swap_resources_into_scope(expr, d)
if pre_compute_:
d3 = dict(((e, pre_compute_(e, dat, **kwargs)) for (e, dat) in d2.items() if (e in expr2)))
else:
d3 = d2
if optimize_:
try:
expr3 = optimize_(expr2, *[v for (e, v) in d3.items() if (e in expr2)])
_d = dict(zip(expr2._leaves(), expr3._leaves()))
d4 = dict(((e._subs(_d), d) for (e, d) in d3.items()))
except NotImplementedError:
expr3 = expr2
d4 = d3
else:
expr3 = expr2
d4 = d3
result = top_then_bottom_then_top_again_etc(expr3, d4, **kwargs)
if post_compute_:
result = post_compute_(expr3, result, scope=d4)
if (return_type is no_default):
msg = "The default behavior of compute will change in version >= 0.11 where the `return_type` parameter will default to 'core'."
warnings.warn(msg, DeprecationWarning)
elif (return_type == 'core'):
result = coerce_core(result, expr.dshape)
elif isinstance(return_type, type):
result = into(return_type, result, dshape=expr3.dshape)
elif (return_type != 'native'):
raise ValueError('Invalid return_type passed to compute: {}'.format(return_type))
return result
| [
"@",
"dispatch",
"(",
"Expr",
",",
"Mapping",
")",
"def",
"compute",
"(",
"expr",
",",
"d",
",",
"return_type",
"=",
"no_default",
",",
"**",
"kwargs",
")",
":",
"_reset_leaves",
"(",
")",
"optimize_",
"=",
"kwargs",
".",
"get",
"(",
"'optimize'",
",",
"optimize",
")",
"pre_compute_",
"=",
"kwargs",
".",
"get",
"(",
"'pre_compute'",
",",
"pre_compute",
")",
"post_compute_",
"=",
"kwargs",
".",
"get",
"(",
"'post_compute'",
",",
"post_compute",
")",
"(",
"expr2",
",",
"d2",
")",
"=",
"swap_resources_into_scope",
"(",
"expr",
",",
"d",
")",
"if",
"pre_compute_",
":",
"d3",
"=",
"dict",
"(",
"(",
"(",
"e",
",",
"pre_compute_",
"(",
"e",
",",
"dat",
",",
"**",
"kwargs",
")",
")",
"for",
"(",
"e",
",",
"dat",
")",
"in",
"d2",
".",
"items",
"(",
")",
"if",
"(",
"e",
"in",
"expr2",
")",
")",
")",
"else",
":",
"d3",
"=",
"d2",
"if",
"optimize_",
":",
"try",
":",
"expr3",
"=",
"optimize_",
"(",
"expr2",
",",
"*",
"[",
"v",
"for",
"(",
"e",
",",
"v",
")",
"in",
"d3",
".",
"items",
"(",
")",
"if",
"(",
"e",
"in",
"expr2",
")",
"]",
")",
"_d",
"=",
"dict",
"(",
"zip",
"(",
"expr2",
".",
"_leaves",
"(",
")",
",",
"expr3",
".",
"_leaves",
"(",
")",
")",
")",
"d4",
"=",
"dict",
"(",
"(",
"(",
"e",
".",
"_subs",
"(",
"_d",
")",
",",
"d",
")",
"for",
"(",
"e",
",",
"d",
")",
"in",
"d3",
".",
"items",
"(",
")",
")",
")",
"except",
"NotImplementedError",
":",
"expr3",
"=",
"expr2",
"d4",
"=",
"d3",
"else",
":",
"expr3",
"=",
"expr2",
"d4",
"=",
"d3",
"result",
"=",
"top_then_bottom_then_top_again_etc",
"(",
"expr3",
",",
"d4",
",",
"**",
"kwargs",
")",
"if",
"post_compute_",
":",
"result",
"=",
"post_compute_",
"(",
"expr3",
",",
"result",
",",
"scope",
"=",
"d4",
")",
"if",
"(",
"return_type",
"is",
"no_default",
")",
":",
"msg",
"=",
"\"The default behavior of compute will change in version >= 0.11 where the `return_type` parameter will default to 'core'.\"",
"warnings",
".",
"warn",
"(",
"msg",
",",
"DeprecationWarning",
")",
"elif",
"(",
"return_type",
"==",
"'core'",
")",
":",
"result",
"=",
"coerce_core",
"(",
"result",
",",
"expr",
".",
"dshape",
")",
"elif",
"isinstance",
"(",
"return_type",
",",
"type",
")",
":",
"result",
"=",
"into",
"(",
"return_type",
",",
"result",
",",
"dshape",
"=",
"expr3",
".",
"dshape",
")",
"elif",
"(",
"return_type",
"!=",
"'native'",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid return_type passed to compute: {}'",
".",
"format",
"(",
"return_type",
")",
")",
"return",
"result"
] | evaluate more than one delayed at once . | train | false |
18,231 | def _get_vnics(host_reference):
return host_reference.config.network.vnic
| [
"def",
"_get_vnics",
"(",
"host_reference",
")",
":",
"return",
"host_reference",
".",
"config",
".",
"network",
".",
"vnic"
] | helper function that returns a list of virtualnics and their information . | train | false |
18,233 | def fake_site(name, default=None):
if (name == 'cybersource_config_key'):
return 'test_site'
else:
return None
| [
"def",
"fake_site",
"(",
"name",
",",
"default",
"=",
"None",
")",
":",
"if",
"(",
"name",
"==",
"'cybersource_config_key'",
")",
":",
"return",
"'test_site'",
"else",
":",
"return",
"None"
] | this is a test mocking function to return a site configuration . | train | false |
18,234 | def pretty_depth_cv(depth):
import cv
depth = pretty_depth(depth)
image = cv.CreateImageHeader((depth.shape[1], depth.shape[0]), cv.IPL_DEPTH_8U, 1)
cv.SetData(image, depth.tostring(), (depth.dtype.itemsize * depth.shape[1]))
return image
| [
"def",
"pretty_depth_cv",
"(",
"depth",
")",
":",
"import",
"cv",
"depth",
"=",
"pretty_depth",
"(",
"depth",
")",
"image",
"=",
"cv",
".",
"CreateImageHeader",
"(",
"(",
"depth",
".",
"shape",
"[",
"1",
"]",
",",
"depth",
".",
"shape",
"[",
"0",
"]",
")",
",",
"cv",
".",
"IPL_DEPTH_8U",
",",
"1",
")",
"cv",
".",
"SetData",
"(",
"image",
",",
"depth",
".",
"tostring",
"(",
")",
",",
"(",
"depth",
".",
"dtype",
".",
"itemsize",
"*",
"depth",
".",
"shape",
"[",
"1",
"]",
")",
")",
"return",
"image"
] | converts depth into a nicer format for display this is abstracted to allow for experimentation with normalization args: depth: a numpy array with 2 bytes per pixel returns: an opencv image whos datatype is unspecified . | train | false |
18,235 | def identify_repo(repo_url):
repo_url_values = repo_url.split(u'+')
if (len(repo_url_values) == 2):
repo_type = repo_url_values[0]
if (repo_type in [u'git', u'hg']):
return (repo_type, repo_url_values[1])
else:
raise UnknownRepoType
elif (u'git' in repo_url):
return (u'git', repo_url)
elif (u'bitbucket' in repo_url):
return (u'hg', repo_url)
else:
raise UnknownRepoType
| [
"def",
"identify_repo",
"(",
"repo_url",
")",
":",
"repo_url_values",
"=",
"repo_url",
".",
"split",
"(",
"u'+'",
")",
"if",
"(",
"len",
"(",
"repo_url_values",
")",
"==",
"2",
")",
":",
"repo_type",
"=",
"repo_url_values",
"[",
"0",
"]",
"if",
"(",
"repo_type",
"in",
"[",
"u'git'",
",",
"u'hg'",
"]",
")",
":",
"return",
"(",
"repo_type",
",",
"repo_url_values",
"[",
"1",
"]",
")",
"else",
":",
"raise",
"UnknownRepoType",
"elif",
"(",
"u'git'",
"in",
"repo_url",
")",
":",
"return",
"(",
"u'git'",
",",
"repo_url",
")",
"elif",
"(",
"u'bitbucket'",
"in",
"repo_url",
")",
":",
"return",
"(",
"u'hg'",
",",
"repo_url",
")",
"else",
":",
"raise",
"UnknownRepoType"
] | determine if repo_url should be treated as a url to a git or hg repo . | train | false |
18,236 | @doctest_depends_on(modules=('numpy', 'matplotlib'))
def plot3d_parametric_line(*args, **kwargs):
args = list(map(sympify, args))
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 3, 1)
series = [Parametric3DLineSeries(*arg, **kwargs) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
| [
"@",
"doctest_depends_on",
"(",
"modules",
"=",
"(",
"'numpy'",
",",
"'matplotlib'",
")",
")",
"def",
"plot3d_parametric_line",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"args",
"=",
"list",
"(",
"map",
"(",
"sympify",
",",
"args",
")",
")",
"show",
"=",
"kwargs",
".",
"pop",
"(",
"'show'",
",",
"True",
")",
"series",
"=",
"[",
"]",
"plot_expr",
"=",
"check_arguments",
"(",
"args",
",",
"3",
",",
"1",
")",
"series",
"=",
"[",
"Parametric3DLineSeries",
"(",
"*",
"arg",
",",
"**",
"kwargs",
")",
"for",
"arg",
"in",
"plot_expr",
"]",
"plots",
"=",
"Plot",
"(",
"*",
"series",
",",
"**",
"kwargs",
")",
"if",
"show",
":",
"plots",
".",
"show",
"(",
")",
"return",
"plots"
] | plots a 3d parametric line plot . | train | false |
18,238 | def get_parameter(parameters, name):
if ((name not in parameters) or (len(parameters[name]) == 0)):
return None
return parameters[name][0]
| [
"def",
"get_parameter",
"(",
"parameters",
",",
"name",
")",
":",
"if",
"(",
"(",
"name",
"not",
"in",
"parameters",
")",
"or",
"(",
"len",
"(",
"parameters",
"[",
"name",
"]",
")",
"==",
"0",
")",
")",
":",
"return",
"None",
"return",
"parameters",
"[",
"name",
"]",
"[",
"0",
"]"
] | returns the value of a cgroup parameter for a container path path to the container parent directory default: /var/lib/lxc . | train | false |
18,239 | def unicode2bytes(x, encoding='utf-8'):
if isinstance(x, text_type):
x = x.encode(encoding)
return x
| [
"def",
"unicode2bytes",
"(",
"x",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"if",
"isinstance",
"(",
"x",
",",
"text_type",
")",
":",
"x",
"=",
"x",
".",
"encode",
"(",
"encoding",
")",
"return",
"x"
] | convert a unicode string to c{bytes} . | train | true |
18,240 | def _process_if_nonempty(processor, name, settings):
name = name.strip()
return (processor(name, settings) if name else _DISCARD)
| [
"def",
"_process_if_nonempty",
"(",
"processor",
",",
"name",
",",
"settings",
")",
":",
"name",
"=",
"name",
".",
"strip",
"(",
")",
"return",
"(",
"processor",
"(",
"name",
",",
"settings",
")",
"if",
"name",
"else",
"_DISCARD",
")"
] | removes extra whitespace from name and applies a metadata processor . | train | false |
18,241 | def str_compat(class_):
if PY2:
if (('__str__' in class_.__dict__) and ('__unicode__' not in class_.__dict__)):
class_.__unicode__ = class_.__str__
class_.__str__ = py_native_string(class_.__unicode__)
if ('__repr__' in class_.__dict__):
class_.__repr__ = py_native_string(class_.__repr__)
return class_
| [
"def",
"str_compat",
"(",
"class_",
")",
":",
"if",
"PY2",
":",
"if",
"(",
"(",
"'__str__'",
"in",
"class_",
".",
"__dict__",
")",
"and",
"(",
"'__unicode__'",
"not",
"in",
"class_",
".",
"__dict__",
")",
")",
":",
"class_",
".",
"__unicode__",
"=",
"class_",
".",
"__str__",
"class_",
".",
"__str__",
"=",
"py_native_string",
"(",
"class_",
".",
"__unicode__",
")",
"if",
"(",
"'__repr__'",
"in",
"class_",
".",
"__dict__",
")",
":",
"class_",
".",
"__repr__",
"=",
"py_native_string",
"(",
"class_",
".",
"__repr__",
")",
"return",
"class_"
] | on python 2 . | train | false |
18,242 | def dg_demo():
grammar = DependencyGrammar.fromstring(u"\n 'scratch' -> 'cats' | 'walls'\n 'walls' -> 'the'\n 'cats' -> 'the'\n ")
print(grammar)
| [
"def",
"dg_demo",
"(",
")",
":",
"grammar",
"=",
"DependencyGrammar",
".",
"fromstring",
"(",
"u\"\\n 'scratch' -> 'cats' | 'walls'\\n 'walls' -> 'the'\\n 'cats' -> 'the'\\n \"",
")",
"print",
"(",
"grammar",
")"
] | a demonstration showing the creation and inspection of a dependencygrammar . | train | false |
18,245 | def sendRobust(signal=Any, sender=Anonymous, *arguments, **named):
responses = []
for receiver in liveReceivers(getAllReceivers(sender, signal)):
try:
response = robustApply(receiver, signal=signal, sender=sender, *arguments, **named)
except Exception as err:
responses.append((receiver, err))
else:
responses.append((receiver, response))
return responses
| [
"def",
"sendRobust",
"(",
"signal",
"=",
"Any",
",",
"sender",
"=",
"Anonymous",
",",
"*",
"arguments",
",",
"**",
"named",
")",
":",
"responses",
"=",
"[",
"]",
"for",
"receiver",
"in",
"liveReceivers",
"(",
"getAllReceivers",
"(",
"sender",
",",
"signal",
")",
")",
":",
"try",
":",
"response",
"=",
"robustApply",
"(",
"receiver",
",",
"signal",
"=",
"signal",
",",
"sender",
"=",
"sender",
",",
"*",
"arguments",
",",
"**",
"named",
")",
"except",
"Exception",
"as",
"err",
":",
"responses",
".",
"append",
"(",
"(",
"receiver",
",",
"err",
")",
")",
"else",
":",
"responses",
".",
"append",
"(",
"(",
"receiver",
",",
"response",
")",
")",
"return",
"responses"
] | send signal from sender to all connected receivers catching errors signal -- signal value . | train | true |
18,246 | def add_default(version=None, overwrite=False):
plugins = module_loader.all(path_only=True)
plugins = ((os.path.splitext(os.path.basename(p))[0], p) for p in plugins)
plugins = (p for p in plugins if (p[0] not in NONMODULE_MODULE_NAMES))
processed = set()
diagnostic_messages = []
for (name, filename) in (info for info in plugins if (info[0] not in processed)):
try:
write_metadata(filename, DEFAULT_METADATA, version, overwrite)
except ParseError as e:
diagnostic_messages.append(e.args[0])
continue
processed.add(name)
if diagnostic_messages:
pprint(diagnostic_messages)
return 0
| [
"def",
"add_default",
"(",
"version",
"=",
"None",
",",
"overwrite",
"=",
"False",
")",
":",
"plugins",
"=",
"module_loader",
".",
"all",
"(",
"path_only",
"=",
"True",
")",
"plugins",
"=",
"(",
"(",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"p",
")",
")",
"[",
"0",
"]",
",",
"p",
")",
"for",
"p",
"in",
"plugins",
")",
"plugins",
"=",
"(",
"p",
"for",
"p",
"in",
"plugins",
"if",
"(",
"p",
"[",
"0",
"]",
"not",
"in",
"NONMODULE_MODULE_NAMES",
")",
")",
"processed",
"=",
"set",
"(",
")",
"diagnostic_messages",
"=",
"[",
"]",
"for",
"(",
"name",
",",
"filename",
")",
"in",
"(",
"info",
"for",
"info",
"in",
"plugins",
"if",
"(",
"info",
"[",
"0",
"]",
"not",
"in",
"processed",
")",
")",
":",
"try",
":",
"write_metadata",
"(",
"filename",
",",
"DEFAULT_METADATA",
",",
"version",
",",
"overwrite",
")",
"except",
"ParseError",
"as",
"e",
":",
"diagnostic_messages",
".",
"append",
"(",
"e",
".",
"args",
"[",
"0",
"]",
")",
"continue",
"processed",
".",
"add",
"(",
"name",
")",
"if",
"diagnostic_messages",
":",
"pprint",
"(",
"diagnostic_messages",
")",
"return",
"0"
] | implement the subcommand to add default metadata to modules add the default metadata to any plugin which lacks it . | train | false |
18,247 | def scan_host(host):
if (opts.max and (int(opts.max) >= counter['Total'])):
return
host = str(host)
if (host in hosts_to_skip):
return
result = is_vulnerable(host, opts.timeout, opts.port)
message = store_results(host, result)
if opts.verbose:
print message
return message
| [
"def",
"scan_host",
"(",
"host",
")",
":",
"if",
"(",
"opts",
".",
"max",
"and",
"(",
"int",
"(",
"opts",
".",
"max",
")",
">=",
"counter",
"[",
"'Total'",
"]",
")",
")",
":",
"return",
"host",
"=",
"str",
"(",
"host",
")",
"if",
"(",
"host",
"in",
"hosts_to_skip",
")",
":",
"return",
"result",
"=",
"is_vulnerable",
"(",
"host",
",",
"opts",
".",
"timeout",
",",
"opts",
".",
"port",
")",
"message",
"=",
"store_results",
"(",
"host",
",",
"result",
")",
"if",
"opts",
".",
"verbose",
":",
"print",
"message",
"return",
"message"
] | scans a single host . | train | false |
18,248 | def _get_buckets_cache_filename():
cache_dir = _get_cache_dir()
if (not os.path.exists(cache_dir)):
os.makedirs(cache_dir)
return os.path.join(cache_dir, 'buckets_files.cache')
| [
"def",
"_get_buckets_cache_filename",
"(",
")",
":",
"cache_dir",
"=",
"_get_cache_dir",
"(",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cache_dir",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"cache_dir",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"cache_dir",
",",
"'buckets_files.cache'",
")"
] | return the filename of the cache for bucket contents . | train | true |
18,249 | def ensure_test_file_dir_set():
galaxy_test_file_dir = os.environ.get('GALAXY_TEST_FILE_DIR', GALAXY_TEST_FILE_DIR)
os.environ['GALAXY_TEST_FILE_DIR'] = galaxy_test_file_dir
first_test_file_dir = galaxy_test_file_dir.split(',')[0]
return first_test_file_dir
| [
"def",
"ensure_test_file_dir_set",
"(",
")",
":",
"galaxy_test_file_dir",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'GALAXY_TEST_FILE_DIR'",
",",
"GALAXY_TEST_FILE_DIR",
")",
"os",
".",
"environ",
"[",
"'GALAXY_TEST_FILE_DIR'",
"]",
"=",
"galaxy_test_file_dir",
"first_test_file_dir",
"=",
"galaxy_test_file_dir",
".",
"split",
"(",
"','",
")",
"[",
"0",
"]",
"return",
"first_test_file_dir"
] | ensure galaxy_test_file_dir setup in environment for test data resolver . | train | false |
18,250 | def get_image_model():
from django.apps import apps
model_string = get_image_model_string()
try:
return apps.get_model(model_string)
except ValueError:
raise ImproperlyConfigured(u"WAGTAILIMAGES_IMAGE_MODEL must be of the form 'app_label.model_name'")
except LookupError:
raise ImproperlyConfigured((u"WAGTAILIMAGES_IMAGE_MODEL refers to model '%s' that has not been installed" % model_string))
| [
"def",
"get_image_model",
"(",
")",
":",
"from",
"django",
".",
"apps",
"import",
"apps",
"model_string",
"=",
"get_image_model_string",
"(",
")",
"try",
":",
"return",
"apps",
".",
"get_model",
"(",
"model_string",
")",
"except",
"ValueError",
":",
"raise",
"ImproperlyConfigured",
"(",
"u\"WAGTAILIMAGES_IMAGE_MODEL must be of the form 'app_label.model_name'\"",
")",
"except",
"LookupError",
":",
"raise",
"ImproperlyConfigured",
"(",
"(",
"u\"WAGTAILIMAGES_IMAGE_MODEL refers to model '%s' that has not been installed\"",
"%",
"model_string",
")",
")"
] | get the image model from the wagtailimages_image_model setting . | train | false |
18,251 | def set_console_cursor_position(x, y, fd=1):
coord = COORD()
coord.X = x
coord.Y = y
hcon = STDHANDLES[fd]
rtn = SetConsoleCursorPosition(hcon, coord)
return rtn
| [
"def",
"set_console_cursor_position",
"(",
"x",
",",
"y",
",",
"fd",
"=",
"1",
")",
":",
"coord",
"=",
"COORD",
"(",
")",
"coord",
".",
"X",
"=",
"x",
"coord",
".",
"Y",
"=",
"y",
"hcon",
"=",
"STDHANDLES",
"[",
"fd",
"]",
"rtn",
"=",
"SetConsoleCursorPosition",
"(",
"hcon",
",",
"coord",
")",
"return",
"rtn"
] | sets the console cursor position for a standard buffer . | train | false |
18,252 | def _winding_number(T, field):
return int((sum([field(*_values[t][i]) for (t, i) in T]) / field(2)))
| [
"def",
"_winding_number",
"(",
"T",
",",
"field",
")",
":",
"return",
"int",
"(",
"(",
"sum",
"(",
"[",
"field",
"(",
"*",
"_values",
"[",
"t",
"]",
"[",
"i",
"]",
")",
"for",
"(",
"t",
",",
"i",
")",
"in",
"T",
"]",
")",
"/",
"field",
"(",
"2",
")",
")",
")"
] | compute the winding number of the input polynomial . | train | false |
18,253 | def _make_instance_stub(client):
if (client.emulator_host is None):
return make_secure_stub(client.credentials, client.user_agent, bigtable_instance_admin_pb2.BigtableInstanceAdminStub, INSTANCE_ADMIN_HOST)
else:
return make_insecure_stub(bigtable_instance_admin_pb2.BigtableInstanceAdminStub, client.emulator_host)
| [
"def",
"_make_instance_stub",
"(",
"client",
")",
":",
"if",
"(",
"client",
".",
"emulator_host",
"is",
"None",
")",
":",
"return",
"make_secure_stub",
"(",
"client",
".",
"credentials",
",",
"client",
".",
"user_agent",
",",
"bigtable_instance_admin_pb2",
".",
"BigtableInstanceAdminStub",
",",
"INSTANCE_ADMIN_HOST",
")",
"else",
":",
"return",
"make_insecure_stub",
"(",
"bigtable_instance_admin_pb2",
".",
"BigtableInstanceAdminStub",
",",
"client",
".",
"emulator_host",
")"
] | creates grpc stub to make requests to the instance admin api . | train | false |
18,255 | @contextmanager
def catch_stderr():
old_stderr = sys.stderr
sys.stderr = rv = StringIO()
try:
(yield rv)
finally:
sys.stderr = old_stderr
| [
"@",
"contextmanager",
"def",
"catch_stderr",
"(",
")",
":",
"old_stderr",
"=",
"sys",
".",
"stderr",
"sys",
".",
"stderr",
"=",
"rv",
"=",
"StringIO",
"(",
")",
"try",
":",
"(",
"yield",
"rv",
")",
"finally",
":",
"sys",
".",
"stderr",
"=",
"old_stderr"
] | catch stderr in a stringio . | train | false |
18,256 | def test_preload_epochs():
(raw, events, picks) = _get_data()
epochs_preload = Epochs(raw, events[:16], event_id, tmin, tmax, picks=picks, preload=True, reject=reject, flat=flat)
data_preload = epochs_preload.get_data()
epochs = Epochs(raw, events[:16], event_id, tmin, tmax, picks=picks, preload=False, reject=reject, flat=flat)
data = epochs.get_data()
assert_array_equal(data_preload, data)
assert_array_almost_equal(epochs_preload.average().data, epochs.average().data, 18)
| [
"def",
"test_preload_epochs",
"(",
")",
":",
"(",
"raw",
",",
"events",
",",
"picks",
")",
"=",
"_get_data",
"(",
")",
"epochs_preload",
"=",
"Epochs",
"(",
"raw",
",",
"events",
"[",
":",
"16",
"]",
",",
"event_id",
",",
"tmin",
",",
"tmax",
",",
"picks",
"=",
"picks",
",",
"preload",
"=",
"True",
",",
"reject",
"=",
"reject",
",",
"flat",
"=",
"flat",
")",
"data_preload",
"=",
"epochs_preload",
".",
"get_data",
"(",
")",
"epochs",
"=",
"Epochs",
"(",
"raw",
",",
"events",
"[",
":",
"16",
"]",
",",
"event_id",
",",
"tmin",
",",
"tmax",
",",
"picks",
"=",
"picks",
",",
"preload",
"=",
"False",
",",
"reject",
"=",
"reject",
",",
"flat",
"=",
"flat",
")",
"data",
"=",
"epochs",
".",
"get_data",
"(",
")",
"assert_array_equal",
"(",
"data_preload",
",",
"data",
")",
"assert_array_almost_equal",
"(",
"epochs_preload",
".",
"average",
"(",
")",
".",
"data",
",",
"epochs",
".",
"average",
"(",
")",
".",
"data",
",",
"18",
")"
] | test preload of epochs . | train | false |
18,257 | def getNormalByPath(path):
totalNormal = Vector3()
for (pointIndex, point) in enumerate(path):
center = path[((pointIndex + 1) % len(path))]
end = path[((pointIndex + 2) % len(path))]
totalNormal += getNormalWeighted(point, center, end)
return totalNormal.getNormalized()
| [
"def",
"getNormalByPath",
"(",
"path",
")",
":",
"totalNormal",
"=",
"Vector3",
"(",
")",
"for",
"(",
"pointIndex",
",",
"point",
")",
"in",
"enumerate",
"(",
"path",
")",
":",
"center",
"=",
"path",
"[",
"(",
"(",
"pointIndex",
"+",
"1",
")",
"%",
"len",
"(",
"path",
")",
")",
"]",
"end",
"=",
"path",
"[",
"(",
"(",
"pointIndex",
"+",
"2",
")",
"%",
"len",
"(",
"path",
")",
")",
"]",
"totalNormal",
"+=",
"getNormalWeighted",
"(",
"point",
",",
"center",
",",
"end",
")",
"return",
"totalNormal",
".",
"getNormalized",
"(",
")"
] | get normal by path . | train | false |
18,258 | def _start_response(status, headers, exc_info=None):
if (exc_info is not None):
raise exc_info[0], exc_info[1], exc_info[2]
print ('Status: %s' % status)
for (name, val) in headers:
print ('%s: %s' % (name, val))
print
return sys.stdout.write
| [
"def",
"_start_response",
"(",
"status",
",",
"headers",
",",
"exc_info",
"=",
"None",
")",
":",
"if",
"(",
"exc_info",
"is",
"not",
"None",
")",
":",
"raise",
"exc_info",
"[",
"0",
"]",
",",
"exc_info",
"[",
"1",
"]",
",",
"exc_info",
"[",
"2",
"]",
"print",
"(",
"'Status: %s'",
"%",
"status",
")",
"for",
"(",
"name",
",",
"val",
")",
"in",
"headers",
":",
"print",
"(",
"'%s: %s'",
"%",
"(",
"name",
",",
"val",
")",
")",
"print",
"return",
"sys",
".",
"stdout",
".",
"write"
] | a start_response() callable as specified by pep 333 . | train | false |
18,260 | def unbool(element, true=object(), false=object()):
if (element is True):
return true
elif (element is False):
return false
return element
| [
"def",
"unbool",
"(",
"element",
",",
"true",
"=",
"object",
"(",
")",
",",
"false",
"=",
"object",
"(",
")",
")",
":",
"if",
"(",
"element",
"is",
"True",
")",
":",
"return",
"true",
"elif",
"(",
"element",
"is",
"False",
")",
":",
"return",
"false",
"return",
"element"
] | a hack to make true and 1 and false and 0 unique for uniq . | train | true |
18,261 | def test_doc():
obj = compiled.CompiledObject(_evaluator(), ''.__getnewargs__)
assert (obj.doc == '')
| [
"def",
"test_doc",
"(",
")",
":",
"obj",
"=",
"compiled",
".",
"CompiledObject",
"(",
"_evaluator",
"(",
")",
",",
"''",
".",
"__getnewargs__",
")",
"assert",
"(",
"obj",
".",
"doc",
"==",
"''",
")"
] | even compiledobject docs always return empty docstrings - not none . | train | false |
18,262 | def _create_entrance_exam(request, course_key, entrance_exam_minimum_score_pct=None):
if (entrance_exam_minimum_score_pct is None):
entrance_exam_minimum_score_pct = _get_default_entrance_exam_minimum_pct()
course = modulestore().get_course(course_key)
if (course is None):
return HttpResponse(status=400)
parent_locator = unicode(course.location)
created_block = create_xblock(parent_locator=parent_locator, user=request.user, category='chapter', display_name=_('Entrance Exam'), is_entrance_exam=True)
course = modulestore().get_course(course_key)
metadata = {'entrance_exam_enabled': True, 'entrance_exam_minimum_score_pct': unicode(entrance_exam_minimum_score_pct), 'entrance_exam_id': unicode(created_block.location)}
CourseMetadata.update_from_dict(metadata, course, request.user)
create_xblock(parent_locator=unicode(created_block.location), user=request.user, category='sequential', display_name=_('Entrance Exam - Subsection'))
add_entrance_exam_milestone(course.id, created_block)
return HttpResponse(status=201)
| [
"def",
"_create_entrance_exam",
"(",
"request",
",",
"course_key",
",",
"entrance_exam_minimum_score_pct",
"=",
"None",
")",
":",
"if",
"(",
"entrance_exam_minimum_score_pct",
"is",
"None",
")",
":",
"entrance_exam_minimum_score_pct",
"=",
"_get_default_entrance_exam_minimum_pct",
"(",
")",
"course",
"=",
"modulestore",
"(",
")",
".",
"get_course",
"(",
"course_key",
")",
"if",
"(",
"course",
"is",
"None",
")",
":",
"return",
"HttpResponse",
"(",
"status",
"=",
"400",
")",
"parent_locator",
"=",
"unicode",
"(",
"course",
".",
"location",
")",
"created_block",
"=",
"create_xblock",
"(",
"parent_locator",
"=",
"parent_locator",
",",
"user",
"=",
"request",
".",
"user",
",",
"category",
"=",
"'chapter'",
",",
"display_name",
"=",
"_",
"(",
"'Entrance Exam'",
")",
",",
"is_entrance_exam",
"=",
"True",
")",
"course",
"=",
"modulestore",
"(",
")",
".",
"get_course",
"(",
"course_key",
")",
"metadata",
"=",
"{",
"'entrance_exam_enabled'",
":",
"True",
",",
"'entrance_exam_minimum_score_pct'",
":",
"unicode",
"(",
"entrance_exam_minimum_score_pct",
")",
",",
"'entrance_exam_id'",
":",
"unicode",
"(",
"created_block",
".",
"location",
")",
"}",
"CourseMetadata",
".",
"update_from_dict",
"(",
"metadata",
",",
"course",
",",
"request",
".",
"user",
")",
"create_xblock",
"(",
"parent_locator",
"=",
"unicode",
"(",
"created_block",
".",
"location",
")",
",",
"user",
"=",
"request",
".",
"user",
",",
"category",
"=",
"'sequential'",
",",
"display_name",
"=",
"_",
"(",
"'Entrance Exam - Subsection'",
")",
")",
"add_entrance_exam_milestone",
"(",
"course",
".",
"id",
",",
"created_block",
")",
"return",
"HttpResponse",
"(",
"status",
"=",
"201",
")"
] | internal workflow operation to create an entrance exam . | train | false |
18,263 | def useOldServerHashFunction():
global serverHashFunction
serverHashFunction = binascii.crc32
| [
"def",
"useOldServerHashFunction",
"(",
")",
":",
"global",
"serverHashFunction",
"serverHashFunction",
"=",
"binascii",
".",
"crc32"
] | use the old python-memcache server hash function . | train | false |
18,264 | def validate_schema(filename, version=u'1.1'):
if (version not in (u'1.0', u'1.1', u'1.2', u'1.3')):
log.info(u'{0} has version {1}, using schema 1.1'.format(filename, version))
version = u'1.1'
if (version in (u'1.1', u'1.2', u'1.3')):
schema_path = data.get_pkg_data_filename(u'data/VOTable.v{0}.xsd'.format(version))
else:
schema_path = data.get_pkg_data_filename(u'data/VOTable.dtd')
return validate.validate_schema(filename, schema_path)
| [
"def",
"validate_schema",
"(",
"filename",
",",
"version",
"=",
"u'1.1'",
")",
":",
"if",
"(",
"version",
"not",
"in",
"(",
"u'1.0'",
",",
"u'1.1'",
",",
"u'1.2'",
",",
"u'1.3'",
")",
")",
":",
"log",
".",
"info",
"(",
"u'{0} has version {1}, using schema 1.1'",
".",
"format",
"(",
"filename",
",",
"version",
")",
")",
"version",
"=",
"u'1.1'",
"if",
"(",
"version",
"in",
"(",
"u'1.1'",
",",
"u'1.2'",
",",
"u'1.3'",
")",
")",
":",
"schema_path",
"=",
"data",
".",
"get_pkg_data_filename",
"(",
"u'data/VOTable.v{0}.xsd'",
".",
"format",
"(",
"version",
")",
")",
"else",
":",
"schema_path",
"=",
"data",
".",
"get_pkg_data_filename",
"(",
"u'data/VOTable.dtd'",
")",
"return",
"validate",
".",
"validate_schema",
"(",
"filename",
",",
"schema_path",
")"
] | validates an xml file against a schema or dtd . | train | false |
18,265 | def get_model(image_size, subject_names):
feature = Fisherfaces()
classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
return ExtendedPredictableModel(feature=feature, classifier=classifier, image_size=image_size, subject_names=subject_names)
| [
"def",
"get_model",
"(",
"image_size",
",",
"subject_names",
")",
":",
"feature",
"=",
"Fisherfaces",
"(",
")",
"classifier",
"=",
"NearestNeighbor",
"(",
"dist_metric",
"=",
"EuclideanDistance",
"(",
")",
",",
"k",
"=",
"1",
")",
"return",
"ExtendedPredictableModel",
"(",
"feature",
"=",
"feature",
",",
"classifier",
"=",
"classifier",
",",
"image_size",
"=",
"image_size",
",",
"subject_names",
"=",
"subject_names",
")"
] | returns the model matching the given app_label and case-insensitive model_name . | train | false |
18,266 | def should_rx_exclude_path(path, exclude_rxs):
for rx in exclude_rxs:
if rx.search(path):
debug1(('Skipping %r: excluded by rx pattern %r.\n' % (path, rx.pattern)))
return True
return False
| [
"def",
"should_rx_exclude_path",
"(",
"path",
",",
"exclude_rxs",
")",
":",
"for",
"rx",
"in",
"exclude_rxs",
":",
"if",
"rx",
".",
"search",
"(",
"path",
")",
":",
"debug1",
"(",
"(",
"'Skipping %r: excluded by rx pattern %r.\\n'",
"%",
"(",
"path",
",",
"rx",
".",
"pattern",
")",
")",
")",
"return",
"True",
"return",
"False"
] | return true if path matches a regular expression in exclude_rxs . | train | false |
18,269 | def _parallel_predict_log_proba(estimators, estimators_features, X, n_classes):
n_samples = X.shape[0]
log_proba = np.empty((n_samples, n_classes))
log_proba.fill((- np.inf))
all_classes = np.arange(n_classes, dtype=np.int)
for (estimator, features) in zip(estimators, estimators_features):
log_proba_estimator = estimator.predict_log_proba(X[:, features])
if (n_classes == len(estimator.classes_)):
log_proba = np.logaddexp(log_proba, log_proba_estimator)
else:
log_proba[:, estimator.classes_] = np.logaddexp(log_proba[:, estimator.classes_], log_proba_estimator[:, range(len(estimator.classes_))])
missing = np.setdiff1d(all_classes, estimator.classes_)
log_proba[:, missing] = np.logaddexp(log_proba[:, missing], (- np.inf))
return log_proba
| [
"def",
"_parallel_predict_log_proba",
"(",
"estimators",
",",
"estimators_features",
",",
"X",
",",
"n_classes",
")",
":",
"n_samples",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"log_proba",
"=",
"np",
".",
"empty",
"(",
"(",
"n_samples",
",",
"n_classes",
")",
")",
"log_proba",
".",
"fill",
"(",
"(",
"-",
"np",
".",
"inf",
")",
")",
"all_classes",
"=",
"np",
".",
"arange",
"(",
"n_classes",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"for",
"(",
"estimator",
",",
"features",
")",
"in",
"zip",
"(",
"estimators",
",",
"estimators_features",
")",
":",
"log_proba_estimator",
"=",
"estimator",
".",
"predict_log_proba",
"(",
"X",
"[",
":",
",",
"features",
"]",
")",
"if",
"(",
"n_classes",
"==",
"len",
"(",
"estimator",
".",
"classes_",
")",
")",
":",
"log_proba",
"=",
"np",
".",
"logaddexp",
"(",
"log_proba",
",",
"log_proba_estimator",
")",
"else",
":",
"log_proba",
"[",
":",
",",
"estimator",
".",
"classes_",
"]",
"=",
"np",
".",
"logaddexp",
"(",
"log_proba",
"[",
":",
",",
"estimator",
".",
"classes_",
"]",
",",
"log_proba_estimator",
"[",
":",
",",
"range",
"(",
"len",
"(",
"estimator",
".",
"classes_",
")",
")",
"]",
")",
"missing",
"=",
"np",
".",
"setdiff1d",
"(",
"all_classes",
",",
"estimator",
".",
"classes_",
")",
"log_proba",
"[",
":",
",",
"missing",
"]",
"=",
"np",
".",
"logaddexp",
"(",
"log_proba",
"[",
":",
",",
"missing",
"]",
",",
"(",
"-",
"np",
".",
"inf",
")",
")",
"return",
"log_proba"
] | private function used to compute log probabilities within a job . | train | false |
18,270 | def test_cp12403():
superConsole.SendKeys('outputRedirectStart{(}{)}{ENTER}')
superConsole.SendKeys('raise Exception{(}"Some string exception"{)}{ENTER}')
expected = ['Traceback (most recent call last):', ' File "<stdin>", line 1, in <module>', 'Exception: Some string exception', '']
superConsole.SendKeys('outputRedirectStop{(}{)}{ENTER}')
AreEqual(removePrompts(getTestOutput()[0]), [])
errlines = getTestOutput()[1]
for i in xrange(len(errlines)):
Assert(errlines[i].startswith(expected[i]), ((str(errlines) + ' != ') + str(expected)))
| [
"def",
"test_cp12403",
"(",
")",
":",
"superConsole",
".",
"SendKeys",
"(",
"'outputRedirectStart{(}{)}{ENTER}'",
")",
"superConsole",
".",
"SendKeys",
"(",
"'raise Exception{(}\"Some string exception\"{)}{ENTER}'",
")",
"expected",
"=",
"[",
"'Traceback (most recent call last):'",
",",
"' File \"<stdin>\", line 1, in <module>'",
",",
"'Exception: Some string exception'",
",",
"''",
"]",
"superConsole",
".",
"SendKeys",
"(",
"'outputRedirectStop{(}{)}{ENTER}'",
")",
"AreEqual",
"(",
"removePrompts",
"(",
"getTestOutput",
"(",
")",
"[",
"0",
"]",
")",
",",
"[",
"]",
")",
"errlines",
"=",
"getTestOutput",
"(",
")",
"[",
"1",
"]",
"for",
"i",
"in",
"xrange",
"(",
"len",
"(",
"errlines",
")",
")",
":",
"Assert",
"(",
"errlines",
"[",
"i",
"]",
".",
"startswith",
"(",
"expected",
"[",
"i",
"]",
")",
",",
"(",
"(",
"str",
"(",
"errlines",
")",
"+",
"' != '",
")",
"+",
"str",
"(",
"expected",
")",
")",
")"
] | an exception thrown should appear in stderr . | train | false |
18,271 | def programme_project():
s3.prep = (lambda r: ((r.method == 'options') and (r.representation == 's3json')))
return s3_rest_controller()
| [
"def",
"programme_project",
"(",
")",
":",
"s3",
".",
"prep",
"=",
"(",
"lambda",
"r",
":",
"(",
"(",
"r",
".",
"method",
"==",
"'options'",
")",
"and",
"(",
"r",
".",
"representation",
"==",
"'s3json'",
")",
")",
")",
"return",
"s3_rest_controller",
"(",
")"
] | restful controller for programmes <> projects . | train | false |
18,274 | def new(rsa_key):
return PKCS115_SigScheme(rsa_key)
| [
"def",
"new",
"(",
"rsa_key",
")",
":",
"return",
"PKCS115_SigScheme",
"(",
"rsa_key",
")"
] | creates a new store . | train | false |
18,276 | @click.command(u'enable-scheduler')
@pass_context
def enable_scheduler(context):
import frappe.utils.scheduler
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
frappe.utils.scheduler.enable_scheduler()
frappe.db.commit()
print u'Enabled for', site
finally:
frappe.destroy()
| [
"@",
"click",
".",
"command",
"(",
"u'enable-scheduler'",
")",
"@",
"pass_context",
"def",
"enable_scheduler",
"(",
"context",
")",
":",
"import",
"frappe",
".",
"utils",
".",
"scheduler",
"for",
"site",
"in",
"context",
".",
"sites",
":",
"try",
":",
"frappe",
".",
"init",
"(",
"site",
"=",
"site",
")",
"frappe",
".",
"connect",
"(",
")",
"frappe",
".",
"utils",
".",
"scheduler",
".",
"enable_scheduler",
"(",
")",
"frappe",
".",
"db",
".",
"commit",
"(",
")",
"print",
"u'Enabled for'",
",",
"site",
"finally",
":",
"frappe",
".",
"destroy",
"(",
")"
] | enable scheduler . | train | false |
18,277 | def storage_service_bucket():
conn = s3.connection.S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
return conn.get_bucket(settings.VIDEO_UPLOAD_PIPELINE['BUCKET'])
| [
"def",
"storage_service_bucket",
"(",
")",
":",
"conn",
"=",
"s3",
".",
"connection",
".",
"S3Connection",
"(",
"settings",
".",
"AWS_ACCESS_KEY_ID",
",",
"settings",
".",
"AWS_SECRET_ACCESS_KEY",
")",
"return",
"conn",
".",
"get_bucket",
"(",
"settings",
".",
"VIDEO_UPLOAD_PIPELINE",
"[",
"'BUCKET'",
"]",
")"
] | returns an s3 bucket for video uploads . | train | false |
18,278 | def set_chmod(path, permissions, report):
try:
os.chmod(path, permissions)
except:
lpath = path.lower()
if (report and ('.appledouble' not in lpath) and ('.ds_store' not in lpath)):
logging.error(T('Cannot change permissions of %s'), clip_path(path))
logging.info('Traceback: ', exc_info=True)
| [
"def",
"set_chmod",
"(",
"path",
",",
"permissions",
",",
"report",
")",
":",
"try",
":",
"os",
".",
"chmod",
"(",
"path",
",",
"permissions",
")",
"except",
":",
"lpath",
"=",
"path",
".",
"lower",
"(",
")",
"if",
"(",
"report",
"and",
"(",
"'.appledouble'",
"not",
"in",
"lpath",
")",
"and",
"(",
"'.ds_store'",
"not",
"in",
"lpath",
")",
")",
":",
"logging",
".",
"error",
"(",
"T",
"(",
"'Cannot change permissions of %s'",
")",
",",
"clip_path",
"(",
"path",
")",
")",
"logging",
".",
"info",
"(",
"'Traceback: '",
",",
"exc_info",
"=",
"True",
")"
] | set permissions on path . | train | false |
18,280 | def read_element_string(stream, size):
return _read(stream, size).decode('ascii')
| [
"def",
"read_element_string",
"(",
"stream",
",",
"size",
")",
":",
"return",
"_read",
"(",
"stream",
",",
"size",
")",
".",
"decode",
"(",
"'ascii'",
")"
] | read the element data of type :data:string . | train | false |
18,282 | @pytest.mark.parametrize('obj', [QPoint(23, 42), QUrl('http://www.qutebrowser.org/')])
def test_serialize(obj):
new_obj = type(obj)()
qtutils.deserialize(qtutils.serialize(obj), new_obj)
assert (new_obj == obj)
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'obj'",
",",
"[",
"QPoint",
"(",
"23",
",",
"42",
")",
",",
"QUrl",
"(",
"'http://www.qutebrowser.org/'",
")",
"]",
")",
"def",
"test_serialize",
"(",
"obj",
")",
":",
"new_obj",
"=",
"type",
"(",
"obj",
")",
"(",
")",
"qtutils",
".",
"deserialize",
"(",
"qtutils",
".",
"serialize",
"(",
"obj",
")",
",",
"new_obj",
")",
"assert",
"(",
"new_obj",
"==",
"obj",
")"
] | test a serialize/deserialize round trip . | train | false |
18,283 | @with_setup(step_runner_environ)
def test_successful_behave_as_step_doesnt_fail():
runnable_step = Step.from_string('Given I have a step which calls the "define a step" step with behave_as')
runnable_step.run(True)
assert_false(runnable_step.failed)
| [
"@",
"with_setup",
"(",
"step_runner_environ",
")",
"def",
"test_successful_behave_as_step_doesnt_fail",
"(",
")",
":",
"runnable_step",
"=",
"Step",
".",
"from_string",
"(",
"'Given I have a step which calls the \"define a step\" step with behave_as'",
")",
"runnable_step",
".",
"run",
"(",
"True",
")",
"assert_false",
"(",
"runnable_step",
".",
"failed",
")"
] | when a step definition calls another step definition with behave_as . | train | false |
18,284 | def uninstall_cache():
_patch_session_factory(OriginalSession)
| [
"def",
"uninstall_cache",
"(",
")",
":",
"_patch_session_factory",
"(",
"OriginalSession",
")"
] | restores requests . | train | false |
18,286 | def get_containers_for_group(inventory, group):
if ('hosts' in inventory[group]):
containers = inventory[group]['hosts']
else:
containers = None
return containers
| [
"def",
"get_containers_for_group",
"(",
"inventory",
",",
"group",
")",
":",
"if",
"(",
"'hosts'",
"in",
"inventory",
"[",
"group",
"]",
")",
":",
"containers",
"=",
"inventory",
"[",
"group",
"]",
"[",
"'hosts'",
"]",
"else",
":",
"containers",
"=",
"None",
"return",
"containers"
] | return containers that belong to a particular group . | train | false |
18,287 | def getRotatedComplexLists(planeAngle, pointLists):
rotatedComplexLists = []
for pointList in pointLists:
rotatedComplexLists.append(getRotatedComplexes(planeAngle, pointList))
return rotatedComplexLists
| [
"def",
"getRotatedComplexLists",
"(",
"planeAngle",
",",
"pointLists",
")",
":",
"rotatedComplexLists",
"=",
"[",
"]",
"for",
"pointList",
"in",
"pointLists",
":",
"rotatedComplexLists",
".",
"append",
"(",
"getRotatedComplexes",
"(",
"planeAngle",
",",
"pointList",
")",
")",
"return",
"rotatedComplexLists"
] | get point lists rotated by the plane angle . | train | false |
18,290 | def make_inovavolumemanager_tests(client_factory):
class Tests(INovaVolumeManagerTestsMixin, TestCase, ):
def setUp(self):
super(Tests, self).setUp()
self.client = client_factory(test_case=self)
return Tests
| [
"def",
"make_inovavolumemanager_tests",
"(",
"client_factory",
")",
":",
"class",
"Tests",
"(",
"INovaVolumeManagerTestsMixin",
",",
"TestCase",
",",
")",
":",
"def",
"setUp",
"(",
"self",
")",
":",
"super",
"(",
"Tests",
",",
"self",
")",
".",
"setUp",
"(",
")",
"self",
".",
"client",
"=",
"client_factory",
"(",
"test_case",
"=",
"self",
")",
"return",
"Tests"
] | build a testcase for verifying that an implementation of inovavolumemanager adheres to that interface . | train | false |
18,292 | def test_resize_icon_poorly():
somepic = get_image_path('mozilla.png')
src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix='.png', delete=False, dir=settings.TMP_PATH)
shutil.copyfile(somepic, src.name)
src_image = Image.open(src.name)
assert (src_image.size == (339, 128))
resize_icon(src.name, src.name, locally=True)
src_image = Image.open(src.name)
assert (src_image.size == (339, 128))
| [
"def",
"test_resize_icon_poorly",
"(",
")",
":",
"somepic",
"=",
"get_image_path",
"(",
"'mozilla.png'",
")",
"src",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"mode",
"=",
"'r+w+b'",
",",
"suffix",
"=",
"'.png'",
",",
"delete",
"=",
"False",
",",
"dir",
"=",
"settings",
".",
"TMP_PATH",
")",
"shutil",
".",
"copyfile",
"(",
"somepic",
",",
"src",
".",
"name",
")",
"src_image",
"=",
"Image",
".",
"open",
"(",
"src",
".",
"name",
")",
"assert",
"(",
"src_image",
".",
"size",
"==",
"(",
"339",
",",
"128",
")",
")",
"resize_icon",
"(",
"src",
".",
"name",
",",
"src",
".",
"name",
",",
"locally",
"=",
"True",
")",
"src_image",
"=",
"Image",
".",
"open",
"(",
"src",
".",
"name",
")",
"assert",
"(",
"src_image",
".",
"size",
"==",
"(",
"339",
",",
"128",
")",
")"
] | if we attempt to set the src/dst . | train | false |
18,293 | def short_language_code(code=None):
if (code is None):
code = translation.get_language()
pos = code.find(u'-')
if (pos > (-1)):
return code[:pos]
return code
| [
"def",
"short_language_code",
"(",
"code",
"=",
"None",
")",
":",
"if",
"(",
"code",
"is",
"None",
")",
":",
"code",
"=",
"translation",
".",
"get_language",
"(",
")",
"pos",
"=",
"code",
".",
"find",
"(",
"u'-'",
")",
"if",
"(",
"pos",
">",
"(",
"-",
"1",
")",
")",
":",
"return",
"code",
"[",
":",
"pos",
"]",
"return",
"code"
] | extract the short language code from its argument . | train | false |
18,294 | def get_user_email_language(user):
return UserPreference.get_value(user, LANGUAGE_KEY)
| [
"def",
"get_user_email_language",
"(",
"user",
")",
":",
"return",
"UserPreference",
".",
"get_value",
"(",
"user",
",",
"LANGUAGE_KEY",
")"
] | return the language most appropriate for writing emails to user . | train | false |
18,295 | def norm2(a):
if COMPUTE_NORM2:
logging.info(('computing spectral norm of a %s matrix' % str(a.shape)))
return scipy.linalg.eigvalsh(a).max()
else:
return np.nan
| [
"def",
"norm2",
"(",
"a",
")",
":",
"if",
"COMPUTE_NORM2",
":",
"logging",
".",
"info",
"(",
"(",
"'computing spectral norm of a %s matrix'",
"%",
"str",
"(",
"a",
".",
"shape",
")",
")",
")",
"return",
"scipy",
".",
"linalg",
".",
"eigvalsh",
"(",
"a",
")",
".",
"max",
"(",
")",
"else",
":",
"return",
"np",
".",
"nan"
] | spectral norm of a symmetric matrix a . | train | false |
18,296 | def patch_admin(model, admin_site=None):
admin_site = (admin_site or admin.site)
try:
ModelAdmin = admin_site._registry[model].__class__
except KeyError:
raise NotRegistered, ('The model %r has not been registered with the admin site.' % model)
admin_site.unregister(model)
class PatchedModelAdmin(VersionAdmin, ModelAdmin, ):
pass
admin_site.register(model, PatchedModelAdmin)
| [
"def",
"patch_admin",
"(",
"model",
",",
"admin_site",
"=",
"None",
")",
":",
"admin_site",
"=",
"(",
"admin_site",
"or",
"admin",
".",
"site",
")",
"try",
":",
"ModelAdmin",
"=",
"admin_site",
".",
"_registry",
"[",
"model",
"]",
".",
"__class__",
"except",
"KeyError",
":",
"raise",
"NotRegistered",
",",
"(",
"'The model %r has not been registered with the admin site.'",
"%",
"model",
")",
"admin_site",
".",
"unregister",
"(",
"model",
")",
"class",
"PatchedModelAdmin",
"(",
"VersionAdmin",
",",
"ModelAdmin",
",",
")",
":",
"pass",
"admin_site",
".",
"register",
"(",
"model",
",",
"PatchedModelAdmin",
")"
] | enables version control with full admin integration for a model that has already been registered with the django admin site . | train | false |
18,297 | def getFilesForName(name):
if (not os.path.exists(name)):
if containsAny(name, '*?[]'):
files = glob.glob(name)
list = []
for file in files:
list.extend(getFilesForName(file))
return list
name = _get_modpkg_path(name)
if (not name):
return []
if os.path.isdir(name):
list = []
os.path.walk(name, _visit_pyfiles, list)
return list
elif os.path.exists(name):
return [name]
return []
| [
"def",
"getFilesForName",
"(",
"name",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"name",
")",
")",
":",
"if",
"containsAny",
"(",
"name",
",",
"'*?[]'",
")",
":",
"files",
"=",
"glob",
".",
"glob",
"(",
"name",
")",
"list",
"=",
"[",
"]",
"for",
"file",
"in",
"files",
":",
"list",
".",
"extend",
"(",
"getFilesForName",
"(",
"file",
")",
")",
"return",
"list",
"name",
"=",
"_get_modpkg_path",
"(",
"name",
")",
"if",
"(",
"not",
"name",
")",
":",
"return",
"[",
"]",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"name",
")",
":",
"list",
"=",
"[",
"]",
"os",
".",
"path",
".",
"walk",
"(",
"name",
",",
"_visit_pyfiles",
",",
"list",
")",
"return",
"list",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"name",
")",
":",
"return",
"[",
"name",
"]",
"return",
"[",
"]"
] | get a list of module files for a filename . | train | true |
18,298 | def js(*args):
return js_helper('static/scripts/', *args)
| [
"def",
"js",
"(",
"*",
"args",
")",
":",
"return",
"js_helper",
"(",
"'static/scripts/'",
",",
"*",
"args",
")"
] | take a prefix and list of javascript names and return appropriate string of script tags . | train | false |
18,299 | def _create_record_with_sa(engine, resource_type, attributes):
sa_table = db_utils.get_table(engine, 'standardattributes')
sa_record = engine.execute(sa_table.insert().values({'resource_type': resource_type}))
attributes['standard_attr_id'] = sa_record.inserted_primary_key[0]
resource_table = db_utils.get_table(engine, resource_type)
engine.execute(resource_table.insert().values(attributes))
| [
"def",
"_create_record_with_sa",
"(",
"engine",
",",
"resource_type",
",",
"attributes",
")",
":",
"sa_table",
"=",
"db_utils",
".",
"get_table",
"(",
"engine",
",",
"'standardattributes'",
")",
"sa_record",
"=",
"engine",
".",
"execute",
"(",
"sa_table",
".",
"insert",
"(",
")",
".",
"values",
"(",
"{",
"'resource_type'",
":",
"resource_type",
"}",
")",
")",
"attributes",
"[",
"'standard_attr_id'",
"]",
"=",
"sa_record",
".",
"inserted_primary_key",
"[",
"0",
"]",
"resource_table",
"=",
"db_utils",
".",
"get_table",
"(",
"engine",
",",
"resource_type",
")",
"engine",
".",
"execute",
"(",
"resource_table",
".",
"insert",
"(",
")",
".",
"values",
"(",
"attributes",
")",
")"
] | create a record with standard attributes . | train | false |
18,300 | def set_warndays(name, warndays):
pre_info = info(name)
if (warndays == pre_info['warn']):
return True
cmd = 'passwd -w {0} {1}'.format(warndays, name)
__salt__['cmd.run'](cmd, python_shell=False)
post_info = info(name)
if (post_info['warn'] != pre_info['warn']):
return (post_info['warn'] == warndays)
return False
| [
"def",
"set_warndays",
"(",
"name",
",",
"warndays",
")",
":",
"pre_info",
"=",
"info",
"(",
"name",
")",
"if",
"(",
"warndays",
"==",
"pre_info",
"[",
"'warn'",
"]",
")",
":",
"return",
"True",
"cmd",
"=",
"'passwd -w {0} {1}'",
".",
"format",
"(",
"warndays",
",",
"name",
")",
"__salt__",
"[",
"'cmd.run'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"post_info",
"=",
"info",
"(",
"name",
")",
"if",
"(",
"post_info",
"[",
"'warn'",
"]",
"!=",
"pre_info",
"[",
"'warn'",
"]",
")",
":",
"return",
"(",
"post_info",
"[",
"'warn'",
"]",
"==",
"warndays",
")",
"return",
"False"
] | set the number of days of warning before a password change is required . | train | true |
18,302 | def _get_and_verify_data_sizes(data, n_signals=None, n_times=None, times=None):
if (not isinstance(data, (list, tuple))):
raise ValueError('data has to be a list or tuple')
n_signals_tot = 0
for this_data in data:
(this_n_signals, this_n_times) = this_data.shape
if (n_times is not None):
if (this_n_times != n_times):
raise ValueError('all input time series must have the same number of time points')
else:
n_times = this_n_times
n_signals_tot += this_n_signals
if hasattr(this_data, 'times'):
this_times = this_data.times
if (times is not None):
if np.any((times != this_times)):
warn('time scales of input time series do not match')
else:
times = this_times
if (n_signals is not None):
if (n_signals != n_signals_tot):
raise ValueError('the number of time series has to be the same in each epoch')
n_signals = n_signals_tot
return (n_signals, n_times, times)
| [
"def",
"_get_and_verify_data_sizes",
"(",
"data",
",",
"n_signals",
"=",
"None",
",",
"n_times",
"=",
"None",
",",
"times",
"=",
"None",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"data",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"'data has to be a list or tuple'",
")",
"n_signals_tot",
"=",
"0",
"for",
"this_data",
"in",
"data",
":",
"(",
"this_n_signals",
",",
"this_n_times",
")",
"=",
"this_data",
".",
"shape",
"if",
"(",
"n_times",
"is",
"not",
"None",
")",
":",
"if",
"(",
"this_n_times",
"!=",
"n_times",
")",
":",
"raise",
"ValueError",
"(",
"'all input time series must have the same number of time points'",
")",
"else",
":",
"n_times",
"=",
"this_n_times",
"n_signals_tot",
"+=",
"this_n_signals",
"if",
"hasattr",
"(",
"this_data",
",",
"'times'",
")",
":",
"this_times",
"=",
"this_data",
".",
"times",
"if",
"(",
"times",
"is",
"not",
"None",
")",
":",
"if",
"np",
".",
"any",
"(",
"(",
"times",
"!=",
"this_times",
")",
")",
":",
"warn",
"(",
"'time scales of input time series do not match'",
")",
"else",
":",
"times",
"=",
"this_times",
"if",
"(",
"n_signals",
"is",
"not",
"None",
")",
":",
"if",
"(",
"n_signals",
"!=",
"n_signals_tot",
")",
":",
"raise",
"ValueError",
"(",
"'the number of time series has to be the same in each epoch'",
")",
"n_signals",
"=",
"n_signals_tot",
"return",
"(",
"n_signals",
",",
"n_times",
",",
"times",
")"
] | helper function to get and/or verify the data sizes and time scales . | train | false |
18,303 | def _credentials_from_request(request):
if ((oauth2_settings.storage_model is None) or request.user.is_authenticated()):
return get_storage(request).get()
else:
return None
| [
"def",
"_credentials_from_request",
"(",
"request",
")",
":",
"if",
"(",
"(",
"oauth2_settings",
".",
"storage_model",
"is",
"None",
")",
"or",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
")",
":",
"return",
"get_storage",
"(",
"request",
")",
".",
"get",
"(",
")",
"else",
":",
"return",
"None"
] | gets the authorized credentials for this flow . | train | true |
18,305 | @before.harvest
def initial_setup(server):
world.absorb(settings.LETTUCE_SELENIUM_CLIENT, 'LETTUCE_SELENIUM_CLIENT')
if (world.LETTUCE_SELENIUM_CLIENT == 'local'):
browser_driver = getattr(settings, 'LETTUCE_BROWSER', 'chrome')
if (browser_driver == 'chrome'):
desired_capabilities = DesiredCapabilities.CHROME
desired_capabilities['loggingPrefs'] = {'browser': 'ALL'}
else:
desired_capabilities = {}
success = False
num_attempts = 0
while ((not success) and (num_attempts < MAX_VALID_BROWSER_ATTEMPTS)):
try:
if (browser_driver == 'firefox'):
world.browser = Browser(browser_driver)
else:
world.browser = Browser(browser_driver, desired_capabilities=desired_capabilities)
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
world.visit('/')
except WebDriverException:
LOGGER.warn('Error acquiring %s browser, retrying', browser_driver, exc_info=True)
if hasattr(world, 'browser'):
world.browser.quit()
num_attempts += 1
else:
success = True
if (not success):
raise IOError('Could not acquire valid {driver} browser session.'.format(driver=browser_driver))
world.absorb(0, 'IMPLICIT_WAIT')
world.browser.driver.set_window_size(1280, 1024)
elif (world.LETTUCE_SELENIUM_CLIENT == 'saucelabs'):
config = get_saucelabs_username_and_key()
world.browser = Browser('remote', url='http://{}:{}@ondemand.saucelabs.com:80/wd/hub'.format(config['username'], config['access-key']), **make_saucelabs_desired_capabilities())
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
elif (world.LETTUCE_SELENIUM_CLIENT == 'grid'):
world.browser = Browser('remote', url=settings.SELENIUM_GRID.get('URL'), browser=settings.SELENIUM_GRID.get('BROWSER'))
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
else:
raise Exception("Unknown selenium client '{}'".format(world.LETTUCE_SELENIUM_CLIENT))
world.browser.driver.implicitly_wait(world.IMPLICIT_WAIT)
world.absorb(world.browser.driver.session_id, 'jobid')
| [
"@",
"before",
".",
"harvest",
"def",
"initial_setup",
"(",
"server",
")",
":",
"world",
".",
"absorb",
"(",
"settings",
".",
"LETTUCE_SELENIUM_CLIENT",
",",
"'LETTUCE_SELENIUM_CLIENT'",
")",
"if",
"(",
"world",
".",
"LETTUCE_SELENIUM_CLIENT",
"==",
"'local'",
")",
":",
"browser_driver",
"=",
"getattr",
"(",
"settings",
",",
"'LETTUCE_BROWSER'",
",",
"'chrome'",
")",
"if",
"(",
"browser_driver",
"==",
"'chrome'",
")",
":",
"desired_capabilities",
"=",
"DesiredCapabilities",
".",
"CHROME",
"desired_capabilities",
"[",
"'loggingPrefs'",
"]",
"=",
"{",
"'browser'",
":",
"'ALL'",
"}",
"else",
":",
"desired_capabilities",
"=",
"{",
"}",
"success",
"=",
"False",
"num_attempts",
"=",
"0",
"while",
"(",
"(",
"not",
"success",
")",
"and",
"(",
"num_attempts",
"<",
"MAX_VALID_BROWSER_ATTEMPTS",
")",
")",
":",
"try",
":",
"if",
"(",
"browser_driver",
"==",
"'firefox'",
")",
":",
"world",
".",
"browser",
"=",
"Browser",
"(",
"browser_driver",
")",
"else",
":",
"world",
".",
"browser",
"=",
"Browser",
"(",
"browser_driver",
",",
"desired_capabilities",
"=",
"desired_capabilities",
")",
"world",
".",
"browser",
".",
"driver",
".",
"set_script_timeout",
"(",
"GLOBAL_SCRIPT_TIMEOUT",
")",
"world",
".",
"visit",
"(",
"'/'",
")",
"except",
"WebDriverException",
":",
"LOGGER",
".",
"warn",
"(",
"'Error acquiring %s browser, retrying'",
",",
"browser_driver",
",",
"exc_info",
"=",
"True",
")",
"if",
"hasattr",
"(",
"world",
",",
"'browser'",
")",
":",
"world",
".",
"browser",
".",
"quit",
"(",
")",
"num_attempts",
"+=",
"1",
"else",
":",
"success",
"=",
"True",
"if",
"(",
"not",
"success",
")",
":",
"raise",
"IOError",
"(",
"'Could not acquire valid {driver} browser session.'",
".",
"format",
"(",
"driver",
"=",
"browser_driver",
")",
")",
"world",
".",
"absorb",
"(",
"0",
",",
"'IMPLICIT_WAIT'",
")",
"world",
".",
"browser",
".",
"driver",
".",
"set_window_size",
"(",
"1280",
",",
"1024",
")",
"elif",
"(",
"world",
".",
"LETTUCE_SELENIUM_CLIENT",
"==",
"'saucelabs'",
")",
":",
"config",
"=",
"get_saucelabs_username_and_key",
"(",
")",
"world",
".",
"browser",
"=",
"Browser",
"(",
"'remote'",
",",
"url",
"=",
"'http://{}:{}@ondemand.saucelabs.com:80/wd/hub'",
".",
"format",
"(",
"config",
"[",
"'username'",
"]",
",",
"config",
"[",
"'access-key'",
"]",
")",
",",
"**",
"make_saucelabs_desired_capabilities",
"(",
")",
")",
"world",
".",
"absorb",
"(",
"30",
",",
"'IMPLICIT_WAIT'",
")",
"world",
".",
"browser",
".",
"set_script_timeout",
"(",
"GLOBAL_SCRIPT_TIMEOUT",
")",
"elif",
"(",
"world",
".",
"LETTUCE_SELENIUM_CLIENT",
"==",
"'grid'",
")",
":",
"world",
".",
"browser",
"=",
"Browser",
"(",
"'remote'",
",",
"url",
"=",
"settings",
".",
"SELENIUM_GRID",
".",
"get",
"(",
"'URL'",
")",
",",
"browser",
"=",
"settings",
".",
"SELENIUM_GRID",
".",
"get",
"(",
"'BROWSER'",
")",
")",
"world",
".",
"absorb",
"(",
"30",
",",
"'IMPLICIT_WAIT'",
")",
"world",
".",
"browser",
".",
"driver",
".",
"set_script_timeout",
"(",
"GLOBAL_SCRIPT_TIMEOUT",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Unknown selenium client '{}'\"",
".",
"format",
"(",
"world",
".",
"LETTUCE_SELENIUM_CLIENT",
")",
")",
"world",
".",
"browser",
".",
"driver",
".",
"implicitly_wait",
"(",
"world",
".",
"IMPLICIT_WAIT",
")",
"world",
".",
"absorb",
"(",
"world",
".",
"browser",
".",
"driver",
".",
"session_id",
",",
"'jobid'",
")"
] | initial setup for the daemon . | train | false |
18,306 | def init_binaries():
dirpath = os.path.join(CUCKOO_ROOT, 'data', 'monitor', 'latest')
if (not os.path.exists(dirpath)):
raise CuckooStartupError("The binaries used for Windows analysis are updated regularly, independently from the release line. It appears that you're not up-to-date. This can happen when you've just installed Cuckoo or when you've updated your Cuckoo version by pulling the latest changes from our Git repository. In order to get up-to-date, please run the following command: `./utils/community.py -wafb monitor` or `./utils/community.py -waf` if you'd also like to download over 300 Cuckoo signatures.")
if os.path.isfile(dirpath):
monitor = os.path.basename(open(dirpath, 'rb').read().strip())
dirpath = os.path.join(CUCKOO_ROOT, 'data', 'monitor', monitor)
else:
dirpath = None
if (dirpath and (not os.path.isdir(dirpath))):
raise CuckooStartupError("The binaries used for Windows analysis are updated regularly, independently from the release line. It appears that you're not up-to-date. This can happen when you've just installed Cuckoo or when you've updated your Cuckoo version by pulling the latest changes from our Git repository. In order to get up-to-date, please run the following command: `./utils/community.py -wafb monitor` or `./utils/community.py -waf` if you'd also like to download over 300 Cuckoo signatures.")
| [
"def",
"init_binaries",
"(",
")",
":",
"dirpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"CUCKOO_ROOT",
",",
"'data'",
",",
"'monitor'",
",",
"'latest'",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dirpath",
")",
")",
":",
"raise",
"CuckooStartupError",
"(",
"\"The binaries used for Windows analysis are updated regularly, independently from the release line. It appears that you're not up-to-date. This can happen when you've just installed Cuckoo or when you've updated your Cuckoo version by pulling the latest changes from our Git repository. In order to get up-to-date, please run the following command: `./utils/community.py -wafb monitor` or `./utils/community.py -waf` if you'd also like to download over 300 Cuckoo signatures.\"",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"dirpath",
")",
":",
"monitor",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"open",
"(",
"dirpath",
",",
"'rb'",
")",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
")",
"dirpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"CUCKOO_ROOT",
",",
"'data'",
",",
"'monitor'",
",",
"monitor",
")",
"else",
":",
"dirpath",
"=",
"None",
"if",
"(",
"dirpath",
"and",
"(",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"dirpath",
")",
")",
")",
":",
"raise",
"CuckooStartupError",
"(",
"\"The binaries used for Windows analysis are updated regularly, independently from the release line. It appears that you're not up-to-date. This can happen when you've just installed Cuckoo or when you've updated your Cuckoo version by pulling the latest changes from our Git repository. In order to get up-to-date, please run the following command: `./utils/community.py -wafb monitor` or `./utils/community.py -waf` if you'd also like to download over 300 Cuckoo signatures.\"",
")"
] | inform the user about the need to periodically look for new analyzer binaries . | train | false |
18,307 | @frappe.whitelist()
def get_uom_details(item_code, uom, qty):
conversion_factor = get_conversion_factor(item_code, uom).get(u'conversion_factor')
if (not conversion_factor):
frappe.msgprint(_(u'UOM coversion factor required for UOM: {0} in Item: {1}').format(uom, item_code))
ret = {u'uom': u''}
else:
ret = {u'conversion_factor': flt(conversion_factor), u'transfer_qty': (flt(qty) * flt(conversion_factor))}
return ret
| [
"@",
"frappe",
".",
"whitelist",
"(",
")",
"def",
"get_uom_details",
"(",
"item_code",
",",
"uom",
",",
"qty",
")",
":",
"conversion_factor",
"=",
"get_conversion_factor",
"(",
"item_code",
",",
"uom",
")",
".",
"get",
"(",
"u'conversion_factor'",
")",
"if",
"(",
"not",
"conversion_factor",
")",
":",
"frappe",
".",
"msgprint",
"(",
"_",
"(",
"u'UOM coversion factor required for UOM: {0} in Item: {1}'",
")",
".",
"format",
"(",
"uom",
",",
"item_code",
")",
")",
"ret",
"=",
"{",
"u'uom'",
":",
"u''",
"}",
"else",
":",
"ret",
"=",
"{",
"u'conversion_factor'",
":",
"flt",
"(",
"conversion_factor",
")",
",",
"u'transfer_qty'",
":",
"(",
"flt",
"(",
"qty",
")",
"*",
"flt",
"(",
"conversion_factor",
")",
")",
"}",
"return",
"ret"
] | returns dict {"conversion_factor": [value] . | train | false |
18,308 | def arg(*args, **kwargs):
kwargs = dict(((k, v) for (k, v) in six.iteritems(kwargs) if (not k.startswith('__'))))
ret = {'args': args, 'kwargs': kwargs}
return ret
| [
"def",
"arg",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"kwargs",
"=",
"dict",
"(",
"(",
"(",
"k",
",",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
"if",
"(",
"not",
"k",
".",
"startswith",
"(",
"'__'",
")",
")",
")",
")",
"ret",
"=",
"{",
"'args'",
":",
"args",
",",
"'kwargs'",
":",
"kwargs",
"}",
"return",
"ret"
] | decorator for cli args . | train | false |
18,309 | def getTechniqueData(technique=None):
return kb.injection.data.get(technique)
| [
"def",
"getTechniqueData",
"(",
"technique",
"=",
"None",
")",
":",
"return",
"kb",
".",
"injection",
".",
"data",
".",
"get",
"(",
"technique",
")"
] | returns injection data for technique specified . | train | false |
18,310 | def enable_job(name, **kwargs):
ret = {'comment': [], 'result': True}
if (not name):
ret['comment'] = 'Job name is required.'
ret['result'] = False
if (('test' in __opts__) and __opts__['test']):
ret['comment'] = 'Job: {0} would be enabled in schedule.'.format(name)
else:
persist = True
if ('persist' in kwargs):
persist = kwargs['persist']
if (name in list_(show_all=True, where='opts', return_yaml=False)):
event_data = {'name': name, 'func': 'enable_job', 'persist': persist}
elif (name in list_(show_all=True, where='pillar', return_yaml=False)):
event_data = {'name': name, 'where': 'pillar', 'func': 'enable_job', 'persist': False}
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire'](event_data, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_enabled_job_complete', wait=30)
if (event_ret and event_ret['complete']):
schedule = event_ret['schedule']
if ((name in schedule) and schedule[name]['enabled']):
ret['result'] = True
ret['comment'] = 'Enabled Job {0} in schedule.'.format(name)
else:
ret['result'] = False
ret['comment'] = 'Failed to enable job {0} in schedule.'.format(name)
return ret
except KeyError:
ret['comment'] = 'Event module not available. Schedule enable job failed.'
return ret
| [
"def",
"enable_job",
"(",
"name",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"{",
"'comment'",
":",
"[",
"]",
",",
"'result'",
":",
"True",
"}",
"if",
"(",
"not",
"name",
")",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Job name is required.'",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"if",
"(",
"(",
"'test'",
"in",
"__opts__",
")",
"and",
"__opts__",
"[",
"'test'",
"]",
")",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Job: {0} would be enabled in schedule.'",
".",
"format",
"(",
"name",
")",
"else",
":",
"persist",
"=",
"True",
"if",
"(",
"'persist'",
"in",
"kwargs",
")",
":",
"persist",
"=",
"kwargs",
"[",
"'persist'",
"]",
"if",
"(",
"name",
"in",
"list_",
"(",
"show_all",
"=",
"True",
",",
"where",
"=",
"'opts'",
",",
"return_yaml",
"=",
"False",
")",
")",
":",
"event_data",
"=",
"{",
"'name'",
":",
"name",
",",
"'func'",
":",
"'enable_job'",
",",
"'persist'",
":",
"persist",
"}",
"elif",
"(",
"name",
"in",
"list_",
"(",
"show_all",
"=",
"True",
",",
"where",
"=",
"'pillar'",
",",
"return_yaml",
"=",
"False",
")",
")",
":",
"event_data",
"=",
"{",
"'name'",
":",
"name",
",",
"'where'",
":",
"'pillar'",
",",
"'func'",
":",
"'enable_job'",
",",
"'persist'",
":",
"False",
"}",
"else",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Job {0} does not exist.'",
".",
"format",
"(",
"name",
")",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"return",
"ret",
"try",
":",
"eventer",
"=",
"salt",
".",
"utils",
".",
"event",
".",
"get_event",
"(",
"'minion'",
",",
"opts",
"=",
"__opts__",
")",
"res",
"=",
"__salt__",
"[",
"'event.fire'",
"]",
"(",
"event_data",
",",
"'manage_schedule'",
")",
"if",
"res",
":",
"event_ret",
"=",
"eventer",
".",
"get_event",
"(",
"tag",
"=",
"'/salt/minion/minion_schedule_enabled_job_complete'",
",",
"wait",
"=",
"30",
")",
"if",
"(",
"event_ret",
"and",
"event_ret",
"[",
"'complete'",
"]",
")",
":",
"schedule",
"=",
"event_ret",
"[",
"'schedule'",
"]",
"if",
"(",
"(",
"name",
"in",
"schedule",
")",
"and",
"schedule",
"[",
"name",
"]",
"[",
"'enabled'",
"]",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'comment'",
"]",
"=",
"'Enabled Job {0} in schedule.'",
".",
"format",
"(",
"name",
")",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"'Failed to enable job {0} in schedule.'",
".",
"format",
"(",
"name",
")",
"return",
"ret",
"except",
"KeyError",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Event module not available. Schedule enable job failed.'",
"return",
"ret"
] | return true is job is enabled successfully . | train | true |
18,311 | def _bytelist2longBigEndian(list):
imax = (len(list) / 4)
hl = ([0L] * imax)
j = 0
i = 0
while (i < imax):
b0 = (long(ord(list[j])) << 24)
b1 = (long(ord(list[(j + 1)])) << 16)
b2 = (long(ord(list[(j + 2)])) << 8)
b3 = long(ord(list[(j + 3)]))
hl[i] = (((b0 | b1) | b2) | b3)
i = (i + 1)
j = (j + 4)
return hl
| [
"def",
"_bytelist2longBigEndian",
"(",
"list",
")",
":",
"imax",
"=",
"(",
"len",
"(",
"list",
")",
"/",
"4",
")",
"hl",
"=",
"(",
"[",
"0",
"L",
"]",
"*",
"imax",
")",
"j",
"=",
"0",
"i",
"=",
"0",
"while",
"(",
"i",
"<",
"imax",
")",
":",
"b0",
"=",
"(",
"long",
"(",
"ord",
"(",
"list",
"[",
"j",
"]",
")",
")",
"<<",
"24",
")",
"b1",
"=",
"(",
"long",
"(",
"ord",
"(",
"list",
"[",
"(",
"j",
"+",
"1",
")",
"]",
")",
")",
"<<",
"16",
")",
"b2",
"=",
"(",
"long",
"(",
"ord",
"(",
"list",
"[",
"(",
"j",
"+",
"2",
")",
"]",
")",
")",
"<<",
"8",
")",
"b3",
"=",
"long",
"(",
"ord",
"(",
"list",
"[",
"(",
"j",
"+",
"3",
")",
"]",
")",
")",
"hl",
"[",
"i",
"]",
"=",
"(",
"(",
"(",
"b0",
"|",
"b1",
")",
"|",
"b2",
")",
"|",
"b3",
")",
"i",
"=",
"(",
"i",
"+",
"1",
")",
"j",
"=",
"(",
"j",
"+",
"4",
")",
"return",
"hl"
] | transform a list of characters into a list of longs . | train | true |
18,313 | def _apply_scaling_array(data, picks_list, scalings):
scalings = _check_scaling_inputs(data, picks_list, scalings)
if isinstance(scalings, dict):
picks_dict = dict(picks_list)
scalings = [(picks_dict[k], v) for (k, v) in scalings.items() if (k in picks_dict)]
for (idx, scaling) in scalings:
data[idx, :] *= scaling
else:
data *= scalings[:, np.newaxis]
| [
"def",
"_apply_scaling_array",
"(",
"data",
",",
"picks_list",
",",
"scalings",
")",
":",
"scalings",
"=",
"_check_scaling_inputs",
"(",
"data",
",",
"picks_list",
",",
"scalings",
")",
"if",
"isinstance",
"(",
"scalings",
",",
"dict",
")",
":",
"picks_dict",
"=",
"dict",
"(",
"picks_list",
")",
"scalings",
"=",
"[",
"(",
"picks_dict",
"[",
"k",
"]",
",",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"scalings",
".",
"items",
"(",
")",
"if",
"(",
"k",
"in",
"picks_dict",
")",
"]",
"for",
"(",
"idx",
",",
"scaling",
")",
"in",
"scalings",
":",
"data",
"[",
"idx",
",",
":",
"]",
"*=",
"scaling",
"else",
":",
"data",
"*=",
"scalings",
"[",
":",
",",
"np",
".",
"newaxis",
"]"
] | scale data type-dependently for estimation . | train | false |
18,315 | def run_new_comments(limit=1000):
@g.stats.amqp_processor('newcomments_q')
def _run_new_comments(msgs, chan):
fnames = [msg.body for msg in msgs]
comments = Comment._by_fullname(fnames, data=True, return_dict=False)
add_queries([get_all_comments()], insert_items=comments)
bysrid = _by_srid(comments, False)
for (srid, sr_comments) in bysrid.iteritems():
add_queries([_get_sr_comments(srid)], insert_items=sr_comments)
amqp.handle_items('newcomments_q', _run_new_comments, limit=limit)
| [
"def",
"run_new_comments",
"(",
"limit",
"=",
"1000",
")",
":",
"@",
"g",
".",
"stats",
".",
"amqp_processor",
"(",
"'newcomments_q'",
")",
"def",
"_run_new_comments",
"(",
"msgs",
",",
"chan",
")",
":",
"fnames",
"=",
"[",
"msg",
".",
"body",
"for",
"msg",
"in",
"msgs",
"]",
"comments",
"=",
"Comment",
".",
"_by_fullname",
"(",
"fnames",
",",
"data",
"=",
"True",
",",
"return_dict",
"=",
"False",
")",
"add_queries",
"(",
"[",
"get_all_comments",
"(",
")",
"]",
",",
"insert_items",
"=",
"comments",
")",
"bysrid",
"=",
"_by_srid",
"(",
"comments",
",",
"False",
")",
"for",
"(",
"srid",
",",
"sr_comments",
")",
"in",
"bysrid",
".",
"iteritems",
"(",
")",
":",
"add_queries",
"(",
"[",
"_get_sr_comments",
"(",
"srid",
")",
"]",
",",
"insert_items",
"=",
"sr_comments",
")",
"amqp",
".",
"handle_items",
"(",
"'newcomments_q'",
",",
"_run_new_comments",
",",
"limit",
"=",
"limit",
")"
] | add new incoming comments to the /comments page . | train | false |
18,316 | def test_cache_config_change_cache_size(config_stub, tmpdir):
max_cache_size = 1024
config_stub.data = {'storage': {'cache-size': max_cache_size}, 'general': {'private-browsing': False}}
disk_cache = cache.DiskCache(str(tmpdir))
assert (disk_cache.maximumCacheSize() == max_cache_size)
config_stub.set('storage', 'cache-size', (max_cache_size * 2))
assert (disk_cache.maximumCacheSize() == (max_cache_size * 2))
| [
"def",
"test_cache_config_change_cache_size",
"(",
"config_stub",
",",
"tmpdir",
")",
":",
"max_cache_size",
"=",
"1024",
"config_stub",
".",
"data",
"=",
"{",
"'storage'",
":",
"{",
"'cache-size'",
":",
"max_cache_size",
"}",
",",
"'general'",
":",
"{",
"'private-browsing'",
":",
"False",
"}",
"}",
"disk_cache",
"=",
"cache",
".",
"DiskCache",
"(",
"str",
"(",
"tmpdir",
")",
")",
"assert",
"(",
"disk_cache",
".",
"maximumCacheSize",
"(",
")",
"==",
"max_cache_size",
")",
"config_stub",
".",
"set",
"(",
"'storage'",
",",
"'cache-size'",
",",
"(",
"max_cache_size",
"*",
"2",
")",
")",
"assert",
"(",
"disk_cache",
".",
"maximumCacheSize",
"(",
")",
"==",
"(",
"max_cache_size",
"*",
"2",
")",
")"
] | change cache size and emit signal to trigger on_config_changed . | train | false |
18,318 | @pytest.mark.models
def test_consistency_bug(EN):
tokens = EN(u'Where rap essentially went mainstream, illustrated by seminal Public Enemy, Beastie Boys and L.L. Cool J. tracks.')
tokens = EN(u'Charity and other short-term aid have buoyed them so far, and a tax-relief bill working its way through Congress would help. But the September 11 Victim Compensation Fund, enacted by Congress to discourage people from filing lawsuits, will determine the shape of their lives for years to come.\n\n', entity=False)
tokens.ents += tuple(EN.matcher(tokens))
EN.entity(tokens)
| [
"@",
"pytest",
".",
"mark",
".",
"models",
"def",
"test_consistency_bug",
"(",
"EN",
")",
":",
"tokens",
"=",
"EN",
"(",
"u'Where rap essentially went mainstream, illustrated by seminal Public Enemy, Beastie Boys and L.L. Cool J. tracks.'",
")",
"tokens",
"=",
"EN",
"(",
"u'Charity and other short-term aid have buoyed them so far, and a tax-relief bill working its way through Congress would help. But the September 11 Victim Compensation Fund, enacted by Congress to discourage people from filing lawsuits, will determine the shape of their lives for years to come.\\n\\n'",
",",
"entity",
"=",
"False",
")",
"tokens",
".",
"ents",
"+=",
"tuple",
"(",
"EN",
".",
"matcher",
"(",
"tokens",
")",
")",
"EN",
".",
"entity",
"(",
"tokens",
")"
] | test an arbitrary sequence-consistency bug encountered during speed test . | train | false |
18,320 | def img_as_int(image, force_copy=False):
return convert(image, np.int16, force_copy)
| [
"def",
"img_as_int",
"(",
"image",
",",
"force_copy",
"=",
"False",
")",
":",
"return",
"convert",
"(",
"image",
",",
"np",
".",
"int16",
",",
"force_copy",
")"
] | convert an image to 16-bit signed integer format . | train | false |
18,322 | @utils.arg('--tenant', dest='tenant', metavar='<tenant>', nargs='?', help=_('Display information from single tenant (Admin only).'))
@utils.arg('--reserved', dest='reserved', action='store_true', default=False, help=_('Include reservations count.'))
def do_limits(cs, args):
limits = cs.limits.get(args.reserved, args.tenant)
_print_rate_limits(limits.rate)
_print_absolute_limits(limits.absolute)
| [
"@",
"utils",
".",
"arg",
"(",
"'--tenant'",
",",
"dest",
"=",
"'tenant'",
",",
"metavar",
"=",
"'<tenant>'",
",",
"nargs",
"=",
"'?'",
",",
"help",
"=",
"_",
"(",
"'Display information from single tenant (Admin only).'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'--reserved'",
",",
"dest",
"=",
"'reserved'",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"_",
"(",
"'Include reservations count.'",
")",
")",
"def",
"do_limits",
"(",
"cs",
",",
"args",
")",
":",
"limits",
"=",
"cs",
".",
"limits",
".",
"get",
"(",
"args",
".",
"reserved",
",",
"args",
".",
"tenant",
")",
"_print_rate_limits",
"(",
"limits",
".",
"rate",
")",
"_print_absolute_limits",
"(",
"limits",
".",
"absolute",
")"
] | print rate and absolute limits . | train | false |
18,323 | def partition_suite_by_type(suite, classes, bins, reverse=False):
suite_class = type(suite)
if reverse:
suite = reversed(tuple(suite))
for test in suite:
if isinstance(test, suite_class):
partition_suite_by_type(test, classes, bins, reverse=reverse)
else:
for i in range(len(classes)):
if isinstance(test, classes[i]):
bins[i].add(test)
break
else:
bins[(-1)].add(test)
| [
"def",
"partition_suite_by_type",
"(",
"suite",
",",
"classes",
",",
"bins",
",",
"reverse",
"=",
"False",
")",
":",
"suite_class",
"=",
"type",
"(",
"suite",
")",
"if",
"reverse",
":",
"suite",
"=",
"reversed",
"(",
"tuple",
"(",
"suite",
")",
")",
"for",
"test",
"in",
"suite",
":",
"if",
"isinstance",
"(",
"test",
",",
"suite_class",
")",
":",
"partition_suite_by_type",
"(",
"test",
",",
"classes",
",",
"bins",
",",
"reverse",
"=",
"reverse",
")",
"else",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"classes",
")",
")",
":",
"if",
"isinstance",
"(",
"test",
",",
"classes",
"[",
"i",
"]",
")",
":",
"bins",
"[",
"i",
"]",
".",
"add",
"(",
"test",
")",
"break",
"else",
":",
"bins",
"[",
"(",
"-",
"1",
")",
"]",
".",
"add",
"(",
"test",
")"
] | partitions a test suite by test type . | train | false |
18,324 | def linear_harvey_collier(res):
rr = recursive_olsresiduals(res, skip=3, alpha=0.95)
from scipy import stats
return stats.ttest_1samp(rr[3][3:], 0)
| [
"def",
"linear_harvey_collier",
"(",
"res",
")",
":",
"rr",
"=",
"recursive_olsresiduals",
"(",
"res",
",",
"skip",
"=",
"3",
",",
"alpha",
"=",
"0.95",
")",
"from",
"scipy",
"import",
"stats",
"return",
"stats",
".",
"ttest_1samp",
"(",
"rr",
"[",
"3",
"]",
"[",
"3",
":",
"]",
",",
"0",
")"
] | harvey collier test for linearity the null hypothesis is that the regression is correctly modeled as linear . | train | false |
18,325 | def scan_mail_log_line(line, collector):
m = re.match('(\\S+ \\d+ \\d+:\\d+:\\d+) (\\S+) (\\S+?)(\\[\\d+\\])?: (.*)', line)
if (not m):
return
(date, system, service, pid, log) = m.groups()
date = dateutil.parser.parse(date)
if (service == 'dovecot'):
scan_dovecot_line(date, log, collector)
elif (service == 'postgrey'):
scan_postgrey_line(date, log, collector)
elif (service == 'postfix/smtpd'):
scan_postfix_smtpd_line(date, log, collector)
elif (service == 'postfix/cleanup'):
scan_postfix_cleanup_line(date, log, collector)
elif (service == 'postfix/submission/smtpd'):
scan_postfix_submission_line(date, log, collector)
elif (service in ('postfix/qmgr', 'postfix/pickup', 'postfix/cleanup', 'postfix/scache', 'spampd', 'postfix/anvil', 'postfix/master', 'opendkim', 'postfix/lmtp', 'postfix/tlsmgr')):
pass
else:
collector['other-services'].add(service)
| [
"def",
"scan_mail_log_line",
"(",
"line",
",",
"collector",
")",
":",
"m",
"=",
"re",
".",
"match",
"(",
"'(\\\\S+ \\\\d+ \\\\d+:\\\\d+:\\\\d+) (\\\\S+) (\\\\S+?)(\\\\[\\\\d+\\\\])?: (.*)'",
",",
"line",
")",
"if",
"(",
"not",
"m",
")",
":",
"return",
"(",
"date",
",",
"system",
",",
"service",
",",
"pid",
",",
"log",
")",
"=",
"m",
".",
"groups",
"(",
")",
"date",
"=",
"dateutil",
".",
"parser",
".",
"parse",
"(",
"date",
")",
"if",
"(",
"service",
"==",
"'dovecot'",
")",
":",
"scan_dovecot_line",
"(",
"date",
",",
"log",
",",
"collector",
")",
"elif",
"(",
"service",
"==",
"'postgrey'",
")",
":",
"scan_postgrey_line",
"(",
"date",
",",
"log",
",",
"collector",
")",
"elif",
"(",
"service",
"==",
"'postfix/smtpd'",
")",
":",
"scan_postfix_smtpd_line",
"(",
"date",
",",
"log",
",",
"collector",
")",
"elif",
"(",
"service",
"==",
"'postfix/cleanup'",
")",
":",
"scan_postfix_cleanup_line",
"(",
"date",
",",
"log",
",",
"collector",
")",
"elif",
"(",
"service",
"==",
"'postfix/submission/smtpd'",
")",
":",
"scan_postfix_submission_line",
"(",
"date",
",",
"log",
",",
"collector",
")",
"elif",
"(",
"service",
"in",
"(",
"'postfix/qmgr'",
",",
"'postfix/pickup'",
",",
"'postfix/cleanup'",
",",
"'postfix/scache'",
",",
"'spampd'",
",",
"'postfix/anvil'",
",",
"'postfix/master'",
",",
"'opendkim'",
",",
"'postfix/lmtp'",
",",
"'postfix/tlsmgr'",
")",
")",
":",
"pass",
"else",
":",
"collector",
"[",
"'other-services'",
"]",
".",
"add",
"(",
"service",
")"
] | scan a log line and extract interesting data . | train | false |
18,326 | def partial_resids(results, focus_exog):
model = results.model
resid = (model.endog - results.predict())
if isinstance(model, (GLM, GEE)):
resid *= model.family.link.deriv(results.fittedvalues)
elif isinstance(model, (OLS, GLS, WLS)):
pass
else:
raise ValueError(("Partial residuals for '%s' not implemented." % type(model)))
if (type(focus_exog) is str):
focus_col = model.exog_names.index(focus_exog)
else:
focus_col = focus_exog
focus_val = (results.params[focus_col] * model.exog[:, focus_col])
return (focus_val + resid)
| [
"def",
"partial_resids",
"(",
"results",
",",
"focus_exog",
")",
":",
"model",
"=",
"results",
".",
"model",
"resid",
"=",
"(",
"model",
".",
"endog",
"-",
"results",
".",
"predict",
"(",
")",
")",
"if",
"isinstance",
"(",
"model",
",",
"(",
"GLM",
",",
"GEE",
")",
")",
":",
"resid",
"*=",
"model",
".",
"family",
".",
"link",
".",
"deriv",
"(",
"results",
".",
"fittedvalues",
")",
"elif",
"isinstance",
"(",
"model",
",",
"(",
"OLS",
",",
"GLS",
",",
"WLS",
")",
")",
":",
"pass",
"else",
":",
"raise",
"ValueError",
"(",
"(",
"\"Partial residuals for '%s' not implemented.\"",
"%",
"type",
"(",
"model",
")",
")",
")",
"if",
"(",
"type",
"(",
"focus_exog",
")",
"is",
"str",
")",
":",
"focus_col",
"=",
"model",
".",
"exog_names",
".",
"index",
"(",
"focus_exog",
")",
"else",
":",
"focus_col",
"=",
"focus_exog",
"focus_val",
"=",
"(",
"results",
".",
"params",
"[",
"focus_col",
"]",
"*",
"model",
".",
"exog",
"[",
":",
",",
"focus_col",
"]",
")",
"return",
"(",
"focus_val",
"+",
"resid",
")"
] | returns partial residuals for a fitted model with respect to a focus predictor . | train | false |
18,328 | def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
| [
"def",
"eagerload_all",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"joinedload_all",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] | a synonym for :func:joinedload_all() . | train | false |
18,329 | def build_branches(project, branch_list):
for branch in branch_list:
versions = project.versions_from_branch_name(branch)
to_build = set()
not_building = set()
for version in versions:
log.info(('(Branch Build) Processing %s:%s' % (project.slug, version.slug)))
ret = _build_version(project, version.slug, already_built=to_build)
if ret:
to_build.add(ret)
else:
not_building.add(version.slug)
return (to_build, not_building)
| [
"def",
"build_branches",
"(",
"project",
",",
"branch_list",
")",
":",
"for",
"branch",
"in",
"branch_list",
":",
"versions",
"=",
"project",
".",
"versions_from_branch_name",
"(",
"branch",
")",
"to_build",
"=",
"set",
"(",
")",
"not_building",
"=",
"set",
"(",
")",
"for",
"version",
"in",
"versions",
":",
"log",
".",
"info",
"(",
"(",
"'(Branch Build) Processing %s:%s'",
"%",
"(",
"project",
".",
"slug",
",",
"version",
".",
"slug",
")",
")",
")",
"ret",
"=",
"_build_version",
"(",
"project",
",",
"version",
".",
"slug",
",",
"already_built",
"=",
"to_build",
")",
"if",
"ret",
":",
"to_build",
".",
"add",
"(",
"ret",
")",
"else",
":",
"not_building",
".",
"add",
"(",
"version",
".",
"slug",
")",
"return",
"(",
"to_build",
",",
"not_building",
")"
] | build the branches for a specific project . | train | false |
18,330 | def _read_cookie(cookie_path, is_safecookie):
if (not os.path.exists(cookie_path)):
exc_msg = ("Authentication failed: '%s' doesn't exist" % cookie_path)
raise UnreadableCookieFile(exc_msg, cookie_path, is_safecookie)
auth_cookie_size = os.path.getsize(cookie_path)
if (auth_cookie_size != 32):
exc_msg = ("Authentication failed: authentication cookie '%s' is the wrong size (%i bytes instead of 32)" % (cookie_path, auth_cookie_size))
raise IncorrectCookieSize(exc_msg, cookie_path, is_safecookie)
try:
with open(cookie_path, 'rb', 0) as f:
return f.read()
except IOError as exc:
exc_msg = ("Authentication failed: unable to read '%s' (%s)" % (cookie_path, exc))
raise UnreadableCookieFile(exc_msg, cookie_path, is_safecookie)
| [
"def",
"_read_cookie",
"(",
"cookie_path",
",",
"is_safecookie",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cookie_path",
")",
")",
":",
"exc_msg",
"=",
"(",
"\"Authentication failed: '%s' doesn't exist\"",
"%",
"cookie_path",
")",
"raise",
"UnreadableCookieFile",
"(",
"exc_msg",
",",
"cookie_path",
",",
"is_safecookie",
")",
"auth_cookie_size",
"=",
"os",
".",
"path",
".",
"getsize",
"(",
"cookie_path",
")",
"if",
"(",
"auth_cookie_size",
"!=",
"32",
")",
":",
"exc_msg",
"=",
"(",
"\"Authentication failed: authentication cookie '%s' is the wrong size (%i bytes instead of 32)\"",
"%",
"(",
"cookie_path",
",",
"auth_cookie_size",
")",
")",
"raise",
"IncorrectCookieSize",
"(",
"exc_msg",
",",
"cookie_path",
",",
"is_safecookie",
")",
"try",
":",
"with",
"open",
"(",
"cookie_path",
",",
"'rb'",
",",
"0",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")",
"except",
"IOError",
"as",
"exc",
":",
"exc_msg",
"=",
"(",
"\"Authentication failed: unable to read '%s' (%s)\"",
"%",
"(",
"cookie_path",
",",
"exc",
")",
")",
"raise",
"UnreadableCookieFile",
"(",
"exc_msg",
",",
"cookie_path",
",",
"is_safecookie",
")"
] | provides the contents of a given cookie file . | train | false |
def get_data_filepath(name):
    """Return the full path of data file ``name`` inside the data directory.

    The data directory is created on demand.
    """
    dr = data_root()
    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists()/os.makedirs() pair when two processes start at once.
    os.makedirs(dr, exist_ok=True)
    return os.path.join(dr, name)
| [
"def",
"get_data_filepath",
"(",
"name",
")",
":",
"dr",
"=",
"data_root",
"(",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dr",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"dr",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"dr",
",",
"name",
")"
] | returns a handle to data file . | train | true |
def find_free_port(interface='127.0.0.1', socket_family=socket.AF_INET, socket_type=socket.SOCK_STREAM):
    """Bind a throwaway socket to port 0 on ``interface`` and return the
    resulting socket name (an address tuple including the kernel-chosen
    free port).

    The probe socket is always closed before returning, so the port is
    only guaranteed free at the moment of probing.
    """
    resolved = socket.getaddrinfo(interface, 0)[0][4]
    probe = socket.socket(socket_family, socket_type)
    try:
        probe.bind(resolved)
        sockname = probe.getsockname()
    finally:
        probe.close()
    return sockname
| [
"def",
"find_free_port",
"(",
"interface",
"=",
"'127.0.0.1'",
",",
"socket_family",
"=",
"socket",
".",
"AF_INET",
",",
"socket_type",
"=",
"socket",
".",
"SOCK_STREAM",
")",
":",
"address",
"=",
"socket",
".",
"getaddrinfo",
"(",
"interface",
",",
"0",
")",
"[",
"0",
"]",
"[",
"4",
"]",
"probe",
"=",
"socket",
".",
"socket",
"(",
"socket_family",
",",
"socket_type",
")",
"try",
":",
"probe",
".",
"bind",
"(",
"address",
")",
"return",
"probe",
".",
"getsockname",
"(",
")",
"finally",
":",
"probe",
".",
"close",
"(",
")"
] | return a host free port in the range [start_port . | train | false |
def test_round_to_int():
    """Exercise round_to_int against a table of (value, multiple, expected)."""
    cases = (
        (154231, 1000, 154000),
        (154231, 10, 154230),
        (154231, 100000, 200000),
        (154231, 50000, 150000),
        (154231, 500, 154000),
        (154231, 200, 154200),
        (154361, 200, 154400),
    )
    for value, multiple, expected in cases:
        assert round_to_int(value, multiple) == expected
| [
"def",
"test_round_to_int",
"(",
")",
":",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"1000",
")",
"==",
"154000",
")",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"10",
")",
"==",
"154230",
")",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"100000",
")",
"==",
"200000",
")",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"50000",
")",
"==",
"150000",
")",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"500",
")",
"==",
"154000",
")",
"assert",
"(",
"round_to_int",
"(",
"154231",
",",
"200",
")",
"==",
"154200",
")",
"assert",
"(",
"round_to_int",
"(",
"154361",
",",
"200",
")",
"==",
"154400",
")"
] | test round to int function . | train | false |
18,334 | def _set_date_str(info, date_str, original=False):
if date_str:
date_parts = date_str.split('-')
for key in ('year', 'month', 'day'):
if date_parts:
date_part = date_parts.pop(0)
try:
date_num = int(date_part)
except ValueError:
continue
if original:
key = ('original_' + key)
setattr(info, key, date_num)
| [
"def",
"_set_date_str",
"(",
"info",
",",
"date_str",
",",
"original",
"=",
"False",
")",
":",
"if",
"date_str",
":",
"date_parts",
"=",
"date_str",
".",
"split",
"(",
"'-'",
")",
"for",
"key",
"in",
"(",
"'year'",
",",
"'month'",
",",
"'day'",
")",
":",
"if",
"date_parts",
":",
"date_part",
"=",
"date_parts",
".",
"pop",
"(",
"0",
")",
"try",
":",
"date_num",
"=",
"int",
"(",
"date_part",
")",
"except",
"ValueError",
":",
"continue",
"if",
"original",
":",
"key",
"=",
"(",
"'original_'",
"+",
"key",
")",
"setattr",
"(",
"info",
",",
"key",
",",
"date_num",
")"
] | given a yyyy-mm-dd string and an albuminfo object . | train | false |
def findElements(parent, matcher):
    """Return the element children of ``parent`` (nodes carrying a
    ``tagName`` attribute) for which ``matcher`` returns a true value,
    delegating the traversal to ``findNodes``.
    """
    def elementMatcher(node, matcher=matcher):
        # Only element nodes have a tagName; text/comment nodes do not.
        return (getattr(node, 'tagName', None) is not None) and matcher(node)
    return findNodes(parent, elementMatcher)
| [
"def",
"findElements",
"(",
"parent",
",",
"matcher",
")",
":",
"return",
"findNodes",
"(",
"parent",
",",
"(",
"lambda",
"n",
",",
"matcher",
"=",
"matcher",
":",
"(",
"(",
"getattr",
"(",
"n",
",",
"'tagName'",
",",
"None",
")",
"is",
"not",
"None",
")",
"and",
"matcher",
"(",
"n",
")",
")",
")",
")"
] | return an iterable of the elements which are children of c{parent} for which the predicate c{matcher} returns true . | train | false |
def current_line_number():
    """Return the source line number of the call site (the caller's frame)."""
    import inspect
    # f_back is the frame that invoked us, i.e. the caller.
    caller_frame = inspect.currentframe().f_back
    return caller_frame.f_lineno
| [
"def",
"current_line_number",
"(",
")",
":",
"import",
"inspect",
"return",
"inspect",
".",
"currentframe",
"(",
")",
".",
"f_back",
".",
"f_lineno"
] | returns the current line number in our program . | train | false |
18,337 | def p_command_dim(p):
    # PLY parser action, presumably for the rule "command : DIM dimlist".
    # NOTE(review): PLY normally requires that rule as the function's
    # docstring -- it appears to have been stripped from this snippet;
    # confirm against the original parser source before reuse.
    p[0] = ('DIM', p[2])
| [
"def",
"p_command_dim",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"(",
"'DIM'",
",",
"p",
"[",
"2",
"]",
")"
] | command : dim dimlist . | train | false |
18,340 | def _changes(cur, dns_proto, dns_servers, ip_proto, ip_addrs, gateway):
    """
    Compare the current interface configuration ``cur`` (a parsed dict,
    presumably ``netsh``-style output -- note keys such as
    'DHCP enabled' -- TODO confirm) against the desired DNS/IP settings
    and return a dict containing only the settings that must change.

    An empty dict means the interface already matches the desired state.
    """
    changes = {}
    # Which DNS key was reported tells us the currently active DNS mode.
    cur_dns_proto = ('static' if ('Statically Configured DNS Servers' in cur) else 'dhcp')
    if (cur_dns_proto == 'static'):
        cur_dns_servers = cur['Statically Configured DNS Servers']
        # 'None' is presumably the sentinel used when no servers are set.
        if (set((dns_servers or ['None'])) != set(cur_dns_servers)):
            changes['dns_servers'] = dns_servers
    elif ('DNS servers configured through DHCP' in cur):
        cur_dns_servers = cur['DNS servers configured through DHCP']
        # Only flag a server change when switching to static DNS;
        # DHCP-assigned servers are otherwise left alone.
        if (dns_proto == 'static'):
            if (set((dns_servers or ['None'])) != set(cur_dns_servers)):
                changes['dns_servers'] = dns_servers
    cur_ip_proto = ('static' if (cur['DHCP enabled'] == 'No') else 'dhcp')
    cur_ip_addrs = _addrdict_to_ip_addrs(cur.get('ip_addrs', []))
    cur_gateway = cur.get('Default Gateway')
    if (dns_proto != cur_dns_proto):
        changes['dns_proto'] = dns_proto
    if (ip_proto != cur_ip_proto):
        changes['ip_proto'] = ip_proto
    # Address and gateway changes are only meaningful for static addressing.
    if (set((ip_addrs or [])) != set(cur_ip_addrs)):
        if (ip_proto == 'static'):
            changes['ip_addrs'] = ip_addrs
    if (gateway != cur_gateway):
        if (ip_proto == 'static'):
            changes['gateway'] = gateway
    return changes
| [
"def",
"_changes",
"(",
"cur",
",",
"dns_proto",
",",
"dns_servers",
",",
"ip_proto",
",",
"ip_addrs",
",",
"gateway",
")",
":",
"changes",
"=",
"{",
"}",
"cur_dns_proto",
"=",
"(",
"'static'",
"if",
"(",
"'Statically Configured DNS Servers'",
"in",
"cur",
")",
"else",
"'dhcp'",
")",
"if",
"(",
"cur_dns_proto",
"==",
"'static'",
")",
":",
"cur_dns_servers",
"=",
"cur",
"[",
"'Statically Configured DNS Servers'",
"]",
"if",
"(",
"set",
"(",
"(",
"dns_servers",
"or",
"[",
"'None'",
"]",
")",
")",
"!=",
"set",
"(",
"cur_dns_servers",
")",
")",
":",
"changes",
"[",
"'dns_servers'",
"]",
"=",
"dns_servers",
"elif",
"(",
"'DNS servers configured through DHCP'",
"in",
"cur",
")",
":",
"cur_dns_servers",
"=",
"cur",
"[",
"'DNS servers configured through DHCP'",
"]",
"if",
"(",
"dns_proto",
"==",
"'static'",
")",
":",
"if",
"(",
"set",
"(",
"(",
"dns_servers",
"or",
"[",
"'None'",
"]",
")",
")",
"!=",
"set",
"(",
"cur_dns_servers",
")",
")",
":",
"changes",
"[",
"'dns_servers'",
"]",
"=",
"dns_servers",
"cur_ip_proto",
"=",
"(",
"'static'",
"if",
"(",
"cur",
"[",
"'DHCP enabled'",
"]",
"==",
"'No'",
")",
"else",
"'dhcp'",
")",
"cur_ip_addrs",
"=",
"_addrdict_to_ip_addrs",
"(",
"cur",
".",
"get",
"(",
"'ip_addrs'",
",",
"[",
"]",
")",
")",
"cur_gateway",
"=",
"cur",
".",
"get",
"(",
"'Default Gateway'",
")",
"if",
"(",
"dns_proto",
"!=",
"cur_dns_proto",
")",
":",
"changes",
"[",
"'dns_proto'",
"]",
"=",
"dns_proto",
"if",
"(",
"ip_proto",
"!=",
"cur_ip_proto",
")",
":",
"changes",
"[",
"'ip_proto'",
"]",
"=",
"ip_proto",
"if",
"(",
"set",
"(",
"(",
"ip_addrs",
"or",
"[",
"]",
")",
")",
"!=",
"set",
"(",
"cur_ip_addrs",
")",
")",
":",
"if",
"(",
"ip_proto",
"==",
"'static'",
")",
":",
"changes",
"[",
"'ip_addrs'",
"]",
"=",
"ip_addrs",
"if",
"(",
"gateway",
"!=",
"cur_gateway",
")",
":",
"if",
"(",
"ip_proto",
"==",
"'static'",
")",
":",
"changes",
"[",
"'gateway'",
"]",
"=",
"gateway",
"return",
"changes"
] | return a dict of the changes required for a group if the group is present . | train | true |
def _api_test_email(name, output, kwargs):
    """API handler: send a test email using the settings given in ``kwargs``.

    Builds a dummy job result and runs it through the normal end-of-job
    email path; returns the standard API report, with ``error=None``
    on success.
    """
    logging.info('Sending test email')
    # Dummy per-stage actions so the email template has content to render.
    pack = {
        'download': ['action 1', 'action 2'],
        'unpack': ['action 1', 'action 2'],
    }
    # NOTE(review): the leading '/' in the joined component makes
    # os.path.join discard complete_dir on POSIX -- mirrors the original.
    storage = os.path.normpath(os.path.join(cfg.complete_dir.get_path(), u'/unknown/I had a d\xe8ja vu'))
    res = sabnzbd.emailer.endjob(u'I had a d\xe8ja vu', 'unknown', True, storage, (123 * MEBI), None, pack, 'my_script', u'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0, test=kwargs)
    if res == 'Email succeeded':
        # Callers expect error=None when everything worked.
        res = None
    return report(output, error=res)
| [
"def",
"_api_test_email",
"(",
"name",
",",
"output",
",",
"kwargs",
")",
":",
"logging",
".",
"info",
"(",
"'Sending test email'",
")",
"pack",
"=",
"{",
"}",
"pack",
"[",
"'download'",
"]",
"=",
"[",
"'action 1'",
",",
"'action 2'",
"]",
"pack",
"[",
"'unpack'",
"]",
"=",
"[",
"'action 1'",
",",
"'action 2'",
"]",
"res",
"=",
"sabnzbd",
".",
"emailer",
".",
"endjob",
"(",
"u'I had a d\\xe8ja vu'",
",",
"'unknown'",
",",
"True",
",",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"cfg",
".",
"complete_dir",
".",
"get_path",
"(",
")",
",",
"u'/unknown/I had a d\\xe8ja vu'",
")",
")",
",",
"(",
"123",
"*",
"MEBI",
")",
",",
"None",
",",
"pack",
",",
"'my_script'",
",",
"u'Line 1\\nLine 2\\nLine 3\\nd\\xe8ja vu\\n'",
",",
"0",
",",
"test",
"=",
"kwargs",
")",
"if",
"(",
"res",
"==",
"'Email succeeded'",
")",
":",
"res",
"=",
"None",
"return",
"report",
"(",
"output",
",",
"error",
"=",
"res",
")"
] | api: send a test email . | train | false |
def rfclink(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Docutils role: link a section number to the OAuth2 draft at base_url.

    NOTE(review): the mutable ``options``/``content`` defaults follow the
    docutils role-function convention and are never mutated here.
    """
    refuri = '%s#section-%s' % (base_url, text)
    node = nodes.reference(rawtext, 'Section ' + text, refuri=refuri)
    return [node], []
| [
"def",
"rfclink",
"(",
"name",
",",
"rawtext",
",",
"text",
",",
"lineno",
",",
"inliner",
",",
"options",
"=",
"{",
"}",
",",
"content",
"=",
"[",
"]",
")",
":",
"node",
"=",
"nodes",
".",
"reference",
"(",
"rawtext",
",",
"(",
"'Section '",
"+",
"text",
")",
",",
"refuri",
"=",
"(",
"'%s#section-%s'",
"%",
"(",
"base_url",
",",
"text",
")",
")",
")",
"return",
"(",
"[",
"node",
"]",
",",
"[",
"]",
")"
] | link to the oauth2 draft . | train | true |
18,343 | def _dict_helper(desc, row):
return dict(zip([col[0] for col in desc], row))
| [
"def",
"_dict_helper",
"(",
"desc",
",",
"row",
")",
":",
"return",
"dict",
"(",
"zip",
"(",
"[",
"col",
"[",
"0",
"]",
"for",
"col",
"in",
"desc",
"]",
",",
"row",
")",
")"
] | returns a dictionary for the given cursor . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.