id_within_dataset (int64, 1-55.5k) | snippet (string, 19-14.2k chars) | tokens (list, 6-1.63k tokens) | nl (string, 6-352 chars) | split_within_dataset (string, 1 class) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
47,478 | def wait_for_created(upid, timeout=300):
start_time = time.time()
info = _lookup_proxmox_task(upid)
if (not info):
log.error('wait_for_created: No task information retrieved based on given criteria.')
raise SaltCloudExecutionFailure
while True:
if (('status' in info) and (info['status'] == 'OK')):
log.debug('Host has been created!')
return True
time.sleep(3)
if ((time.time() - start_time) > timeout):
log.debug('Timeout reached while waiting for host to be created')
return False
info = _lookup_proxmox_task(upid)
| [
"def",
"wait_for_created",
"(",
"upid",
",",
"timeout",
"=",
"300",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"info",
"=",
"_lookup_proxmox_task",
"(",
"upid",
")",
"if",
"(",
"not",
"info",
")",
":",
"log",
".",
"error",
"(",
"'wait_for_created: No task information retrieved based on given criteria.'",
")",
"raise",
"SaltCloudExecutionFailure",
"while",
"True",
":",
"if",
"(",
"(",
"'status'",
"in",
"info",
")",
"and",
"(",
"info",
"[",
"'status'",
"]",
"==",
"'OK'",
")",
")",
":",
"log",
".",
"debug",
"(",
"'Host has been created!'",
")",
"return",
"True",
"time",
".",
"sleep",
"(",
"3",
")",
"if",
"(",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
")",
">",
"timeout",
")",
":",
"log",
".",
"debug",
"(",
"'Timeout reached while waiting for host to be created'",
")",
"return",
"False",
"info",
"=",
"_lookup_proxmox_task",
"(",
"upid",
")"
] | wait until the vm has been created successfully . | train | true |
47,480 | def _render(template, render, renderer, template_dict, opts):
if render:
if (template_dict is None):
template_dict = {}
if (not renderer):
renderer = opts.get('renderer', 'yaml_jinja')
rend = salt.loader.render(opts, {})
blacklist = opts.get('renderer_blacklist')
whitelist = opts.get('renderer_whitelist')
return compile_template(template, rend, renderer, blacklist, whitelist, **template_dict)
with salt.utils.fopen(template, 'r') as fh_:
return fh_.read()
| [
"def",
"_render",
"(",
"template",
",",
"render",
",",
"renderer",
",",
"template_dict",
",",
"opts",
")",
":",
"if",
"render",
":",
"if",
"(",
"template_dict",
"is",
"None",
")",
":",
"template_dict",
"=",
"{",
"}",
"if",
"(",
"not",
"renderer",
")",
":",
"renderer",
"=",
"opts",
".",
"get",
"(",
"'renderer'",
",",
"'yaml_jinja'",
")",
"rend",
"=",
"salt",
".",
"loader",
".",
"render",
"(",
"opts",
",",
"{",
"}",
")",
"blacklist",
"=",
"opts",
".",
"get",
"(",
"'renderer_blacklist'",
")",
"whitelist",
"=",
"opts",
".",
"get",
"(",
"'renderer_whitelist'",
")",
"return",
"compile_template",
"(",
"template",
",",
"rend",
",",
"renderer",
",",
"blacklist",
",",
"whitelist",
",",
"**",
"template_dict",
")",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"template",
",",
"'r'",
")",
"as",
"fh_",
":",
"return",
"fh_",
".",
"read",
"(",
")"
] | renders the template if render is set , otherwise returns the raw template contents . | train | false |
47,481 | def normalize_1d_index(index):
if isinstance(index, types.SliceType):
return index
elif isinstance(index, types.Integer):
return (types.intp if index.signed else types.uintp)
| [
"def",
"normalize_1d_index",
"(",
"index",
")",
":",
"if",
"isinstance",
"(",
"index",
",",
"types",
".",
"SliceType",
")",
":",
"return",
"index",
"elif",
"isinstance",
"(",
"index",
",",
"types",
".",
"Integer",
")",
":",
"return",
"(",
"types",
".",
"intp",
"if",
"index",
".",
"signed",
"else",
"types",
".",
"uintp",
")"
] | normalize the *index* type for indexing a 1d sequence . | train | false |
47,483 | def test_minify_css():
css = '\n/*\n * Font-sizes from config, override with care\n */\n\n.title {\n font-family: sans;\n\n font-size: 12 ;\n}\n\n.legends .legend text {\n font-family: monospace;\n font-size: 14 ;}\n'
assert (minify_css(css) == '.title{font-family:sans;font-size:12}.legends .legend text{font-family:monospace;font-size:14}')
| [
"def",
"test_minify_css",
"(",
")",
":",
"css",
"=",
"'\\n/*\\n * Font-sizes from config, override with care\\n */\\n\\n.title {\\n font-family: sans;\\n\\n font-size: 12 ;\\n}\\n\\n.legends .legend text {\\n font-family: monospace;\\n font-size: 14 ;}\\n'",
"assert",
"(",
"minify_css",
"(",
"css",
")",
"==",
"'.title{font-family:sans;font-size:12}.legends .legend text{font-family:monospace;font-size:14}'",
")"
] | test css minifier function . | train | false |
47,484 | def variant_prefix(variant):
if (variant is None):
return ''
return (variant + '.')
| [
"def",
"variant_prefix",
"(",
"variant",
")",
":",
"if",
"(",
"variant",
"is",
"None",
")",
":",
"return",
"''",
"return",
"(",
"variant",
"+",
"'.'",
")"
] | return a filename prefix for variant . | train | false |
47,485 | def get_id_from_ns_name(ns_name):
dash_index = ns_name.find('-')
if (0 <= dash_index):
return ns_name[(dash_index + 1):]
| [
"def",
"get_id_from_ns_name",
"(",
"ns_name",
")",
":",
"dash_index",
"=",
"ns_name",
".",
"find",
"(",
"'-'",
")",
"if",
"(",
"0",
"<=",
"dash_index",
")",
":",
"return",
"ns_name",
"[",
"(",
"dash_index",
"+",
"1",
")",
":",
"]"
] | parses identifier from prefix-identifier . | train | false |
47,487 | def put_logging(Bucket, TargetBucket=None, TargetPrefix=None, TargetGrants=None, region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
logstate = {}
targets = {'TargetBucket': TargetBucket, 'TargetGrants': TargetGrants, 'TargetPrefix': TargetPrefix}
for (key, val) in six.iteritems(targets):
if (val is not None):
logstate[key] = val
if logstate:
logstatus = {'LoggingEnabled': logstate}
else:
logstatus = {}
if ((TargetGrants is not None) and isinstance(TargetGrants, six.string_types)):
TargetGrants = json.loads(TargetGrants)
conn.put_bucket_logging(Bucket=Bucket, BucketLoggingStatus=logstatus)
return {'updated': True, 'name': Bucket}
except ClientError as e:
return {'updated': False, 'error': __utils__['boto3.get_error'](e)}
| [
"def",
"put_logging",
"(",
"Bucket",
",",
"TargetBucket",
"=",
"None",
",",
"TargetPrefix",
"=",
"None",
",",
"TargetGrants",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"logstate",
"=",
"{",
"}",
"targets",
"=",
"{",
"'TargetBucket'",
":",
"TargetBucket",
",",
"'TargetGrants'",
":",
"TargetGrants",
",",
"'TargetPrefix'",
":",
"TargetPrefix",
"}",
"for",
"(",
"key",
",",
"val",
")",
"in",
"six",
".",
"iteritems",
"(",
"targets",
")",
":",
"if",
"(",
"val",
"is",
"not",
"None",
")",
":",
"logstate",
"[",
"key",
"]",
"=",
"val",
"if",
"logstate",
":",
"logstatus",
"=",
"{",
"'LoggingEnabled'",
":",
"logstate",
"}",
"else",
":",
"logstatus",
"=",
"{",
"}",
"if",
"(",
"(",
"TargetGrants",
"is",
"not",
"None",
")",
"and",
"isinstance",
"(",
"TargetGrants",
",",
"six",
".",
"string_types",
")",
")",
":",
"TargetGrants",
"=",
"json",
".",
"loads",
"(",
"TargetGrants",
")",
"conn",
".",
"put_bucket_logging",
"(",
"Bucket",
"=",
"Bucket",
",",
"BucketLoggingStatus",
"=",
"logstatus",
")",
"return",
"{",
"'updated'",
":",
"True",
",",
"'name'",
":",
"Bucket",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'updated'",
":",
"False",
",",
"'error'",
":",
"__utils__",
"[",
"'boto3.get_error'",
"]",
"(",
"e",
")",
"}"
] | given a valid config , update the logging parameters of a bucket . | train | true |
47,488 | def _izip_longest(*args, **kwds):
fillvalue = kwds.get('fillvalue')
def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
(yield counter())
fillers = itertools.repeat(fillvalue)
iters = [itertools.chain(it, sentinel(), fillers) for it in args]
try:
for tup in itertools.izip(*iters):
(yield tup)
except IndexError:
pass
| [
"def",
"_izip_longest",
"(",
"*",
"args",
",",
"**",
"kwds",
")",
":",
"fillvalue",
"=",
"kwds",
".",
"get",
"(",
"'fillvalue'",
")",
"def",
"sentinel",
"(",
"counter",
"=",
"(",
"[",
"fillvalue",
"]",
"*",
"(",
"len",
"(",
"args",
")",
"-",
"1",
")",
")",
".",
"pop",
")",
":",
"(",
"yield",
"counter",
"(",
")",
")",
"fillers",
"=",
"itertools",
".",
"repeat",
"(",
"fillvalue",
")",
"iters",
"=",
"[",
"itertools",
".",
"chain",
"(",
"it",
",",
"sentinel",
"(",
")",
",",
"fillers",
")",
"for",
"it",
"in",
"args",
"]",
"try",
":",
"for",
"tup",
"in",
"itertools",
".",
"izip",
"(",
"*",
"iters",
")",
":",
"(",
"yield",
"tup",
")",
"except",
"IndexError",
":",
"pass"
] | taken from python docs URL#itertools . | train | false |
47,493 | def id_to_ec2_snap_id(snapshot_id):
if uuidutils.is_uuid_like(snapshot_id):
ctxt = context.get_admin_context()
int_id = get_int_id_from_snapshot_uuid(ctxt, snapshot_id)
return id_to_ec2_id(int_id, 'snap-%08x')
else:
return id_to_ec2_id(snapshot_id, 'snap-%08x')
| [
"def",
"id_to_ec2_snap_id",
"(",
"snapshot_id",
")",
":",
"if",
"uuidutils",
".",
"is_uuid_like",
"(",
"snapshot_id",
")",
":",
"ctxt",
"=",
"context",
".",
"get_admin_context",
"(",
")",
"int_id",
"=",
"get_int_id_from_snapshot_uuid",
"(",
"ctxt",
",",
"snapshot_id",
")",
"return",
"id_to_ec2_id",
"(",
"int_id",
",",
"'snap-%08x'",
")",
"else",
":",
"return",
"id_to_ec2_id",
"(",
"snapshot_id",
",",
"'snap-%08x'",
")"
] | get or create an ec2 snapshot id from uuid . | train | false |
47,494 | def is_signature(sig):
return isinstance(sig, (str, tuple, typing.Signature))
| [
"def",
"is_signature",
"(",
"sig",
")",
":",
"return",
"isinstance",
"(",
"sig",
",",
"(",
"str",
",",
"tuple",
",",
"typing",
".",
"Signature",
")",
")"
] | return whether *sig* is a potentially valid signature specification . | train | false |
47,495 | def test_transaction_objects_mapped_for_all_models(db, default_namespace):
assert set(HasRevisions.__subclasses__()).issubset(transaction_objects().values())
| [
"def",
"test_transaction_objects_mapped_for_all_models",
"(",
"db",
",",
"default_namespace",
")",
":",
"assert",
"set",
"(",
"HasRevisions",
".",
"__subclasses__",
"(",
")",
")",
".",
"issubset",
"(",
"transaction_objects",
"(",
")",
".",
"values",
"(",
")",
")"
] | test that all subclasses of hasrevisions are mapped by the transaction_objects() function . | train | false |
47,496 | def sort_string(s):
return (''.join(sorted(list(s))) if s else s)
| [
"def",
"sort_string",
"(",
"s",
")",
":",
"return",
"(",
"''",
".",
"join",
"(",
"sorted",
"(",
"list",
"(",
"s",
")",
")",
")",
"if",
"s",
"else",
"s",
")"
] | a simple little toy to sort a string . | train | false |
47,497 | def CDLHARAMI(barDs, count):
return call_talib_with_ohlc(barDs, count, talib.CDLHARAMI)
| [
"def",
"CDLHARAMI",
"(",
"barDs",
",",
"count",
")",
":",
"return",
"call_talib_with_ohlc",
"(",
"barDs",
",",
"count",
",",
"talib",
".",
"CDLHARAMI",
")"
] | harami pattern . | train | false |
47,498 | def tomorrow(t):
now = time.localtime(t)
ntime = (now[0], now[1], now[2], 0, 0, 0, now[6], now[7], now[8])
return (time.mktime(ntime) + DAY)
| [
"def",
"tomorrow",
"(",
"t",
")",
":",
"now",
"=",
"time",
".",
"localtime",
"(",
"t",
")",
"ntime",
"=",
"(",
"now",
"[",
"0",
"]",
",",
"now",
"[",
"1",
"]",
",",
"now",
"[",
"2",
"]",
",",
"0",
",",
"0",
",",
"0",
",",
"now",
"[",
"6",
"]",
",",
"now",
"[",
"7",
"]",
",",
"now",
"[",
"8",
"]",
")",
"return",
"(",
"time",
".",
"mktime",
"(",
"ntime",
")",
"+",
"DAY",
")"
] | return timestamp for tomorrow . | train | false |
47,499 | def _lstsq_residual(b, n, rhs):
raise NotImplementedError
| [
"def",
"_lstsq_residual",
"(",
"b",
",",
"n",
",",
"rhs",
")",
":",
"raise",
"NotImplementedError"
] | compute the residual from the b scratch space . | train | false |
47,500 | def mkstemp(suffix=None, prefix=None, dir=None, text=False):
(prefix, suffix, dir, output_type) = _sanitize_params(prefix, suffix, dir)
if text:
flags = _text_openflags
else:
flags = _bin_openflags
return _mkstemp_inner(dir, prefix, suffix, flags, output_type)
| [
"def",
"mkstemp",
"(",
"suffix",
"=",
"None",
",",
"prefix",
"=",
"None",
",",
"dir",
"=",
"None",
",",
"text",
"=",
"False",
")",
":",
"(",
"prefix",
",",
"suffix",
",",
"dir",
",",
"output_type",
")",
"=",
"_sanitize_params",
"(",
"prefix",
",",
"suffix",
",",
"dir",
")",
"if",
"text",
":",
"flags",
"=",
"_text_openflags",
"else",
":",
"flags",
"=",
"_bin_openflags",
"return",
"_mkstemp_inner",
"(",
"dir",
",",
"prefix",
",",
"suffix",
",",
"flags",
",",
"output_type",
")"
] | wrap tempfile . | train | false |
47,502 | def _run_cmd(cmd):
ret = {}
result = __salt__['cmd.run_all'](cmd)
if result.get('stdout'):
ret['stdout'] = result['stdout']
if result.get('stderr'):
ret['stderr'] = result['stderr']
ret['success'] = (result['retcode'] == 0)
return ret
| [
"def",
"_run_cmd",
"(",
"cmd",
")",
":",
"ret",
"=",
"{",
"}",
"result",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"if",
"result",
".",
"get",
"(",
"'stdout'",
")",
":",
"ret",
"[",
"'stdout'",
"]",
"=",
"result",
"[",
"'stdout'",
"]",
"if",
"result",
".",
"get",
"(",
"'stderr'",
")",
":",
"ret",
"[",
"'stderr'",
"]",
"=",
"result",
"[",
"'stderr'",
"]",
"ret",
"[",
"'success'",
"]",
"=",
"(",
"result",
"[",
"'retcode'",
"]",
"==",
"0",
")",
"return",
"ret"
] | run a kapacitor task and return a dictionary of info . | train | false |
47,503 | def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
if target.visited:
return
target.visited = True
target.in_roots = (target in roots)
for back_dep_target in target.back_deps:
_AddCompileTargets(back_dep_target, roots, False, result)
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
target.in_roots |= back_dep_target.in_roots
target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
if (target.in_roots and (target.is_executable or ((not target.added_to_compile_targets) and (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and (not target.is_or_has_linked_ancestor)))):
print ' DCTB DCTB adding to compile targets', target.name, 'executable', target.is_executable, 'added_to_compile_targets', target.added_to_compile_targets, 'add_if_no_ancestor', add_if_no_ancestor, 'requires_build', target.requires_build, 'is_static_library', target.is_static_library, 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
result.add(target)
target.added_to_compile_targets = True
| [
"def",
"_AddCompileTargets",
"(",
"target",
",",
"roots",
",",
"add_if_no_ancestor",
",",
"result",
")",
":",
"if",
"target",
".",
"visited",
":",
"return",
"target",
".",
"visited",
"=",
"True",
"target",
".",
"in_roots",
"=",
"(",
"target",
"in",
"roots",
")",
"for",
"back_dep_target",
"in",
"target",
".",
"back_deps",
":",
"_AddCompileTargets",
"(",
"back_dep_target",
",",
"roots",
",",
"False",
",",
"result",
")",
"target",
".",
"added_to_compile_targets",
"|=",
"back_dep_target",
".",
"added_to_compile_targets",
"target",
".",
"in_roots",
"|=",
"back_dep_target",
".",
"in_roots",
"target",
".",
"is_or_has_linked_ancestor",
"|=",
"back_dep_target",
".",
"is_or_has_linked_ancestor",
"if",
"(",
"target",
".",
"in_roots",
"and",
"(",
"target",
".",
"is_executable",
"or",
"(",
"(",
"not",
"target",
".",
"added_to_compile_targets",
")",
"and",
"(",
"add_if_no_ancestor",
"or",
"target",
".",
"requires_build",
")",
")",
"or",
"(",
"target",
".",
"is_static_library",
"and",
"add_if_no_ancestor",
"and",
"(",
"not",
"target",
".",
"is_or_has_linked_ancestor",
")",
")",
")",
")",
":",
"print",
"' DCTB DCTB adding to compile targets'",
",",
"target",
".",
"name",
",",
"'executable'",
",",
"target",
".",
"is_executable",
",",
"'added_to_compile_targets'",
",",
"target",
".",
"added_to_compile_targets",
",",
"'add_if_no_ancestor'",
",",
"add_if_no_ancestor",
",",
"'requires_build'",
",",
"target",
".",
"requires_build",
",",
"'is_static_library'",
",",
"target",
".",
"is_static_library",
",",
"'is_or_has_linked_ancestor'",
",",
"target",
".",
"is_or_has_linked_ancestor",
"result",
".",
"add",
"(",
"target",
")",
"target",
".",
"added_to_compile_targets",
"=",
"True"
] | recurses through all targets that depend on |target| . | train | false |
47,504 | def dumpPorts(switches):
for switch in switches:
output(('%s ' % switch.name))
for intf in switch.intfList():
port = switch.ports[intf]
output(('%s:%d ' % (intf, port)))
output('\n')
| [
"def",
"dumpPorts",
"(",
"switches",
")",
":",
"for",
"switch",
"in",
"switches",
":",
"output",
"(",
"(",
"'%s '",
"%",
"switch",
".",
"name",
")",
")",
"for",
"intf",
"in",
"switch",
".",
"intfList",
"(",
")",
":",
"port",
"=",
"switch",
".",
"ports",
"[",
"intf",
"]",
"output",
"(",
"(",
"'%s:%d '",
"%",
"(",
"intf",
",",
"port",
")",
")",
")",
"output",
"(",
"'\\n'",
")"
] | dump interface to openflow port mappings for each switch . | train | false |
47,505 | def getSessionTypes(assoc_type):
assoc_to_session = {'HMAC-SHA1': ['DH-SHA1', 'no-encryption'], 'HMAC-SHA256': ['DH-SHA256', 'no-encryption']}
return assoc_to_session.get(assoc_type, [])
| [
"def",
"getSessionTypes",
"(",
"assoc_type",
")",
":",
"assoc_to_session",
"=",
"{",
"'HMAC-SHA1'",
":",
"[",
"'DH-SHA1'",
",",
"'no-encryption'",
"]",
",",
"'HMAC-SHA256'",
":",
"[",
"'DH-SHA256'",
",",
"'no-encryption'",
"]",
"}",
"return",
"assoc_to_session",
".",
"get",
"(",
"assoc_type",
",",
"[",
"]",
")"
] | return the allowed session types for a given association type . | train | false |
47,506 | def _ConvertFormatToQmark(statement, args):
if isinstance(args, dict):
return (statement % collections.defaultdict((lambda : '?')))
elif args:
qmarks = tuple(('?' * len(args)))
return (statement % qmarks)
return statement
| [
"def",
"_ConvertFormatToQmark",
"(",
"statement",
",",
"args",
")",
":",
"if",
"isinstance",
"(",
"args",
",",
"dict",
")",
":",
"return",
"(",
"statement",
"%",
"collections",
".",
"defaultdict",
"(",
"(",
"lambda",
":",
"'?'",
")",
")",
")",
"elif",
"args",
":",
"qmarks",
"=",
"tuple",
"(",
"(",
"'?'",
"*",
"len",
"(",
"args",
")",
")",
")",
"return",
"(",
"statement",
"%",
"qmarks",
")",
"return",
"statement"
] | replaces %s or %(name)s with ? . | train | false |
47,507 | @pytest.mark.parametrize('text, expected', [fixme(('<o>ne two', 'one| two')), ('<o>ne two', 'one |two'), fixme(('<one> two', 'one two|')), ('<one> two', 'one |two'), ('one t<wo>', 'one two|')])
def test_rl_forward_word(text, expected, lineedit, bridge):
lineedit.set_aug_text(text)
bridge.rl_forward_word()
assert (lineedit.aug_text() == expected)
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'text, expected'",
",",
"[",
"fixme",
"(",
"(",
"'<o>ne two'",
",",
"'one| two'",
")",
")",
",",
"(",
"'<o>ne two'",
",",
"'one |two'",
")",
",",
"fixme",
"(",
"(",
"'<one> two'",
",",
"'one two|'",
")",
")",
",",
"(",
"'<one> two'",
",",
"'one |two'",
")",
",",
"(",
"'one t<wo>'",
",",
"'one two|'",
")",
"]",
")",
"def",
"test_rl_forward_word",
"(",
"text",
",",
"expected",
",",
"lineedit",
",",
"bridge",
")",
":",
"lineedit",
".",
"set_aug_text",
"(",
"text",
")",
"bridge",
".",
"rl_forward_word",
"(",
")",
"assert",
"(",
"lineedit",
".",
"aug_text",
"(",
")",
"==",
"expected",
")"
] | test rl_forward_word . | train | false |
47,508 | def verify_password(s1, s2):
if isinstance(s1, unicode):
s1 = s1.encode('utf-8')
if isinstance(s2, unicode):
s2 = s2.encode('utf-8')
(m, f, n, x, s2) = s2.split(':')
return streql(pbkdf2(s1[:1024], x, int(n), (len(s2) / 2), f), s2)
| [
"def",
"verify_password",
"(",
"s1",
",",
"s2",
")",
":",
"if",
"isinstance",
"(",
"s1",
",",
"unicode",
")",
":",
"s1",
"=",
"s1",
".",
"encode",
"(",
"'utf-8'",
")",
"if",
"isinstance",
"(",
"s2",
",",
"unicode",
")",
":",
"s2",
"=",
"s2",
".",
"encode",
"(",
"'utf-8'",
")",
"(",
"m",
",",
"f",
",",
"n",
",",
"x",
",",
"s2",
")",
"=",
"s2",
".",
"split",
"(",
"':'",
")",
"return",
"streql",
"(",
"pbkdf2",
"(",
"s1",
"[",
":",
"1024",
"]",
",",
"x",
",",
"int",
"(",
"n",
")",
",",
"(",
"len",
"(",
"s2",
")",
"/",
"2",
")",
",",
"f",
")",
",",
"s2",
")"
] | returns true if the given strings are identical . | train | false |
47,509 | @contextmanager
def in_sighandler():
set_in_sighandler(True)
try:
(yield)
finally:
set_in_sighandler(False)
| [
"@",
"contextmanager",
"def",
"in_sighandler",
"(",
")",
":",
"set_in_sighandler",
"(",
"True",
")",
"try",
":",
"(",
"yield",
")",
"finally",
":",
"set_in_sighandler",
"(",
"False",
")"
] | context that records that we are in a signal handler . | train | false |
47,510 | def uncompact(creation_sequence):
first = creation_sequence[0]
if isinstance(first, str):
return creation_sequence
elif isinstance(first, tuple):
return creation_sequence
elif isinstance(first, int):
ccscopy = creation_sequence[:]
else:
raise TypeError('Not a valid creation sequence type')
cs = []
while ccscopy:
cs.extend((ccscopy.pop(0) * ['d']))
if ccscopy:
cs.extend((ccscopy.pop(0) * ['i']))
return cs
| [
"def",
"uncompact",
"(",
"creation_sequence",
")",
":",
"first",
"=",
"creation_sequence",
"[",
"0",
"]",
"if",
"isinstance",
"(",
"first",
",",
"str",
")",
":",
"return",
"creation_sequence",
"elif",
"isinstance",
"(",
"first",
",",
"tuple",
")",
":",
"return",
"creation_sequence",
"elif",
"isinstance",
"(",
"first",
",",
"int",
")",
":",
"ccscopy",
"=",
"creation_sequence",
"[",
":",
"]",
"else",
":",
"raise",
"TypeError",
"(",
"'Not a valid creation sequence type'",
")",
"cs",
"=",
"[",
"]",
"while",
"ccscopy",
":",
"cs",
".",
"extend",
"(",
"(",
"ccscopy",
".",
"pop",
"(",
"0",
")",
"*",
"[",
"'d'",
"]",
")",
")",
"if",
"ccscopy",
":",
"cs",
".",
"extend",
"(",
"(",
"ccscopy",
".",
"pop",
"(",
"0",
")",
"*",
"[",
"'i'",
"]",
")",
")",
"return",
"cs"
] | converts a compact creation sequence for a threshold graph to a standard creation sequence . | train | false |
47,511 | def db_update_user(**kwargs):
groups_post = kwargs.pop('groups')
admin_groups_post = kwargs.pop('admin_groups')
user_id = kwargs.pop('user_id')
user = User.objects.filter(id=user_id)
if user:
user_get = user[0]
password = kwargs.pop('password')
user.update(**kwargs)
if password.strip():
user_get.set_password(password)
user_get.save()
else:
return None
group_select = []
if groups_post:
for group_id in groups_post:
group = UserGroup.objects.filter(id=group_id)
group_select.extend(group)
user_get.group = group_select
if (admin_groups_post != ''):
user_get.admingroup_set.all().delete()
for group_id in admin_groups_post:
group = get_object(UserGroup, id=group_id)
AdminGroup(user=user, group=group).save()
| [
"def",
"db_update_user",
"(",
"**",
"kwargs",
")",
":",
"groups_post",
"=",
"kwargs",
".",
"pop",
"(",
"'groups'",
")",
"admin_groups_post",
"=",
"kwargs",
".",
"pop",
"(",
"'admin_groups'",
")",
"user_id",
"=",
"kwargs",
".",
"pop",
"(",
"'user_id'",
")",
"user",
"=",
"User",
".",
"objects",
".",
"filter",
"(",
"id",
"=",
"user_id",
")",
"if",
"user",
":",
"user_get",
"=",
"user",
"[",
"0",
"]",
"password",
"=",
"kwargs",
".",
"pop",
"(",
"'password'",
")",
"user",
".",
"update",
"(",
"**",
"kwargs",
")",
"if",
"password",
".",
"strip",
"(",
")",
":",
"user_get",
".",
"set_password",
"(",
"password",
")",
"user_get",
".",
"save",
"(",
")",
"else",
":",
"return",
"None",
"group_select",
"=",
"[",
"]",
"if",
"groups_post",
":",
"for",
"group_id",
"in",
"groups_post",
":",
"group",
"=",
"UserGroup",
".",
"objects",
".",
"filter",
"(",
"id",
"=",
"group_id",
")",
"group_select",
".",
"extend",
"(",
"group",
")",
"user_get",
".",
"group",
"=",
"group_select",
"if",
"(",
"admin_groups_post",
"!=",
"''",
")",
":",
"user_get",
".",
"admingroup_set",
".",
"all",
"(",
")",
".",
"delete",
"(",
")",
"for",
"group_id",
"in",
"admin_groups_post",
":",
"group",
"=",
"get_object",
"(",
"UserGroup",
",",
"id",
"=",
"group_id",
")",
"AdminGroup",
"(",
"user",
"=",
"user",
",",
"group",
"=",
"group",
")",
".",
"save",
"(",
")"
] | update a user's info in the database . | train | false |
47,513 | def _validate_ip_pools(data, valid_values=None):
if (not isinstance(data, list)):
msg = (_("Invalid data format for IP pool: '%s'") % data)
LOG.debug(msg)
return msg
expected_keys = ['start', 'end']
for ip_pool in data:
msg = _verify_dict_keys(expected_keys, ip_pool)
if msg:
LOG.debug(msg)
return msg
for k in expected_keys:
msg = _validate_ip_address(ip_pool[k])
if msg:
LOG.debug(msg)
return msg
| [
"def",
"_validate_ip_pools",
"(",
"data",
",",
"valid_values",
"=",
"None",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"data",
",",
"list",
")",
")",
":",
"msg",
"=",
"(",
"_",
"(",
"\"Invalid data format for IP pool: '%s'\"",
")",
"%",
"data",
")",
"LOG",
".",
"debug",
"(",
"msg",
")",
"return",
"msg",
"expected_keys",
"=",
"[",
"'start'",
",",
"'end'",
"]",
"for",
"ip_pool",
"in",
"data",
":",
"msg",
"=",
"_verify_dict_keys",
"(",
"expected_keys",
",",
"ip_pool",
")",
"if",
"msg",
":",
"LOG",
".",
"debug",
"(",
"msg",
")",
"return",
"msg",
"for",
"k",
"in",
"expected_keys",
":",
"msg",
"=",
"_validate_ip_address",
"(",
"ip_pool",
"[",
"k",
"]",
")",
"if",
"msg",
":",
"LOG",
".",
"debug",
"(",
"msg",
")",
"return",
"msg"
] | validate that start and end ip addresses are present ; in addition , the ip addresses themselves will also be validated . | train | false |
47,514 | def linebreaksbr(value, autoescape=None):
if (autoescape and (not isinstance(value, SafeData))):
from google.appengine._internal.django.utils.html import escape
value = escape(value)
return mark_safe(value.replace('\n', '<br />'))
| [
"def",
"linebreaksbr",
"(",
"value",
",",
"autoescape",
"=",
"None",
")",
":",
"if",
"(",
"autoescape",
"and",
"(",
"not",
"isinstance",
"(",
"value",
",",
"SafeData",
")",
")",
")",
":",
"from",
"google",
".",
"appengine",
".",
"_internal",
".",
"django",
".",
"utils",
".",
"html",
"import",
"escape",
"value",
"=",
"escape",
"(",
"value",
")",
"return",
"mark_safe",
"(",
"value",
".",
"replace",
"(",
"'\\n'",
",",
"'<br />'",
")",
")"
] | converts all newlines in a piece of plain text to html line breaks . | train | false |
47,516 | def check_console_width(val):
valid = True
message = (('-' * val) + '\n')
message += ('console_width set to %s, try a lower value if above line overlaps' % val)
return dict(valid=valid, message=message)
| [
"def",
"check_console_width",
"(",
"val",
")",
":",
"valid",
"=",
"True",
"message",
"=",
"(",
"(",
"'-'",
"*",
"val",
")",
"+",
"'\\n'",
")",
"message",
"+=",
"(",
"'console_width set to %s, try a lower value if above line overlaps'",
"%",
"val",
")",
"return",
"dict",
"(",
"valid",
"=",
"valid",
",",
"message",
"=",
"message",
")"
] | show ruler to check console width . | train | false |
47,517 | @register(u'end-kbd-macro')
def start_kbd_macro(event):
event.cli.input_processor.end_macro()
| [
"@",
"register",
"(",
"u'end-kbd-macro'",
")",
"def",
"start_kbd_macro",
"(",
"event",
")",
":",
"event",
".",
"cli",
".",
"input_processor",
".",
"end_macro",
"(",
")"
] | stop saving the characters typed into the current keyboard macro and save the definition . | train | false |
47,518 | def _do_names(names, fun, path=None):
ret = {}
hosts = find_guests(names, path=path)
if (not hosts):
return False
client = salt.client.get_local_client(__opts__['conf_file'])
for (host, sub_names) in six.iteritems(hosts):
cmds = []
for name in sub_names:
cmds.append(client.cmd_iter(host, 'lxc.{0}'.format(fun), [name], kwarg={'path': path}, timeout=60))
for cmd in cmds:
data = next(cmd)
data = data.get(host, {}).get('ret', None)
if data:
ret.update({host: data})
return ret
| [
"def",
"_do_names",
"(",
"names",
",",
"fun",
",",
"path",
"=",
"None",
")",
":",
"ret",
"=",
"{",
"}",
"hosts",
"=",
"find_guests",
"(",
"names",
",",
"path",
"=",
"path",
")",
"if",
"(",
"not",
"hosts",
")",
":",
"return",
"False",
"client",
"=",
"salt",
".",
"client",
".",
"get_local_client",
"(",
"__opts__",
"[",
"'conf_file'",
"]",
")",
"for",
"(",
"host",
",",
"sub_names",
")",
"in",
"six",
".",
"iteritems",
"(",
"hosts",
")",
":",
"cmds",
"=",
"[",
"]",
"for",
"name",
"in",
"sub_names",
":",
"cmds",
".",
"append",
"(",
"client",
".",
"cmd_iter",
"(",
"host",
",",
"'lxc.{0}'",
".",
"format",
"(",
"fun",
")",
",",
"[",
"name",
"]",
",",
"kwarg",
"=",
"{",
"'path'",
":",
"path",
"}",
",",
"timeout",
"=",
"60",
")",
")",
"for",
"cmd",
"in",
"cmds",
":",
"data",
"=",
"next",
"(",
"cmd",
")",
"data",
"=",
"data",
".",
"get",
"(",
"host",
",",
"{",
"}",
")",
".",
"get",
"(",
"'ret'",
",",
"None",
")",
"if",
"data",
":",
"ret",
".",
"update",
"(",
"{",
"host",
":",
"data",
"}",
")",
"return",
"ret"
] | invoke a function in the lxc module with no args ; path is the path to the container parent (default: /var/lib/lxc) . | train | true |
47,519 | def _proxy_cmd(command, *args, **kwargs):
proxy_prefix = __opts__['proxy']['proxytype']
proxy_cmd = '.'.join([proxy_prefix, command])
if (proxy_cmd not in __proxy__):
return False
for k in kwargs.keys():
if k.startswith('__pub_'):
kwargs.pop(k)
return __proxy__[proxy_cmd](*args, **kwargs)
| [
"def",
"_proxy_cmd",
"(",
"command",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"proxy_prefix",
"=",
"__opts__",
"[",
"'proxy'",
"]",
"[",
"'proxytype'",
"]",
"proxy_cmd",
"=",
"'.'",
".",
"join",
"(",
"[",
"proxy_prefix",
",",
"command",
"]",
")",
"if",
"(",
"proxy_cmd",
"not",
"in",
"__proxy__",
")",
":",
"return",
"False",
"for",
"k",
"in",
"kwargs",
".",
"keys",
"(",
")",
":",
"if",
"k",
".",
"startswith",
"(",
"'__pub_'",
")",
":",
"kwargs",
".",
"pop",
"(",
"k",
")",
"return",
"__proxy__",
"[",
"proxy_cmd",
"]",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] | run commands from __proxy__ :mod:salt . | train | false |
47,521 | def url_unparse(components):
(scheme, netloc, path, query, fragment) = normalize_string_tuple(components)
s = make_literal_wrapper(scheme)
url = s('')
if (netloc or (scheme and path.startswith(s('/')))):
if (path and (path[:1] != s('/'))):
path = (s('/') + path)
url = ((s('//') + (netloc or s(''))) + path)
elif path:
url += path
if scheme:
url = ((scheme + s(':')) + url)
if query:
url = ((url + s('?')) + query)
if fragment:
url = ((url + s('#')) + fragment)
return url
| [
"def",
"url_unparse",
"(",
"components",
")",
":",
"(",
"scheme",
",",
"netloc",
",",
"path",
",",
"query",
",",
"fragment",
")",
"=",
"normalize_string_tuple",
"(",
"components",
")",
"s",
"=",
"make_literal_wrapper",
"(",
"scheme",
")",
"url",
"=",
"s",
"(",
"''",
")",
"if",
"(",
"netloc",
"or",
"(",
"scheme",
"and",
"path",
".",
"startswith",
"(",
"s",
"(",
"'/'",
")",
")",
")",
")",
":",
"if",
"(",
"path",
"and",
"(",
"path",
"[",
":",
"1",
"]",
"!=",
"s",
"(",
"'/'",
")",
")",
")",
":",
"path",
"=",
"(",
"s",
"(",
"'/'",
")",
"+",
"path",
")",
"url",
"=",
"(",
"(",
"s",
"(",
"'//'",
")",
"+",
"(",
"netloc",
"or",
"s",
"(",
"''",
")",
")",
")",
"+",
"path",
")",
"elif",
"path",
":",
"url",
"+=",
"path",
"if",
"scheme",
":",
"url",
"=",
"(",
"(",
"scheme",
"+",
"s",
"(",
"':'",
")",
")",
"+",
"url",
")",
"if",
"query",
":",
"url",
"=",
"(",
"(",
"url",
"+",
"s",
"(",
"'?'",
")",
")",
"+",
"query",
")",
"if",
"fragment",
":",
"url",
"=",
"(",
"(",
"url",
"+",
"s",
"(",
"'#'",
")",
")",
"+",
"fragment",
")",
"return",
"url"
] | the reverse operation to :meth:url_parse . | train | true |
47,523 | def merge_headers_by_name(name, headers):
matching_headers = find_matching_headers(name, headers)
return ','.join((str(headers[h]) for h in matching_headers if (headers[h] is not None)))
| [
"def",
"merge_headers_by_name",
"(",
"name",
",",
"headers",
")",
":",
"matching_headers",
"=",
"find_matching_headers",
"(",
"name",
",",
"headers",
")",
"return",
"','",
".",
"join",
"(",
"(",
"str",
"(",
"headers",
"[",
"h",
"]",
")",
"for",
"h",
"in",
"matching_headers",
"if",
"(",
"headers",
"[",
"h",
"]",
"is",
"not",
"None",
")",
")",
")"
] | takes a specific header name and a dict of headers {"name": "value"} . | train | false |
47,524 | def search_by_tag(resource_search_uri, ipaddr, port):
str_uri = six.text_type(resource_search_uri)
if (('search' in str_uri) and ('?tag=' in str_uri)):
(s, h) = service_json_request(ipaddr, port, 'GET', resource_search_uri, None)
o = json_decode(s)
if (not o):
return None
resources = get_node_value(o, 'resource')
resource_uris = []
for resource in resources:
resource_uris.append(resource['id'])
return resource_uris
else:
raise CoprHdError(CoprHdError.VALUE_ERR, (_('Search URI %s is not in the expected format, it should end with ?tag={0}') % str_uri))
| [
"def",
"search_by_tag",
"(",
"resource_search_uri",
",",
"ipaddr",
",",
"port",
")",
":",
"str_uri",
"=",
"six",
".",
"text_type",
"(",
"resource_search_uri",
")",
"if",
"(",
"(",
"'search'",
"in",
"str_uri",
")",
"and",
"(",
"'?tag='",
"in",
"str_uri",
")",
")",
":",
"(",
"s",
",",
"h",
")",
"=",
"service_json_request",
"(",
"ipaddr",
",",
"port",
",",
"'GET'",
",",
"resource_search_uri",
",",
"None",
")",
"o",
"=",
"json_decode",
"(",
"s",
")",
"if",
"(",
"not",
"o",
")",
":",
"return",
"None",
"resources",
"=",
"get_node_value",
"(",
"o",
",",
"'resource'",
")",
"resource_uris",
"=",
"[",
"]",
"for",
"resource",
"in",
"resources",
":",
"resource_uris",
".",
"append",
"(",
"resource",
"[",
"'id'",
"]",
")",
"return",
"resource_uris",
"else",
":",
"raise",
"CoprHdError",
"(",
"CoprHdError",
".",
"VALUE_ERR",
",",
"(",
"_",
"(",
"'Search URI %s is not in the expected format, it should end with ?tag={0}'",
")",
"%",
"str_uri",
")",
")"
] | fetches the list of resources with a given tag . | train | false |
47,525 | @require_admin_context
def instance_type_create(context, values):
session = get_session()
with session.begin():
try:
instance_type_get_by_name(context, values['name'], session)
raise exception.InstanceTypeExists(name=values['name'])
except exception.InstanceTypeNotFoundByName:
pass
try:
instance_type_get_by_flavor_id(context, values['flavorid'], session)
raise exception.InstanceTypeIdExists(flavor_id=values['flavorid'])
except exception.FlavorNotFound:
pass
try:
specs = values.get('extra_specs')
specs_refs = []
if specs:
for (k, v) in specs.iteritems():
specs_ref = models.InstanceTypeExtraSpecs()
specs_ref['key'] = k
specs_ref['value'] = v
specs_refs.append(specs_ref)
values['extra_specs'] = specs_refs
instance_type_ref = models.InstanceTypes()
instance_type_ref.update(values)
instance_type_ref.save(session=session)
except Exception as e:
raise db_exc.DBError(e)
return _dict_with_extra_specs(instance_type_ref)
| [
"@",
"require_admin_context",
"def",
"instance_type_create",
"(",
"context",
",",
"values",
")",
":",
"session",
"=",
"get_session",
"(",
")",
"with",
"session",
".",
"begin",
"(",
")",
":",
"try",
":",
"instance_type_get_by_name",
"(",
"context",
",",
"values",
"[",
"'name'",
"]",
",",
"session",
")",
"raise",
"exception",
".",
"InstanceTypeExists",
"(",
"name",
"=",
"values",
"[",
"'name'",
"]",
")",
"except",
"exception",
".",
"InstanceTypeNotFoundByName",
":",
"pass",
"try",
":",
"instance_type_get_by_flavor_id",
"(",
"context",
",",
"values",
"[",
"'flavorid'",
"]",
",",
"session",
")",
"raise",
"exception",
".",
"InstanceTypeIdExists",
"(",
"flavor_id",
"=",
"values",
"[",
"'flavorid'",
"]",
")",
"except",
"exception",
".",
"FlavorNotFound",
":",
"pass",
"try",
":",
"specs",
"=",
"values",
".",
"get",
"(",
"'extra_specs'",
")",
"specs_refs",
"=",
"[",
"]",
"if",
"specs",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"specs",
".",
"iteritems",
"(",
")",
":",
"specs_ref",
"=",
"models",
".",
"InstanceTypeExtraSpecs",
"(",
")",
"specs_ref",
"[",
"'key'",
"]",
"=",
"k",
"specs_ref",
"[",
"'value'",
"]",
"=",
"v",
"specs_refs",
".",
"append",
"(",
"specs_ref",
")",
"values",
"[",
"'extra_specs'",
"]",
"=",
"specs_refs",
"instance_type_ref",
"=",
"models",
".",
"InstanceTypes",
"(",
")",
"instance_type_ref",
".",
"update",
"(",
"values",
")",
"instance_type_ref",
".",
"save",
"(",
"session",
"=",
"session",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"db_exc",
".",
"DBError",
"(",
"e",
")",
"return",
"_dict_with_extra_specs",
"(",
"instance_type_ref",
")"
] | create a new instance type . | train | false |
47,526 | @pytest.mark.parametrize('model', CLASSES)
def test_list_size(model):
assert (len(model.COLUMN_WIDTHS) == 3)
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'model'",
",",
"CLASSES",
")",
"def",
"test_list_size",
"(",
"model",
")",
":",
"assert",
"(",
"len",
"(",
"model",
".",
"COLUMN_WIDTHS",
")",
"==",
"3",
")"
] | test if there are 3 items in the column_widths property . | train | false |
47,529 | def cucumber_testresult(registry, xml_parent, data):
cucumber_result = XML.SubElement(xml_parent, 'org.jenkinsci.plugins.cucumber.jsontestsupport.CucumberTestResultArchiver')
cucumber_result.set('plugin', 'cucumber-testresult-plugin')
mappings = [('results', 'testResults', None), ('ignore-bad-steps', 'ignoreBadSteps', False)]
helpers.convert_mapping_to_xml(cucumber_result, data, mappings, fail_required=True)
| [
"def",
"cucumber_testresult",
"(",
"registry",
",",
"xml_parent",
",",
"data",
")",
":",
"cucumber_result",
"=",
"XML",
".",
"SubElement",
"(",
"xml_parent",
",",
"'org.jenkinsci.plugins.cucumber.jsontestsupport.CucumberTestResultArchiver'",
")",
"cucumber_result",
".",
"set",
"(",
"'plugin'",
",",
"'cucumber-testresult-plugin'",
")",
"mappings",
"=",
"[",
"(",
"'results'",
",",
"'testResults'",
",",
"None",
")",
",",
"(",
"'ignore-bad-steps'",
",",
"'ignoreBadSteps'",
",",
"False",
")",
"]",
"helpers",
".",
"convert_mapping_to_xml",
"(",
"cucumber_result",
",",
"data",
",",
"mappings",
",",
"fail_required",
"=",
"True",
")"
] | yaml: cucumber-testresult publish cucumber test results . | train | false |
47,532 | def ValidFilename(filename):
if (_file_path_positive_re.match(filename) is None):
return ('Invalid character in filename: %s' % filename)
if (_file_path_negative_1_re.search(filename) is not None):
return ('Filename cannot contain "." or ".." or start with "-" or "_ah/": %s' % filename)
if (_file_path_negative_2_re.search(filename) is not None):
return ('Filename cannot have trailing / or contain //: %s' % filename)
if (_file_path_negative_3_re.search(filename) is not None):
return ('Any spaces must be in the middle of a filename: %s' % filename)
return ''
| [
"def",
"ValidFilename",
"(",
"filename",
")",
":",
"if",
"(",
"_file_path_positive_re",
".",
"match",
"(",
"filename",
")",
"is",
"None",
")",
":",
"return",
"(",
"'Invalid character in filename: %s'",
"%",
"filename",
")",
"if",
"(",
"_file_path_negative_1_re",
".",
"search",
"(",
"filename",
")",
"is",
"not",
"None",
")",
":",
"return",
"(",
"'Filename cannot contain \".\" or \"..\" or start with \"-\" or \"_ah/\": %s'",
"%",
"filename",
")",
"if",
"(",
"_file_path_negative_2_re",
".",
"search",
"(",
"filename",
")",
"is",
"not",
"None",
")",
":",
"return",
"(",
"'Filename cannot have trailing / or contain //: %s'",
"%",
"filename",
")",
"if",
"(",
"_file_path_negative_3_re",
".",
"search",
"(",
"filename",
")",
"is",
"not",
"None",
")",
":",
"return",
"(",
"'Any spaces must be in the middle of a filename: %s'",
"%",
"filename",
")",
"return",
"''"
] | determines if filename is valid . | train | false |
47,533 | def response_cookies_deep_copy():
raw_headers = parse.remote_response.raw._original_response.headers._headers
header_cookies_string_list = []
for (name, value) in raw_headers:
if (name.lower() == 'set-cookie'):
if (my_host_scheme == 'http://'):
value = value.replace('Secure;', '')
value = value.replace(';Secure', ';')
value = value.replace('; Secure', ';')
if ('httponly' in value.lower()):
if enable_aggressive_cookies_path_rewrite:
value = regex_cookie_path_rewriter.sub('path=/;', value)
elif (enable_aggressive_cookies_path_rewrite is not None):
if (parse.remote_domain not in domain_alias_to_target_set):
value = regex_cookie_path_rewriter.sub((('\\g<prefix>=/extdomains/' + parse.remote_domain) + '\\g<path>'), value)
header_cookies_string_list.append(value)
return header_cookies_string_list
| [
"def",
"response_cookies_deep_copy",
"(",
")",
":",
"raw_headers",
"=",
"parse",
".",
"remote_response",
".",
"raw",
".",
"_original_response",
".",
"headers",
".",
"_headers",
"header_cookies_string_list",
"=",
"[",
"]",
"for",
"(",
"name",
",",
"value",
")",
"in",
"raw_headers",
":",
"if",
"(",
"name",
".",
"lower",
"(",
")",
"==",
"'set-cookie'",
")",
":",
"if",
"(",
"my_host_scheme",
"==",
"'http://'",
")",
":",
"value",
"=",
"value",
".",
"replace",
"(",
"'Secure;'",
",",
"''",
")",
"value",
"=",
"value",
".",
"replace",
"(",
"';Secure'",
",",
"';'",
")",
"value",
"=",
"value",
".",
"replace",
"(",
"'; Secure'",
",",
"';'",
")",
"if",
"(",
"'httponly'",
"in",
"value",
".",
"lower",
"(",
")",
")",
":",
"if",
"enable_aggressive_cookies_path_rewrite",
":",
"value",
"=",
"regex_cookie_path_rewriter",
".",
"sub",
"(",
"'path=/;'",
",",
"value",
")",
"elif",
"(",
"enable_aggressive_cookies_path_rewrite",
"is",
"not",
"None",
")",
":",
"if",
"(",
"parse",
".",
"remote_domain",
"not",
"in",
"domain_alias_to_target_set",
")",
":",
"value",
"=",
"regex_cookie_path_rewriter",
".",
"sub",
"(",
"(",
"(",
"'\\\\g<prefix>=/extdomains/'",
"+",
"parse",
".",
"remote_domain",
")",
"+",
"'\\\\g<path>'",
")",
",",
"value",
")",
"header_cookies_string_list",
".",
"append",
"(",
"value",
")",
"return",
"header_cookies_string_list"
] | it's a bad hack to get the raw cookie headers . | train | false |
47,534 | def fix_exe_for_code_signing(filename):
exe_data = MachO(filename)
cmds = exe_data.headers[0].commands
file_size = exe_data.headers[0].size
for c in cmds:
if (c[0].get_cmd_name() == 'LC_SYMTAB'):
data = c[1]
new_strsize = (file_size - data.stroff)
data.strsize = new_strsize
linkedit = cmds[3][1]
new_segsize = (file_size - linkedit.fileoff)
linkedit.filesize = new_segsize
linkedit.vmsize = new_segsize
fp = open(exe_data.filename, 'rb+')
exe_data.write(fp)
fp.close()
| [
"def",
"fix_exe_for_code_signing",
"(",
"filename",
")",
":",
"exe_data",
"=",
"MachO",
"(",
"filename",
")",
"cmds",
"=",
"exe_data",
".",
"headers",
"[",
"0",
"]",
".",
"commands",
"file_size",
"=",
"exe_data",
".",
"headers",
"[",
"0",
"]",
".",
"size",
"for",
"c",
"in",
"cmds",
":",
"if",
"(",
"c",
"[",
"0",
"]",
".",
"get_cmd_name",
"(",
")",
"==",
"'LC_SYMTAB'",
")",
":",
"data",
"=",
"c",
"[",
"1",
"]",
"new_strsize",
"=",
"(",
"file_size",
"-",
"data",
".",
"stroff",
")",
"data",
".",
"strsize",
"=",
"new_strsize",
"linkedit",
"=",
"cmds",
"[",
"3",
"]",
"[",
"1",
"]",
"new_segsize",
"=",
"(",
"file_size",
"-",
"linkedit",
".",
"fileoff",
")",
"linkedit",
".",
"filesize",
"=",
"new_segsize",
"linkedit",
".",
"vmsize",
"=",
"new_segsize",
"fp",
"=",
"open",
"(",
"exe_data",
".",
"filename",
",",
"'rb+'",
")",
"exe_data",
".",
"write",
"(",
"fp",
")",
"fp",
".",
"close",
"(",
")"
] | fixes the mach-o headers to make code signing possible . | train | false |
47,535 | def _convert_to_naive_utc_time(time):
if (time.tzinfo is not None):
offset = time.utcoffset()
offset = (offset if offset else datetime.timedelta())
return (time.replace(tzinfo=None) - offset)
else:
return time
| [
"def",
"_convert_to_naive_utc_time",
"(",
"time",
")",
":",
"if",
"(",
"time",
".",
"tzinfo",
"is",
"not",
"None",
")",
":",
"offset",
"=",
"time",
".",
"utcoffset",
"(",
")",
"offset",
"=",
"(",
"offset",
"if",
"offset",
"else",
"datetime",
".",
"timedelta",
"(",
")",
")",
"return",
"(",
"time",
".",
"replace",
"(",
"tzinfo",
"=",
"None",
")",
"-",
"offset",
")",
"else",
":",
"return",
"time"
] | normalizes a datetime to a naive datetime in utc . | train | false |
47,536 | def test_not_specified_errors():
symsystem1 = SymbolicSystem(states, comb_explicit_rhs)
with raises(AttributeError):
symsystem1.comb_implicit_mat
with raises(AttributeError):
symsystem1.comb_implicit_rhs
with raises(AttributeError):
symsystem1.dyn_implicit_mat
with raises(AttributeError):
symsystem1.dyn_implicit_rhs
with raises(AttributeError):
symsystem1.kin_explicit_rhs
with raises(AttributeError):
symsystem1.compute_explicit_form()
symsystem2 = SymbolicSystem(coordinates, comb_implicit_rhs, speeds=speeds, mass_matrix=comb_implicit_mat)
with raises(AttributeError):
symsystem2.dyn_implicit_mat
with raises(AttributeError):
symsystem2.dyn_implicit_rhs
with raises(AttributeError):
symsystem2.kin_explicit_rhs
with raises(AttributeError):
symsystem1.coordinates
with raises(AttributeError):
symsystem1.speeds
with raises(AttributeError):
symsystem1.bodies
with raises(AttributeError):
symsystem1.loads
with raises(AttributeError):
symsystem2.comb_explicit_rhs
| [
"def",
"test_not_specified_errors",
"(",
")",
":",
"symsystem1",
"=",
"SymbolicSystem",
"(",
"states",
",",
"comb_explicit_rhs",
")",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"comb_implicit_mat",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"comb_implicit_rhs",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"dyn_implicit_mat",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"dyn_implicit_rhs",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"kin_explicit_rhs",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"compute_explicit_form",
"(",
")",
"symsystem2",
"=",
"SymbolicSystem",
"(",
"coordinates",
",",
"comb_implicit_rhs",
",",
"speeds",
"=",
"speeds",
",",
"mass_matrix",
"=",
"comb_implicit_mat",
")",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem2",
".",
"dyn_implicit_mat",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem2",
".",
"dyn_implicit_rhs",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem2",
".",
"kin_explicit_rhs",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"coordinates",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"speeds",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"bodies",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem1",
".",
"loads",
"with",
"raises",
"(",
"AttributeError",
")",
":",
"symsystem2",
".",
"comb_explicit_rhs"
] | this test will cover errors that arise from trying to access attributes that were not specified upon object creation , or were specified on creation and the user tries to recalculate them . | train | false |
47,539 | def guard_restart():
global RESTART_REQ
sabnzbd.RESTART_REQ = True
| [
"def",
"guard_restart",
"(",
")",
":",
"global",
"RESTART_REQ",
"sabnzbd",
".",
"RESTART_REQ",
"=",
"True"
] | callback for config options requiring a restart . | train | false |
47,540 | def _DoesTargetDependOnMatchingTargets(target):
if (target.match_status == MATCH_STATUS_DOESNT_MATCH):
return False
if ((target.match_status == MATCH_STATUS_MATCHES) or (target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY)):
return True
for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
print ' DCTB ', target.name, 'matches by dep', dep.name
return True
target.match_status = MATCH_STATUS_DOESNT_MATCH
return False
| [
"def",
"_DoesTargetDependOnMatchingTargets",
"(",
"target",
")",
":",
"if",
"(",
"target",
".",
"match_status",
"==",
"MATCH_STATUS_DOESNT_MATCH",
")",
":",
"return",
"False",
"if",
"(",
"(",
"target",
".",
"match_status",
"==",
"MATCH_STATUS_MATCHES",
")",
"or",
"(",
"target",
".",
"match_status",
"==",
"MATCH_STATUS_MATCHES_BY_DEPENDENCY",
")",
")",
":",
"return",
"True",
"for",
"dep",
"in",
"target",
".",
"deps",
":",
"if",
"_DoesTargetDependOnMatchingTargets",
"(",
"dep",
")",
":",
"target",
".",
"match_status",
"=",
"MATCH_STATUS_MATCHES_BY_DEPENDENCY",
"print",
"' DCTB '",
",",
"target",
".",
"name",
",",
"'matches by dep'",
",",
"dep",
".",
"name",
"return",
"True",
"target",
".",
"match_status",
"=",
"MATCH_STATUS_DOESNT_MATCH",
"return",
"False"
] | returns true if |target| or any of its dependencies is one of the targets containing the files supplied as input to analyzer . | train | false |
47,541 | def create_clip_xml_info(readlen, adapl, adapr, quall, qualr):
to_print = ['']
if (adapr >= readlen):
adapr = 0
if (qualr >= readlen):
qualr = 0
if (adapr < 0):
adapr = 0
if (qualr < 0):
qualr = 0
if (adapl < 0):
adapl = 0
if (quall < 0):
quall = 0
if quall:
to_print.append(' <clip_quality_left>')
to_print.append(str(quall))
to_print.append('</clip_quality_left>\n')
if qualr:
to_print.append(' <clip_quality_right>')
to_print.append(str(qualr))
to_print.append('</clip_quality_right>\n')
if adapl:
to_print.append(' <clip_vector_left>')
to_print.append(str(adapl))
to_print.append('</clip_vector_left>\n')
if adapr:
to_print.append(' <clip_vector_right>')
to_print.append(str(adapr))
to_print.append('</clip_vector_right>\n')
return ''.join(to_print)
| [
"def",
"create_clip_xml_info",
"(",
"readlen",
",",
"adapl",
",",
"adapr",
",",
"quall",
",",
"qualr",
")",
":",
"to_print",
"=",
"[",
"''",
"]",
"if",
"(",
"adapr",
">=",
"readlen",
")",
":",
"adapr",
"=",
"0",
"if",
"(",
"qualr",
">=",
"readlen",
")",
":",
"qualr",
"=",
"0",
"if",
"(",
"adapr",
"<",
"0",
")",
":",
"adapr",
"=",
"0",
"if",
"(",
"qualr",
"<",
"0",
")",
":",
"qualr",
"=",
"0",
"if",
"(",
"adapl",
"<",
"0",
")",
":",
"adapl",
"=",
"0",
"if",
"(",
"quall",
"<",
"0",
")",
":",
"quall",
"=",
"0",
"if",
"quall",
":",
"to_print",
".",
"append",
"(",
"' <clip_quality_left>'",
")",
"to_print",
".",
"append",
"(",
"str",
"(",
"quall",
")",
")",
"to_print",
".",
"append",
"(",
"'</clip_quality_left>\\n'",
")",
"if",
"qualr",
":",
"to_print",
".",
"append",
"(",
"' <clip_quality_right>'",
")",
"to_print",
".",
"append",
"(",
"str",
"(",
"qualr",
")",
")",
"to_print",
".",
"append",
"(",
"'</clip_quality_right>\\n'",
")",
"if",
"adapl",
":",
"to_print",
".",
"append",
"(",
"' <clip_vector_left>'",
")",
"to_print",
".",
"append",
"(",
"str",
"(",
"adapl",
")",
")",
"to_print",
".",
"append",
"(",
"'</clip_vector_left>\\n'",
")",
"if",
"adapr",
":",
"to_print",
".",
"append",
"(",
"' <clip_vector_right>'",
")",
"to_print",
".",
"append",
"(",
"str",
"(",
"adapr",
")",
")",
"to_print",
".",
"append",
"(",
"'</clip_vector_right>\\n'",
")",
"return",
"''",
".",
"join",
"(",
"to_print",
")"
] | takes the clip values of the read and formats them into xml ; corrects "wrong" values that might have resulted from simplified calculations earlier in the conversion process . | train | false |
47,542 | def _create_openpgp_cipher(factory, **kwargs):
iv = kwargs.pop('IV', None)
IV = kwargs.pop('iv', None)
if ((None, None) == (iv, IV)):
iv = get_random_bytes(factory.block_size)
if (iv is not None):
if (IV is not None):
raise TypeError("You must either use 'iv' or 'IV', not both")
else:
iv = IV
try:
key = kwargs.pop('key')
except KeyError as e:
raise TypeError(('Missing component: ' + str(e)))
return OpenPgpMode(factory, key, iv, kwargs)
| [
"def",
"_create_openpgp_cipher",
"(",
"factory",
",",
"**",
"kwargs",
")",
":",
"iv",
"=",
"kwargs",
".",
"pop",
"(",
"'IV'",
",",
"None",
")",
"IV",
"=",
"kwargs",
".",
"pop",
"(",
"'iv'",
",",
"None",
")",
"if",
"(",
"(",
"None",
",",
"None",
")",
"==",
"(",
"iv",
",",
"IV",
")",
")",
":",
"iv",
"=",
"get_random_bytes",
"(",
"factory",
".",
"block_size",
")",
"if",
"(",
"iv",
"is",
"not",
"None",
")",
":",
"if",
"(",
"IV",
"is",
"not",
"None",
")",
":",
"raise",
"TypeError",
"(",
"\"You must either use 'iv' or 'IV', not both\"",
")",
"else",
":",
"iv",
"=",
"IV",
"try",
":",
"key",
"=",
"kwargs",
".",
"pop",
"(",
"'key'",
")",
"except",
"KeyError",
"as",
"e",
":",
"raise",
"TypeError",
"(",
"(",
"'Missing component: '",
"+",
"str",
"(",
"e",
")",
")",
")",
"return",
"OpenPgpMode",
"(",
"factory",
",",
"key",
",",
"iv",
",",
"kwargs",
")"
] | create a new block cipher . | train | false |
47,543 | def getLong(value):
return long(value)
| [
"def",
"getLong",
"(",
"value",
")",
":",
"return",
"long",
"(",
"value",
")"
] | get the long . | train | false |
47,546 | def _get_new_toolbox(app):
from galaxy import tools
from galaxy.tools.special_tools import load_lib_tools
from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
app.tool_version_cache = ToolVersionCache(app)
tool_configs = app.config.tool_configs
if (app.config.migrated_tools_config not in tool_configs):
tool_configs.append(app.config.migrated_tools_config)
start = time.time()
new_toolbox = tools.ToolBox(tool_configs, app.config.tool_path, app, app.toolbox._tool_conf_watcher)
new_toolbox.data_manager_tools = app.toolbox.data_manager_tools
load_lib_tools(new_toolbox)
new_toolbox.load_hidden_lib_tool('galaxy/datatypes/set_metadata_tool.xml')
[new_toolbox.register_tool(tool) for tool in new_toolbox.data_manager_tools.values()]
end = (time.time() - start)
log.debug('Toolbox reload took %d seconds', end)
app.reindex_tool_search(new_toolbox)
return new_toolbox
| [
"def",
"_get_new_toolbox",
"(",
"app",
")",
":",
"from",
"galaxy",
"import",
"tools",
"from",
"galaxy",
".",
"tools",
".",
"special_tools",
"import",
"load_lib_tools",
"from",
"galaxy",
".",
"tools",
".",
"toolbox",
".",
"lineages",
".",
"tool_shed",
"import",
"ToolVersionCache",
"app",
".",
"tool_version_cache",
"=",
"ToolVersionCache",
"(",
"app",
")",
"tool_configs",
"=",
"app",
".",
"config",
".",
"tool_configs",
"if",
"(",
"app",
".",
"config",
".",
"migrated_tools_config",
"not",
"in",
"tool_configs",
")",
":",
"tool_configs",
".",
"append",
"(",
"app",
".",
"config",
".",
"migrated_tools_config",
")",
"start",
"=",
"time",
".",
"time",
"(",
")",
"new_toolbox",
"=",
"tools",
".",
"ToolBox",
"(",
"tool_configs",
",",
"app",
".",
"config",
".",
"tool_path",
",",
"app",
",",
"app",
".",
"toolbox",
".",
"_tool_conf_watcher",
")",
"new_toolbox",
".",
"data_manager_tools",
"=",
"app",
".",
"toolbox",
".",
"data_manager_tools",
"load_lib_tools",
"(",
"new_toolbox",
")",
"new_toolbox",
".",
"load_hidden_lib_tool",
"(",
"'galaxy/datatypes/set_metadata_tool.xml'",
")",
"[",
"new_toolbox",
".",
"register_tool",
"(",
"tool",
")",
"for",
"tool",
"in",
"new_toolbox",
".",
"data_manager_tools",
".",
"values",
"(",
")",
"]",
"end",
"=",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start",
")",
"log",
".",
"debug",
"(",
"'Toolbox reload took %d seconds'",
",",
"end",
")",
"app",
".",
"reindex_tool_search",
"(",
"new_toolbox",
")",
"return",
"new_toolbox"
] | generate a new toolbox . | train | false |
47,547 | def get_arp_ip_mac_pairs(device_name, namespace):
device = ip_lib.IPDevice(device_name, namespace)
for entry in device.neigh.show(ip_version=4).splitlines():
match = IPV4_NEIGH_REGEXP.match(entry)
if match:
(yield (match.group('ip'), match.group('mac')))
| [
"def",
"get_arp_ip_mac_pairs",
"(",
"device_name",
",",
"namespace",
")",
":",
"device",
"=",
"ip_lib",
".",
"IPDevice",
"(",
"device_name",
",",
"namespace",
")",
"for",
"entry",
"in",
"device",
".",
"neigh",
".",
"show",
"(",
"ip_version",
"=",
"4",
")",
".",
"splitlines",
"(",
")",
":",
"match",
"=",
"IPV4_NEIGH_REGEXP",
".",
"match",
"(",
"entry",
")",
"if",
"match",
":",
"(",
"yield",
"(",
"match",
".",
"group",
"(",
"'ip'",
")",
",",
"match",
".",
"group",
"(",
"'mac'",
")",
")",
")"
] | generate pairs from the device's ip neigh output . | train | false |
47,548 | def _get_displayed_page_numbers(current, final):
assert (current >= 1)
assert (final >= current)
if (final <= 5):
return list(range(1, (final + 1)))
included = {1, (current - 1), current, (current + 1), final}
if (current <= 4):
included.add(2)
included.add(3)
if (current >= (final - 3)):
included.add((final - 1))
included.add((final - 2))
included = [idx for idx in sorted(list(included)) if ((idx > 0) and (idx <= final))]
if (current > 4):
included.insert(1, None)
if (current < (final - 3)):
included.insert((len(included) - 1), None)
return included
| [
"def",
"_get_displayed_page_numbers",
"(",
"current",
",",
"final",
")",
":",
"assert",
"(",
"current",
">=",
"1",
")",
"assert",
"(",
"final",
">=",
"current",
")",
"if",
"(",
"final",
"<=",
"5",
")",
":",
"return",
"list",
"(",
"range",
"(",
"1",
",",
"(",
"final",
"+",
"1",
")",
")",
")",
"included",
"=",
"{",
"1",
",",
"(",
"current",
"-",
"1",
")",
",",
"current",
",",
"(",
"current",
"+",
"1",
")",
",",
"final",
"}",
"if",
"(",
"current",
"<=",
"4",
")",
":",
"included",
".",
"add",
"(",
"2",
")",
"included",
".",
"add",
"(",
"3",
")",
"if",
"(",
"current",
">=",
"(",
"final",
"-",
"3",
")",
")",
":",
"included",
".",
"add",
"(",
"(",
"final",
"-",
"1",
")",
")",
"included",
".",
"add",
"(",
"(",
"final",
"-",
"2",
")",
")",
"included",
"=",
"[",
"idx",
"for",
"idx",
"in",
"sorted",
"(",
"list",
"(",
"included",
")",
")",
"if",
"(",
"(",
"idx",
">",
"0",
")",
"and",
"(",
"idx",
"<=",
"final",
")",
")",
"]",
"if",
"(",
"current",
">",
"4",
")",
":",
"included",
".",
"insert",
"(",
"1",
",",
"None",
")",
"if",
"(",
"current",
"<",
"(",
"final",
"-",
"3",
")",
")",
":",
"included",
".",
"insert",
"(",
"(",
"len",
"(",
"included",
")",
"-",
"1",
")",
",",
"None",
")",
"return",
"included"
] | this utility function determines a list of page numbers to display . | train | false |
47,549 | def delete_draft(crispin_client, account_id, draft_id, args):
nylas_uid = args.get('nylas_uid')
message_id_header = args.get('message_id_header')
assert (nylas_uid or message_id_header), 'Need at least one header value'
remote_delete_draft(crispin_client, account_id, nylas_uid, message_id_header)
| [
"def",
"delete_draft",
"(",
"crispin_client",
",",
"account_id",
",",
"draft_id",
",",
"args",
")",
":",
"nylas_uid",
"=",
"args",
".",
"get",
"(",
"'nylas_uid'",
")",
"message_id_header",
"=",
"args",
".",
"get",
"(",
"'message_id_header'",
")",
"assert",
"(",
"nylas_uid",
"or",
"message_id_header",
")",
",",
"'Need at least one header value'",
"remote_delete_draft",
"(",
"crispin_client",
",",
"account_id",
",",
"nylas_uid",
",",
"message_id_header",
")"
] | delete a draft from the remote backend . | train | false |
47,550 | def crayon_palette(colors):
palette = [crayons[name] for name in colors]
return color_palette(palette, len(palette))
| [
"def",
"crayon_palette",
"(",
"colors",
")",
":",
"palette",
"=",
"[",
"crayons",
"[",
"name",
"]",
"for",
"name",
"in",
"colors",
"]",
"return",
"color_palette",
"(",
"palette",
",",
"len",
"(",
"palette",
")",
")"
] | make a palette with color names from crayola crayons . | train | false |
47,551 | def uu_encode(input, errors='strict', filename='<data>', mode=438):
assert (errors == 'strict')
from cStringIO import StringIO
from binascii import b2a_uu
infile = StringIO(str(input))
outfile = StringIO()
read = infile.read
write = outfile.write
write(('begin %o %s\n' % ((mode & 511), filename)))
chunk = read(45)
while chunk:
write(b2a_uu(chunk))
chunk = read(45)
write(' \nend\n')
return (outfile.getvalue(), len(input))
| [
"def",
"uu_encode",
"(",
"input",
",",
"errors",
"=",
"'strict'",
",",
"filename",
"=",
"'<data>'",
",",
"mode",
"=",
"438",
")",
":",
"assert",
"(",
"errors",
"==",
"'strict'",
")",
"from",
"cStringIO",
"import",
"StringIO",
"from",
"binascii",
"import",
"b2a_uu",
"infile",
"=",
"StringIO",
"(",
"str",
"(",
"input",
")",
")",
"outfile",
"=",
"StringIO",
"(",
")",
"read",
"=",
"infile",
".",
"read",
"write",
"=",
"outfile",
".",
"write",
"write",
"(",
"(",
"'begin %o %s\\n'",
"%",
"(",
"(",
"mode",
"&",
"511",
")",
",",
"filename",
")",
")",
")",
"chunk",
"=",
"read",
"(",
"45",
")",
"while",
"chunk",
":",
"write",
"(",
"b2a_uu",
"(",
"chunk",
")",
")",
"chunk",
"=",
"read",
"(",
"45",
")",
"write",
"(",
"' \\nend\\n'",
")",
"return",
"(",
"outfile",
".",
"getvalue",
"(",
")",
",",
"len",
"(",
"input",
")",
")"
] | encodes the object input and returns a tuple . | train | false |
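A quick property check of the codec above; 438 decimal is 0o666, so the default header reads begin 666 <filename>. This is Python 2-era code (it uses cStringIO), so the sketch assumes a Python 2 interpreter:

encoded, consumed = uu_encode('hello world', filename='greeting.txt')
assert encoded.startswith('begin 666 greeting.txt\n')  # 438 & 511 == 438 == 0o666
assert encoded.endswith(' \nend\n')                    # trailing terminator line
assert consumed == len('hello world')                  # whole input consumed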
47,554 | def _PropertyPresenter(dumper, prop):
prop_copy = copy.copy(prop)
if (prop.mode is None):
del prop_copy.mode
if (prop.direction is None):
del prop_copy.direction
return dumper.represent_object(prop_copy)
| [
"def",
"_PropertyPresenter",
"(",
"dumper",
",",
"prop",
")",
":",
"prop_copy",
"=",
"copy",
".",
"copy",
"(",
"prop",
")",
"if",
"(",
"prop",
".",
"mode",
"is",
"None",
")",
":",
"del",
"prop_copy",
".",
"mode",
"if",
"(",
"prop",
".",
"direction",
"is",
"None",
")",
":",
"del",
"prop_copy",
".",
"direction",
"return",
"dumper",
".",
"represent_object",
"(",
"prop_copy",
")"
] | a pyyaml presenter for property . | train | false |
47,555 | def to_text_string(obj, encoding=None):
if PY2:
if (encoding is None):
return unicode(obj)
else:
return unicode(obj, encoding)
elif (encoding is None):
return str(obj)
elif isinstance(obj, str):
return obj
else:
return str(obj, encoding)
| [
"def",
"to_text_string",
"(",
"obj",
",",
"encoding",
"=",
"None",
")",
":",
"if",
"PY2",
":",
"if",
"(",
"encoding",
"is",
"None",
")",
":",
"return",
"unicode",
"(",
"obj",
")",
"else",
":",
"return",
"unicode",
"(",
"obj",
",",
"encoding",
")",
"elif",
"(",
"encoding",
"is",
"None",
")",
":",
"return",
"str",
"(",
"obj",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"str",
")",
":",
"return",
"obj",
"else",
":",
"return",
"str",
"(",
"obj",
",",
"encoding",
")"
] | convert obj to text string . | train | true |
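On Python 3 (PY2 false) the behavior reduces to str() plus optional bytes decoding; a small sketch:

assert to_text_string(42) == '42'                         # non-str, no encoding -> str(obj)
assert to_text_string('abc') == 'abc'                     # str passes through untouched
assert to_text_string(b'caf\xc3\xa9', 'utf-8') == 'café'  # bytes decoded with the given encoding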
47,556 | def init_vprinting(**kwargs):
kwargs['str_printer'] = vsstrrepr
kwargs['pretty_printer'] = vpprint
kwargs['latex_printer'] = vlatex
init_printing(**kwargs)
| [
"def",
"init_vprinting",
"(",
"**",
"kwargs",
")",
":",
"kwargs",
"[",
"'str_printer'",
"]",
"=",
"vsstrrepr",
"kwargs",
"[",
"'pretty_printer'",
"]",
"=",
"vpprint",
"kwargs",
"[",
"'latex_printer'",
"]",
"=",
"vlatex",
"init_printing",
"(",
"**",
"kwargs",
")"
] | initializes time derivative printing for all sympy objects . | train | false |
47,557 | def simple_formset_factory(form, add_label='+', formset=BaseSimpleFormSet, initial=None):
attrs = {'form': form, 'add_label': add_label, 'initial': initial}
return type((form.__name__ + 'SimpleFormSet'), (formset,), attrs)
| [
"def",
"simple_formset_factory",
"(",
"form",
",",
"add_label",
"=",
"'+'",
",",
"formset",
"=",
"BaseSimpleFormSet",
",",
"initial",
"=",
"None",
")",
":",
"attrs",
"=",
"{",
"'form'",
":",
"form",
",",
"'add_label'",
":",
"add_label",
",",
"'initial'",
":",
"initial",
"}",
"return",
"type",
"(",
"(",
"form",
".",
"__name__",
"+",
"'SimpleFormSet'",
")",
",",
"(",
"formset",
",",
")",
",",
"attrs",
")"
] | return a formset for the given form class . | train | false |
47,559 | def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
return model_instance._populate_entity(_entity_class).ToPb()
| [
"def",
"model_to_protobuf",
"(",
"model_instance",
",",
"_entity_class",
"=",
"datastore",
".",
"Entity",
")",
":",
"return",
"model_instance",
".",
"_populate_entity",
"(",
"_entity_class",
")",
".",
"ToPb",
"(",
")"
] | encodes a model instance as a protocol buffer . | train | false |
47,560 | def substitute_keywords(string, user_id, context):
KEYWORD_FUNCTION_MAP = {'%%USER_ID%%': (lambda : anonymous_id_from_user_id(user_id)), '%%USER_FULLNAME%%': (lambda : context.get('name')), '%%COURSE_DISPLAY_NAME%%': (lambda : context.get('course_title')), '%%COURSE_END_DATE%%': (lambda : context.get('course_end_date'))}
for key in KEYWORD_FUNCTION_MAP.keys():
if (key in string):
substitutor = KEYWORD_FUNCTION_MAP[key]
string = string.replace(key, substitutor())
return string
| [
"def",
"substitute_keywords",
"(",
"string",
",",
"user_id",
",",
"context",
")",
":",
"KEYWORD_FUNCTION_MAP",
"=",
"{",
"'%%USER_ID%%'",
":",
"(",
"lambda",
":",
"anonymous_id_from_user_id",
"(",
"user_id",
")",
")",
",",
"'%%USER_FULLNAME%%'",
":",
"(",
"lambda",
":",
"context",
".",
"get",
"(",
"'name'",
")",
")",
",",
"'%%COURSE_DISPLAY_NAME%%'",
":",
"(",
"lambda",
":",
"context",
".",
"get",
"(",
"'course_title'",
")",
")",
",",
"'%%COURSE_END_DATE%%'",
":",
"(",
"lambda",
":",
"context",
".",
"get",
"(",
"'course_end_date'",
")",
")",
"}",
"for",
"key",
"in",
"KEYWORD_FUNCTION_MAP",
".",
"keys",
"(",
")",
":",
"if",
"(",
"key",
"in",
"string",
")",
":",
"substitutor",
"=",
"KEYWORD_FUNCTION_MAP",
"[",
"key",
"]",
"string",
"=",
"string",
".",
"replace",
"(",
"key",
",",
"substitutor",
"(",
")",
")",
"return",
"string"
] | replaces all %%-encoded words using the keyword_function_map mapping functions ; iterates through all keywords that must be substituted and replaces them by calling the corresponding functions stored in keyword_function_map . | train | false |
47,561 | def getAllTransformedPaths(transformedPaths, xmlObject):
for archivableObject in xmlObject.archivableObjects:
transformedPaths += archivableObject.getTransformedPaths()
return transformedPaths
| [
"def",
"getAllTransformedPaths",
"(",
"transformedPaths",
",",
"xmlObject",
")",
":",
"for",
"archivableObject",
"in",
"xmlObject",
".",
"archivableObjects",
":",
"transformedPaths",
"+=",
"archivableObject",
".",
"getTransformedPaths",
"(",
")",
"return",
"transformedPaths"
] | get all transformed paths . | train | false |
47,562 | def substitute_category_name(category_name):
return re.sub('\\s', '_', category_name).replace('-', '_').lower()
| [
"def",
"substitute_category_name",
"(",
"category_name",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'\\\\s'",
",",
"'_'",
",",
"category_name",
")",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
".",
"lower",
"(",
")"
] | replaces whitespace and - characters in category_name to allow category_name to be made into a valid python identifier . | train | false |
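For instance (the result is only a valid identifier when the input carries no punctuation beyond spaces and hyphens):

assert substitute_category_name('Clothing - Shoes') == 'clothing___shoes'
assert substitute_category_name('Home\tAppliances') == 'home_appliances'  # any whitespace, not just spaces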
47,563 | def load_states():
states = {}
__opts__['grains'] = salt.loader.grains(__opts__)
__opts__['pillar'] = __pillar__
lazy_utils = salt.loader.utils(__opts__)
lazy_funcs = salt.loader.minion_mods(__opts__, utils=lazy_utils)
lazy_serializers = salt.loader.serializers(__opts__)
lazy_states = salt.loader.states(__opts__, lazy_funcs, lazy_utils, lazy_serializers)
for (key, func) in six.iteritems(lazy_states):
if ('.' not in key):
continue
(mod_name, func_name) = key.split('.', 1)
if (mod_name not in states):
states[mod_name] = {}
states[mod_name][func_name] = func
__context__['pyobjects_states'] = states
| [
"def",
"load_states",
"(",
")",
":",
"states",
"=",
"{",
"}",
"__opts__",
"[",
"'grains'",
"]",
"=",
"salt",
".",
"loader",
".",
"grains",
"(",
"__opts__",
")",
"__opts__",
"[",
"'pillar'",
"]",
"=",
"__pillar__",
"lazy_utils",
"=",
"salt",
".",
"loader",
".",
"utils",
"(",
"__opts__",
")",
"lazy_funcs",
"=",
"salt",
".",
"loader",
".",
"minion_mods",
"(",
"__opts__",
",",
"utils",
"=",
"lazy_utils",
")",
"lazy_serializers",
"=",
"salt",
".",
"loader",
".",
"serializers",
"(",
"__opts__",
")",
"lazy_states",
"=",
"salt",
".",
"loader",
".",
"states",
"(",
"__opts__",
",",
"lazy_funcs",
",",
"lazy_utils",
",",
"lazy_serializers",
")",
"for",
"(",
"key",
",",
"func",
")",
"in",
"six",
".",
"iteritems",
"(",
"lazy_states",
")",
":",
"if",
"(",
"'.'",
"not",
"in",
"key",
")",
":",
"continue",
"(",
"mod_name",
",",
"func_name",
")",
"=",
"key",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"if",
"(",
"mod_name",
"not",
"in",
"states",
")",
":",
"states",
"[",
"mod_name",
"]",
"=",
"{",
"}",
"states",
"[",
"mod_name",
"]",
"[",
"func_name",
"]",
"=",
"func",
"__context__",
"[",
"'pyobjects_states'",
"]",
"=",
"states"
] | this loads our states into the salt __context__ . | train | true |
47,564 | @receiver(dbsignals.post_save, sender=ExternalAccount, dispatch_uid='add_employee_vouch_sig')
def add_employee_vouch(sender, instance, **kwargs):
if (kwargs.get('raw') or (not (instance.type == ExternalAccount.TYPE_EMAIL))):
return
instance.user.auto_vouch()
| [
"@",
"receiver",
"(",
"dbsignals",
".",
"post_save",
",",
"sender",
"=",
"ExternalAccount",
",",
"dispatch_uid",
"=",
"'add_employee_vouch_sig'",
")",
"def",
"add_employee_vouch",
"(",
"sender",
",",
"instance",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"kwargs",
".",
"get",
"(",
"'raw'",
")",
"or",
"(",
"not",
"(",
"instance",
".",
"type",
"==",
"ExternalAccount",
".",
"TYPE_EMAIL",
")",
")",
")",
":",
"return",
"instance",
".",
"user",
".",
"auto_vouch",
"(",
")"
] | add a vouch if an alternate email address is a mozilla* address . | train | false |
47,566 | def modelresource_factory(model, resource_class=ModelResource):
attrs = {u'model': model}
Meta = type(str(u'Meta'), (object,), attrs)
class_name = (model.__name__ + str(u'Resource'))
class_attrs = {u'Meta': Meta}
metaclass = ModelDeclarativeMetaclass
return metaclass(class_name, (resource_class,), class_attrs)
| [
"def",
"modelresource_factory",
"(",
"model",
",",
"resource_class",
"=",
"ModelResource",
")",
":",
"attrs",
"=",
"{",
"u'model'",
":",
"model",
"}",
"Meta",
"=",
"type",
"(",
"str",
"(",
"u'Meta'",
")",
",",
"(",
"object",
",",
")",
",",
"attrs",
")",
"class_name",
"=",
"(",
"model",
".",
"__name__",
"+",
"str",
"(",
"u'Resource'",
")",
")",
"class_attrs",
"=",
"{",
"u'Meta'",
":",
"Meta",
"}",
"metaclass",
"=",
"ModelDeclarativeMetaclass",
"return",
"metaclass",
"(",
"class_name",
",",
"(",
"resource_class",
",",
")",
",",
"class_attrs",
")"
] | factory for creating modelresource class for given django model . | train | true |
47,567 | def remove_files(basedir):
if (sys.version_info >= (3,)):
removelist = py2_only_files
msg = 'Python 2-only file'
else:
removelist = py3_only_files
msg = 'Python 3-only file'
for relpath in removelist:
path = os.path.join(basedir, relpath)
print ('Removing %s %r' % (msg, relpath))
os.remove(path)
| [
"def",
"remove_files",
"(",
"basedir",
")",
":",
"if",
"(",
"sys",
".",
"version_info",
">=",
"(",
"3",
",",
")",
")",
":",
"removelist",
"=",
"py2_only_files",
"msg",
"=",
"'Python 2-only file'",
"else",
":",
"removelist",
"=",
"py3_only_files",
"msg",
"=",
"'Python 3-only file'",
"for",
"relpath",
"in",
"removelist",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"basedir",
",",
"relpath",
")",
"print",
"(",
"'Removing %s %r'",
"%",
"(",
"msg",
",",
"relpath",
")",
")",
"os",
".",
"remove",
"(",
"path",
")"
] | remove unwanted files from the current source tree . | train | false |
47,568 | def read_key_value_file(csvfile):
reader = csv.reader(csvfile, delimiter=',')
next(reader)
kvstore = {}
for row in reader:
kvstore[row[0]] = row[1]
return kvstore
| [
"def",
"read_key_value_file",
"(",
"csvfile",
")",
":",
"reader",
"=",
"csv",
".",
"reader",
"(",
"csvfile",
",",
"delimiter",
"=",
"','",
")",
"next",
"(",
"reader",
")",
"kvstore",
"=",
"{",
"}",
"for",
"row",
"in",
"reader",
":",
"kvstore",
"[",
"row",
"[",
"0",
"]",
"]",
"=",
"row",
"[",
"1",
"]",
"return",
"kvstore"
] | reads csv file . | train | false |
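The single next() call drops the header row, so a two-column csv maps straight into a dict; csvfile can be any iterable of text lines:

import io

src = io.StringIO('key,value\nwidth,640\nheight,480\n')
print(read_key_value_file(src))  # {'width': '640', 'height': '480'}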
47,569 | def test_slices_pos_different_dim():
with pytest.raises(ValueError) as e:
overlap_slices((4, 5), (1, 2), (0, 0, 3))
assert (u'the same number of dimensions' in str(e.value))
| [
"def",
"test_slices_pos_different_dim",
"(",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"ValueError",
")",
"as",
"e",
":",
"overlap_slices",
"(",
"(",
"4",
",",
"5",
")",
",",
"(",
"1",
",",
"2",
")",
",",
"(",
"0",
",",
"0",
",",
"3",
")",
")",
"assert",
"(",
"u'the same number of dimensions'",
"in",
"str",
"(",
"e",
".",
"value",
")",
")"
] | position must have same dim as arrays . | train | false |
47,570 | def RegisterModule(modName, modPath):
try:
import os
os.stat(modPath)
except os.error:
print ('Warning: Registering non-existant module %s' % modPath)
win32api.RegSetValue(GetRootKey(), (BuildDefaultPythonKey() + ('\\Modules\\%s' % modName)), win32con.REG_SZ, modPath)
| [
"def",
"RegisterModule",
"(",
"modName",
",",
"modPath",
")",
":",
"try",
":",
"import",
"os",
"os",
".",
"stat",
"(",
"modPath",
")",
"except",
"os",
".",
"error",
":",
"print",
"(",
"'Warning: Registering non-existant module %s'",
"%",
"modPath",
")",
"win32api",
".",
"RegSetValue",
"(",
"GetRootKey",
"(",
")",
",",
"(",
"BuildDefaultPythonKey",
"(",
")",
"+",
"(",
"'\\\\Modules\\\\%s'",
"%",
"modName",
")",
")",
",",
"win32con",
".",
"REG_SZ",
",",
"modPath",
")"
] | register an explicit module in the registry . | train | false |
47,571 | def box_blur(image, radius):
image.load()
return image._new(image.im.box_blur(radius))
| [
"def",
"box_blur",
"(",
"image",
",",
"radius",
")",
":",
"image",
".",
"load",
"(",
")",
"return",
"image",
".",
"_new",
"(",
"image",
".",
"im",
".",
"box_blur",
"(",
"radius",
")",
")"
] | blur the image by setting each pixel to the average value of the pixels in a square box extending radius pixels in each direction . | train | false |
47,572 | def idzp_aid(eps, A):
A = np.asfortranarray(A)
(m, n) = A.shape
(n2, w) = idz_frmi(m)
proj = np.empty((((n * ((2 * n2) + 1)) + n2) + 1), dtype='complex128', order='F')
(k, idx, proj) = _id.idzp_aid(eps, A, w, proj)
proj = proj[:(k * (n - k))].reshape((k, (n - k)), order='F')
return (k, idx, proj)
| [
"def",
"idzp_aid",
"(",
"eps",
",",
"A",
")",
":",
"A",
"=",
"np",
".",
"asfortranarray",
"(",
"A",
")",
"(",
"m",
",",
"n",
")",
"=",
"A",
".",
"shape",
"(",
"n2",
",",
"w",
")",
"=",
"idz_frmi",
"(",
"m",
")",
"proj",
"=",
"np",
".",
"empty",
"(",
"(",
"(",
"(",
"n",
"*",
"(",
"(",
"2",
"*",
"n2",
")",
"+",
"1",
")",
")",
"+",
"n2",
")",
"+",
"1",
")",
",",
"dtype",
"=",
"'complex128'",
",",
"order",
"=",
"'F'",
")",
"(",
"k",
",",
"idx",
",",
"proj",
")",
"=",
"_id",
".",
"idzp_aid",
"(",
"eps",
",",
"A",
",",
"w",
",",
"proj",
")",
"proj",
"=",
"proj",
"[",
":",
"(",
"k",
"*",
"(",
"n",
"-",
"k",
")",
")",
"]",
".",
"reshape",
"(",
"(",
"k",
",",
"(",
"n",
"-",
"k",
")",
")",
",",
"order",
"=",
"'F'",
")",
"return",
"(",
"k",
",",
"idx",
",",
"proj",
")"
] | compute id of a complex matrix to a specified relative precision using random sampling . | train | false |
47,575 | def get_scene_seasons(indexer_id):
exceptionsSeasonList = []
if (indexer_id not in exceptionsSeasonCache):
dbData = [x[u'doc'] for x in sickrage.srCore.cacheDB.db.get_many(u'scene_exceptions', indexer_id, with_doc=True)]
exceptionsSeasonList = list(set([int(x[u'season']) for x in dbData]))
if (not (indexer_id in exceptionsSeasonCache)):
exceptionsSeasonCache[indexer_id] = {}
exceptionsSeasonCache[indexer_id] = exceptionsSeasonList
else:
exceptionsSeasonList = exceptionsSeasonCache[indexer_id]
return exceptionsSeasonList
| [
"def",
"get_scene_seasons",
"(",
"indexer_id",
")",
":",
"exceptionsSeasonList",
"=",
"[",
"]",
"if",
"(",
"indexer_id",
"not",
"in",
"exceptionsSeasonCache",
")",
":",
"dbData",
"=",
"[",
"x",
"[",
"u'doc'",
"]",
"for",
"x",
"in",
"sickrage",
".",
"srCore",
".",
"cacheDB",
".",
"db",
".",
"get_many",
"(",
"u'scene_exceptions'",
",",
"indexer_id",
",",
"with_doc",
"=",
"True",
")",
"]",
"exceptionsSeasonList",
"=",
"list",
"(",
"set",
"(",
"[",
"int",
"(",
"x",
"[",
"u'season'",
"]",
")",
"for",
"x",
"in",
"dbData",
"]",
")",
")",
"if",
"(",
"not",
"(",
"indexer_id",
"in",
"exceptionsSeasonCache",
")",
")",
":",
"exceptionsSeasonCache",
"[",
"indexer_id",
"]",
"=",
"{",
"}",
"exceptionsSeasonCache",
"[",
"indexer_id",
"]",
"=",
"exceptionsSeasonList",
"else",
":",
"exceptionsSeasonList",
"=",
"exceptionsSeasonCache",
"[",
"indexer_id",
"]",
"return",
"exceptionsSeasonList"
] | return a list of season numbers that have scene exceptions . | train | false |
47,579 | def set_flannel_facts_if_unset(facts):
if ('common' in facts):
if ('use_flannel' not in facts['common']):
use_flannel = False
facts['common']['use_flannel'] = use_flannel
return facts
| [
"def",
"set_flannel_facts_if_unset",
"(",
"facts",
")",
":",
"if",
"(",
"'common'",
"in",
"facts",
")",
":",
"if",
"(",
"'use_flannel'",
"not",
"in",
"facts",
"[",
"'common'",
"]",
")",
":",
"use_flannel",
"=",
"False",
"facts",
"[",
"'common'",
"]",
"[",
"'use_flannel'",
"]",
"=",
"use_flannel",
"return",
"facts"
] | set flannel facts if not already present in facts dict . args: facts : existing facts . returns: dict: the facts dict updated with the flannel facts if they were not already present . | train | false |
47,580 | def processor():
return uname()[5]
| [
"def",
"processor",
"(",
")",
":",
"return",
"uname",
"(",
")",
"[",
"5",
"]"
] | returns the processor name . | train | false |
47,581 | def _take_last(a, skipna=True):
if (skipna is False):
return a.iloc[(-1)]
else:
group_dummy = np.ones(len(a.index))
last_row = a.groupby(group_dummy).last()
if isinstance(a, pd.DataFrame):
return pd.Series(last_row.values[0], index=a.columns)
else:
return last_row.values[0]
| [
"def",
"_take_last",
"(",
"a",
",",
"skipna",
"=",
"True",
")",
":",
"if",
"(",
"skipna",
"is",
"False",
")",
":",
"return",
"a",
".",
"iloc",
"[",
"(",
"-",
"1",
")",
"]",
"else",
":",
"group_dummy",
"=",
"np",
".",
"ones",
"(",
"len",
"(",
"a",
".",
"index",
")",
")",
"last_row",
"=",
"a",
".",
"groupby",
"(",
"group_dummy",
")",
".",
"last",
"(",
")",
"if",
"isinstance",
"(",
"a",
",",
"pd",
".",
"DataFrame",
")",
":",
"return",
"pd",
".",
"Series",
"(",
"last_row",
".",
"values",
"[",
"0",
"]",
",",
"index",
"=",
"a",
".",
"columns",
")",
"else",
":",
"return",
"last_row",
".",
"values",
"[",
"0",
"]"
] | take last row of dataframe / last value of series considering nan . | train | false |
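The groupby detour matters only for missing data: .last() skips NaN while .iloc[-1] does not. A sketch:

import numpy as np
import pandas as pd

s = pd.Series([1.0, 2.0, np.nan])
print(_take_last(s, skipna=False))  # nan -- the literal last row
print(_take_last(s, skipna=True))   # 2.0 -- last non-missing value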
47,584 | def _osquery(sql, format='json'):
ret = {'result': True}
cmd = 'osqueryi --json "{0}"'.format(sql)
res = __salt__['cmd.run_all'](cmd)
if (res['retcode'] == 0):
ret['data'] = json.loads(res['stdout'])
else:
ret['result'] = False
ret['error'] = res['stderr']
return ret
| [
"def",
"_osquery",
"(",
"sql",
",",
"format",
"=",
"'json'",
")",
":",
"ret",
"=",
"{",
"'result'",
":",
"True",
"}",
"cmd",
"=",
"'osqueryi --json \"{0}\"'",
".",
"format",
"(",
"sql",
")",
"res",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"if",
"(",
"res",
"[",
"'retcode'",
"]",
"==",
"0",
")",
":",
"ret",
"[",
"'data'",
"]",
"=",
"json",
".",
"loads",
"(",
"res",
"[",
"'stdout'",
"]",
")",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'error'",
"]",
"=",
"res",
"[",
"'stderr'",
"]",
"return",
"ret"
] | helper function to run raw osquery queries . | train | false |
47,585 | def improve_keys(data, metaquery=False):
if (not isinstance(data, dict)):
return data
if metaquery:
for key in six.iterkeys(data):
if ('.$' in key):
key_list = []
for k in quote_key(key):
key_list.append(k)
new_key = '.'.join(key_list)
data[new_key] = data.pop(key)
else:
for (key, value) in data.items():
if isinstance(value, dict):
improve_keys(value)
if ('.' in key):
new_dict = {}
for k in quote_key(key, reverse=True):
new = {}
new[k] = (new_dict if new_dict else data.pop(key))
new_dict = new
data.update(new_dict)
elif key.startswith('$'):
new_key = parse.quote(key)
data[new_key] = data.pop(key)
return data
| [
"def",
"improve_keys",
"(",
"data",
",",
"metaquery",
"=",
"False",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"data",
",",
"dict",
")",
")",
":",
"return",
"data",
"if",
"metaquery",
":",
"for",
"key",
"in",
"six",
".",
"iterkeys",
"(",
"data",
")",
":",
"if",
"(",
"'.$'",
"in",
"key",
")",
":",
"key_list",
"=",
"[",
"]",
"for",
"k",
"in",
"quote_key",
"(",
"key",
")",
":",
"key_list",
".",
"append",
"(",
"k",
")",
"new_key",
"=",
"'.'",
".",
"join",
"(",
"key_list",
")",
"data",
"[",
"new_key",
"]",
"=",
"data",
".",
"pop",
"(",
"key",
")",
"else",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"data",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"improve_keys",
"(",
"value",
")",
"if",
"(",
"'.'",
"in",
"key",
")",
":",
"new_dict",
"=",
"{",
"}",
"for",
"k",
"in",
"quote_key",
"(",
"key",
",",
"reverse",
"=",
"True",
")",
":",
"new",
"=",
"{",
"}",
"new",
"[",
"k",
"]",
"=",
"(",
"new_dict",
"if",
"new_dict",
"else",
"data",
".",
"pop",
"(",
"key",
")",
")",
"new_dict",
"=",
"new",
"data",
".",
"update",
"(",
"new_dict",
")",
"elif",
"key",
".",
"startswith",
"(",
"'$'",
")",
":",
"new_key",
"=",
"parse",
".",
"quote",
"(",
"key",
")",
"data",
"[",
"new_key",
"]",
"=",
"data",
".",
"pop",
"(",
"key",
")",
"return",
"data"
] | improves keys in dict if they contain a '.' or start with '$' . | train | false |
47,588 | def java_ver(release='', vendor='', vminfo=('', '', ''), osinfo=('', '', '')):
try:
import java.lang
except ImportError:
return (release, vendor, vminfo, osinfo)
vendor = _java_getprop('java.vendor', vendor)
release = _java_getprop('java.version', release)
(vm_name, vm_release, vm_vendor) = vminfo
vm_name = _java_getprop('java.vm.name', vm_name)
vm_vendor = _java_getprop('java.vm.vendor', vm_vendor)
vm_release = _java_getprop('java.vm.version', vm_release)
vminfo = (vm_name, vm_release, vm_vendor)
(os_name, os_version, os_arch) = osinfo
os_arch = _java_getprop('java.os.arch', os_arch)
os_name = _java_getprop('java.os.name', os_name)
os_version = _java_getprop('java.os.version', os_version)
osinfo = (os_name, os_version, os_arch)
return (release, vendor, vminfo, osinfo)
| [
"def",
"java_ver",
"(",
"release",
"=",
"''",
",",
"vendor",
"=",
"''",
",",
"vminfo",
"=",
"(",
"''",
",",
"''",
",",
"''",
")",
",",
"osinfo",
"=",
"(",
"''",
",",
"''",
",",
"''",
")",
")",
":",
"try",
":",
"import",
"java",
".",
"lang",
"except",
"ImportError",
":",
"return",
"(",
"release",
",",
"vendor",
",",
"vminfo",
",",
"osinfo",
")",
"vendor",
"=",
"_java_getprop",
"(",
"'java.vendor'",
",",
"vendor",
")",
"release",
"=",
"_java_getprop",
"(",
"'java.version'",
",",
"release",
")",
"(",
"vm_name",
",",
"vm_release",
",",
"vm_vendor",
")",
"=",
"vminfo",
"vm_name",
"=",
"_java_getprop",
"(",
"'java.vm.name'",
",",
"vm_name",
")",
"vm_vendor",
"=",
"_java_getprop",
"(",
"'java.vm.vendor'",
",",
"vm_vendor",
")",
"vm_release",
"=",
"_java_getprop",
"(",
"'java.vm.version'",
",",
"vm_release",
")",
"vminfo",
"=",
"(",
"vm_name",
",",
"vm_release",
",",
"vm_vendor",
")",
"(",
"os_name",
",",
"os_version",
",",
"os_arch",
")",
"=",
"osinfo",
"os_arch",
"=",
"_java_getprop",
"(",
"'java.os.arch'",
",",
"os_arch",
")",
"os_name",
"=",
"_java_getprop",
"(",
"'java.os.name'",
",",
"os_name",
")",
"os_version",
"=",
"_java_getprop",
"(",
"'java.os.version'",
",",
"os_version",
")",
"osinfo",
"=",
"(",
"os_name",
",",
"os_version",
",",
"os_arch",
")",
"return",
"(",
"release",
",",
"vendor",
",",
"vminfo",
",",
"osinfo",
")"
] | version interface for jython . | train | false |
47,589 | def is_walrus(s3_url):
if (s3_url is not None):
o = urlparse.urlparse(s3_url)
return (not o.hostname.endswith('amazonaws.com'))
else:
return False
| [
"def",
"is_walrus",
"(",
"s3_url",
")",
":",
"if",
"(",
"s3_url",
"is",
"not",
"None",
")",
":",
"o",
"=",
"urlparse",
".",
"urlparse",
"(",
"s3_url",
")",
"return",
"(",
"not",
"o",
".",
"hostname",
".",
"endswith",
"(",
"'amazonaws.com'",
")",
")",
"else",
":",
"return",
"False"
] | return true if it's a walrus endpoint . | train | false |
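Behavior sketch (any hostname not ending in amazonaws.com is treated as a Walrus/Eucalyptus endpoint):

assert is_walrus('https://s3.us-east-1.amazonaws.com') is False
assert is_walrus('http://walrus.example.com:8773/services/Walrus') is True
assert is_walrus(None) is False  # no url configured -> plain AWS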
47,590 | def datetime_is_naive(dateTime):
return ((dateTime.tzinfo is None) or (dateTime.tzinfo.utcoffset(dateTime) is None))
| [
"def",
"datetime_is_naive",
"(",
"dateTime",
")",
":",
"return",
"(",
"(",
"dateTime",
".",
"tzinfo",
"is",
"None",
")",
"or",
"(",
"dateTime",
".",
"tzinfo",
".",
"utcoffset",
"(",
"dateTime",
")",
"is",
"None",
")",
")"
] | returns true if datetime is naive . | train | false |
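This is the standard aware/naive test from the datetime documentation; for example:

from datetime import datetime, timezone

assert datetime_is_naive(datetime(2024, 1, 1)) is True                        # no tzinfo
assert datetime_is_naive(datetime(2024, 1, 1, tzinfo=timezone.utc)) is False  # aware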
47,592 | def isXSegmentIntersectingPath(path, segmentFirstX, segmentSecondX, segmentYMirror, y):
rotatedPath = getRotatedComplexes(segmentYMirror, path)
for pointIndex in xrange((len(rotatedPath) - 1)):
pointFirst = rotatedPath[pointIndex]
pointSecond = rotatedPath[(pointIndex + 1)]
if isLineIntersectingInsideXSegment(pointFirst, pointSecond, segmentFirstX, segmentSecondX, y):
return True
return False
| [
"def",
"isXSegmentIntersectingPath",
"(",
"path",
",",
"segmentFirstX",
",",
"segmentSecondX",
",",
"segmentYMirror",
",",
"y",
")",
":",
"rotatedPath",
"=",
"getRotatedComplexes",
"(",
"segmentYMirror",
",",
"path",
")",
"for",
"pointIndex",
"in",
"xrange",
"(",
"(",
"len",
"(",
"rotatedPath",
")",
"-",
"1",
")",
")",
":",
"pointFirst",
"=",
"rotatedPath",
"[",
"pointIndex",
"]",
"pointSecond",
"=",
"rotatedPath",
"[",
"(",
"pointIndex",
"+",
"1",
")",
"]",
"if",
"isLineIntersectingInsideXSegment",
"(",
"pointFirst",
",",
"pointSecond",
",",
"segmentFirstX",
",",
"segmentSecondX",
",",
"y",
")",
":",
"return",
"True",
"return",
"False"
] | determine if a path is crossing inside the x segment . | train | false |
47,594 | def strong_product(G, H):
GH = _init_product_graph(G, H)
GH.add_nodes_from(_node_product(G, H))
GH.add_edges_from(_nodes_cross_edges(G, H))
GH.add_edges_from(_edges_cross_nodes(G, H))
GH.add_edges_from(_directed_edges_cross_edges(G, H))
if (not GH.is_directed()):
GH.add_edges_from(_undirected_edges_cross_edges(G, H))
GH.name = (((('Strong product(' + G.name) + ',') + H.name) + ')')
return GH
| [
"def",
"strong_product",
"(",
"G",
",",
"H",
")",
":",
"GH",
"=",
"_init_product_graph",
"(",
"G",
",",
"H",
")",
"GH",
".",
"add_nodes_from",
"(",
"_node_product",
"(",
"G",
",",
"H",
")",
")",
"GH",
".",
"add_edges_from",
"(",
"_nodes_cross_edges",
"(",
"G",
",",
"H",
")",
")",
"GH",
".",
"add_edges_from",
"(",
"_edges_cross_nodes",
"(",
"G",
",",
"H",
")",
")",
"GH",
".",
"add_edges_from",
"(",
"_directed_edges_cross_edges",
"(",
"G",
",",
"H",
")",
")",
"if",
"(",
"not",
"GH",
".",
"is_directed",
"(",
")",
")",
":",
"GH",
".",
"add_edges_from",
"(",
"_undirected_edges_cross_edges",
"(",
"G",
",",
"H",
")",
")",
"GH",
".",
"name",
"=",
"(",
"(",
"(",
"(",
"'Strong product('",
"+",
"G",
".",
"name",
")",
"+",
"','",
")",
"+",
"H",
".",
"name",
")",
"+",
"')'",
")",
"return",
"GH"
] | return the strong product of g and h . | train | false |
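The node set is the cartesian product, so the strong product of a 2-path and a 3-path has 6 nodes; networkx ships the same operation, which makes an easy sanity check of the construction:

import networkx as nx

G, H = nx.path_graph(2), nx.path_graph(3)
GH = nx.strong_product(G, H)
print(GH.number_of_nodes())         # 6 == 2 * 3
print(GH.has_edge((0, 0), (1, 1)))  # True -- diagonal edge from edges-cross-edges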
47,595 | def item_tax(item, discounts):
price = item.get_price_per_item(discounts=discounts)
return (u'US::%s:y' % price.tax)
| [
"def",
"item_tax",
"(",
"item",
",",
"discounts",
")",
":",
"price",
"=",
"item",
".",
"get_price_per_item",
"(",
"discounts",
"=",
"discounts",
")",
"return",
"(",
"u'US::%s:y'",
"%",
"price",
".",
"tax",
")"
] | for some countries you need to set tax info . read more: URL . | train | false |
47,596 | def _is_hidden_win(path):
hidden_mask = 2
attrs = ctypes.windll.kernel32.GetFileAttributesW(beets.util.syspath(path))
return ((attrs >= 0) and (attrs & hidden_mask))
| [
"def",
"_is_hidden_win",
"(",
"path",
")",
":",
"hidden_mask",
"=",
"2",
"attrs",
"=",
"ctypes",
".",
"windll",
".",
"kernel32",
".",
"GetFileAttributesW",
"(",
"beets",
".",
"util",
".",
"syspath",
"(",
"path",
")",
")",
"return",
"(",
"(",
"attrs",
">=",
"0",
")",
"and",
"(",
"attrs",
"&",
"hidden_mask",
")",
")"
] | return whether or not a file is hidden on windows . | train | false |
47,597 | def load_raw_data(assets, dates, data_query_time, data_query_tz, expr, odo_kwargs, checkpoints=None):
(lower_dt, upper_dt) = normalize_data_query_bounds(dates[0], dates[(-1)], data_query_time, data_query_tz)
raw = ffill_query_in_range(expr, lower_dt, upper_dt, checkpoints=checkpoints, odo_kwargs=odo_kwargs)
sids = raw[SID_FIELD_NAME]
raw.drop(sids[(~ sids.isin(assets))].index, inplace=True)
if (data_query_time is not None):
normalize_timestamp_to_query_time(raw, data_query_time, data_query_tz, inplace=True, ts_field=TS_FIELD_NAME)
return raw
| [
"def",
"load_raw_data",
"(",
"assets",
",",
"dates",
",",
"data_query_time",
",",
"data_query_tz",
",",
"expr",
",",
"odo_kwargs",
",",
"checkpoints",
"=",
"None",
")",
":",
"(",
"lower_dt",
",",
"upper_dt",
")",
"=",
"normalize_data_query_bounds",
"(",
"dates",
"[",
"0",
"]",
",",
"dates",
"[",
"(",
"-",
"1",
")",
"]",
",",
"data_query_time",
",",
"data_query_tz",
")",
"raw",
"=",
"ffill_query_in_range",
"(",
"expr",
",",
"lower_dt",
",",
"upper_dt",
",",
"checkpoints",
"=",
"checkpoints",
",",
"odo_kwargs",
"=",
"odo_kwargs",
")",
"sids",
"=",
"raw",
"[",
"SID_FIELD_NAME",
"]",
"raw",
".",
"drop",
"(",
"sids",
"[",
"(",
"~",
"sids",
".",
"isin",
"(",
"assets",
")",
")",
"]",
".",
"index",
",",
"inplace",
"=",
"True",
")",
"if",
"(",
"data_query_time",
"is",
"not",
"None",
")",
":",
"normalize_timestamp_to_query_time",
"(",
"raw",
",",
"data_query_time",
",",
"data_query_tz",
",",
"inplace",
"=",
"True",
",",
"ts_field",
"=",
"TS_FIELD_NAME",
")",
"return",
"raw"
] | given an expression representing data to load , query it within the data query bounds and return the raw results restricted to the requested assets . | train | false |
47,598 | def _token_callable(token, local_dict, global_dict, nextToken=None):
func = local_dict.get(token[1])
if (not func):
func = global_dict.get(token[1])
return (callable(func) and (not isinstance(func, sympy.Symbol)))
| [
"def",
"_token_callable",
"(",
"token",
",",
"local_dict",
",",
"global_dict",
",",
"nextToken",
"=",
"None",
")",
":",
"func",
"=",
"local_dict",
".",
"get",
"(",
"token",
"[",
"1",
"]",
")",
"if",
"(",
"not",
"func",
")",
":",
"func",
"=",
"global_dict",
".",
"get",
"(",
"token",
"[",
"1",
"]",
")",
"return",
"(",
"callable",
"(",
"func",
")",
"and",
"(",
"not",
"isinstance",
"(",
"func",
",",
"sympy",
".",
"Symbol",
")",
")",
")"
] | predicate for whether a token name represents a callable function . | train | false |
47,599 | def _split_mul(f, x):
from sympy import polarify, unpolarify
fac = S(1)
po = S(1)
g = S(1)
f = expand_power_base(f)
args = Mul.make_args(f)
for a in args:
if (a == x):
po *= x
elif (x not in a.free_symbols):
fac *= a
else:
if (a.is_Pow and (x not in a.exp.free_symbols)):
(c, t) = a.base.as_coeff_mul(x)
if (t != (x,)):
(c, t) = expand_mul(a.base).as_coeff_mul(x)
if (t == (x,)):
po *= (x ** a.exp)
fac *= unpolarify(polarify((c ** a.exp), subs=False))
continue
g *= a
return (fac, po, g)
| [
"def",
"_split_mul",
"(",
"f",
",",
"x",
")",
":",
"from",
"sympy",
"import",
"polarify",
",",
"unpolarify",
"fac",
"=",
"S",
"(",
"1",
")",
"po",
"=",
"S",
"(",
"1",
")",
"g",
"=",
"S",
"(",
"1",
")",
"f",
"=",
"expand_power_base",
"(",
"f",
")",
"args",
"=",
"Mul",
".",
"make_args",
"(",
"f",
")",
"for",
"a",
"in",
"args",
":",
"if",
"(",
"a",
"==",
"x",
")",
":",
"po",
"*=",
"x",
"elif",
"(",
"x",
"not",
"in",
"a",
".",
"free_symbols",
")",
":",
"fac",
"*=",
"a",
"else",
":",
"if",
"(",
"a",
".",
"is_Pow",
"and",
"(",
"x",
"not",
"in",
"a",
".",
"exp",
".",
"free_symbols",
")",
")",
":",
"(",
"c",
",",
"t",
")",
"=",
"a",
".",
"base",
".",
"as_coeff_mul",
"(",
"x",
")",
"if",
"(",
"t",
"!=",
"(",
"x",
",",
")",
")",
":",
"(",
"c",
",",
"t",
")",
"=",
"expand_mul",
"(",
"a",
".",
"base",
")",
".",
"as_coeff_mul",
"(",
"x",
")",
"if",
"(",
"t",
"==",
"(",
"x",
",",
")",
")",
":",
"po",
"*=",
"(",
"x",
"**",
"a",
".",
"exp",
")",
"fac",
"*=",
"unpolarify",
"(",
"polarify",
"(",
"(",
"c",
"**",
"a",
".",
"exp",
")",
",",
"subs",
"=",
"False",
")",
")",
"continue",
"g",
"*=",
"a",
"return",
"(",
"fac",
",",
"po",
",",
"g",
")"
] | split expression f into fac , po , g where fac is independent of x , po is a power of x , and g is the rest . | train | false |
47,601 | def _unmask_crc(masked_crc):
rot = ((masked_crc - _CRC_MASK_DELTA) & 4294967295L)
return (((rot >> 17) | (rot << 15)) & 4294967295L)
| [
"def",
"_unmask_crc",
"(",
"masked_crc",
")",
":",
"rot",
"=",
"(",
"(",
"masked_crc",
"-",
"_CRC_MASK_DELTA",
")",
"&",
"4294967295",
"L",
")",
"return",
"(",
"(",
"(",
"rot",
">>",
"17",
")",
"|",
"(",
"rot",
"<<",
"15",
")",
")",
"&",
"4294967295",
"L",
")"
] | unmask crc . | train | false |
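The inverse (masking) step, reconstructed from the unmask logic above: rotate right by 15 bits, then add the delta. _CRC_MASK_DELTA is assumed here to be 0xa282ead8, the constant used by the leveldb/TFRecord masked-crc32c scheme:

_CRC_MASK_DELTA = 0xa282ead8  # assumption; the real constant lives elsewhere in the module

def _mask_crc(crc):
    rot = ((crc >> 15) | (crc << 17)) & 0xffffffff  # rotate right by 15 bits
    return (rot + _CRC_MASK_DELTA) & 0xffffffff

assert _unmask_crc(_mask_crc(0xdeadbeef)) == 0xdeadbeef  # round-trips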
47,602 | def client_info_callback(source_port, dest_ip, dest_port):
try:
proc = pyblame.util.find_connection_owner((None, source_port), (dest_ip, dest_port), relaxed=True)
if (psutil.version_info[0] < 2):
cmdline = proc.cmdline
else:
cmdline = proc.cmdline()
cmd = ' '.join(cmdline[:2])
logger.info(('Blame request for %s=>%s:%s owner:%s command:%s' % (source_port, dest_ip, dest_port, proc.pid, cmd)))
return [pyblame.blame.Application(cmd, 0)]
except ValueError:
logger.info(('Blame request for %s=>%s:%s unknown' % (source_port, dest_ip, dest_port)))
return None
| [
"def",
"client_info_callback",
"(",
"source_port",
",",
"dest_ip",
",",
"dest_port",
")",
":",
"try",
":",
"proc",
"=",
"pyblame",
".",
"util",
".",
"find_connection_owner",
"(",
"(",
"None",
",",
"source_port",
")",
",",
"(",
"dest_ip",
",",
"dest_port",
")",
",",
"relaxed",
"=",
"True",
")",
"if",
"(",
"psutil",
".",
"version_info",
"[",
"0",
"]",
"<",
"2",
")",
":",
"cmdline",
"=",
"proc",
".",
"cmdline",
"else",
":",
"cmdline",
"=",
"proc",
".",
"cmdline",
"(",
")",
"cmd",
"=",
"' '",
".",
"join",
"(",
"cmdline",
"[",
":",
"2",
"]",
")",
"logger",
".",
"info",
"(",
"(",
"'Blame request for %s=>%s:%s owner:%s command:%s'",
"%",
"(",
"source_port",
",",
"dest_ip",
",",
"dest_port",
",",
"proc",
".",
"pid",
",",
"cmd",
")",
")",
")",
"return",
"[",
"pyblame",
".",
"blame",
".",
"Application",
"(",
"cmd",
",",
"0",
")",
"]",
"except",
"ValueError",
":",
"logger",
".",
"info",
"(",
"(",
"'Blame request for %s=>%s:%s unknown'",
"%",
"(",
"source_port",
",",
"dest_ip",
",",
"dest_port",
")",
")",
")",
"return",
"None"
] | called when client information is requested . | train | false |
47,603 | def _sphere_to_cartesian(theta, phi, r):
z = (r * np.sin(phi))
rcos_phi = (r * np.cos(phi))
x = (rcos_phi * np.cos(theta))
y = (rcos_phi * np.sin(theta))
return (x, y, z)
| [
"def",
"_sphere_to_cartesian",
"(",
"theta",
",",
"phi",
",",
"r",
")",
":",
"z",
"=",
"(",
"r",
"*",
"np",
".",
"sin",
"(",
"phi",
")",
")",
"rcos_phi",
"=",
"(",
"r",
"*",
"np",
".",
"cos",
"(",
"phi",
")",
")",
"x",
"=",
"(",
"rcos_phi",
"*",
"np",
".",
"cos",
"(",
"theta",
")",
")",
"y",
"=",
"(",
"rcos_phi",
"*",
"np",
".",
"sin",
"(",
"theta",
")",
")",
"return",
"(",
"x",
",",
"y",
",",
"z",
")"
] | convert using old function . | train | false |
47,604 | def require_invoke_minversion(min_version, verbose=False):
try:
import invoke
invoke_version = invoke.__version__
except ImportError:
invoke_version = '__NOT_INSTALLED'
if (invoke_version < min_version):
message = ('REQUIRE: invoke.version >= %s (but was: %s)' % (min_version, invoke_version))
message += ('\nUSE: pip install invoke>=%s' % min_version)
raise VersionRequirementError(message)
INVOKE_VERSION = os.environ.get('INVOKE_VERSION', None)
if (verbose and (not INVOKE_VERSION)):
os.environ['INVOKE_VERSION'] = invoke_version
print(('USING: invoke.version=%s' % invoke_version))
| [
"def",
"require_invoke_minversion",
"(",
"min_version",
",",
"verbose",
"=",
"False",
")",
":",
"try",
":",
"import",
"invoke",
"invoke_version",
"=",
"invoke",
".",
"__version__",
"except",
"ImportError",
":",
"invoke_version",
"=",
"'__NOT_INSTALLED'",
"if",
"(",
"invoke_version",
"<",
"min_version",
")",
":",
"message",
"=",
"(",
"'REQUIRE: invoke.version >= %s (but was: %s)'",
"%",
"(",
"min_version",
",",
"invoke_version",
")",
")",
"message",
"+=",
"(",
"'\\nUSE: pip install invoke>=%s'",
"%",
"min_version",
")",
"raise",
"VersionRequirementError",
"(",
"message",
")",
"INVOKE_VERSION",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'INVOKE_VERSION'",
",",
"None",
")",
"if",
"(",
"verbose",
"and",
"(",
"not",
"INVOKE_VERSION",
")",
")",
":",
"os",
".",
"environ",
"[",
"'INVOKE_VERSION'",
"]",
"=",
"invoke_version",
"print",
"(",
"(",
"'USING: invoke.version=%s'",
"%",
"invoke_version",
")",
")"
] | ensures that :mod:invoke has at least the given minimum version . | train | true |
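Typical use near the top of a tasks.py (the minimum shown is illustrative). Note the comparison is plain string ordering, so multi-digit components compare lexicographically ('1.10.0' < '1.9.0'), and the '__NOT_INSTALLED' sentinel sorts above digit-led versions, so a missing invoke can slip past the check:

require_invoke_minversion('0.7.0', verbose=True)
# first call prints: USING: invoke.version=<installed version>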
47,605 | def locate_unbalanced_start(unbalanced_start, pre_delete, post_delete):
while 1:
if (not unbalanced_start):
break
finding = unbalanced_start[0]
finding_name = finding.split()[0].strip('<>')
if (not post_delete):
break
next = post_delete[0]
if ((next is DEL_START) or (not next.startswith('<'))):
break
if (next[1] == '/'):
break
name = next.split()[0].strip('<>')
if (name == 'ins'):
break
assert (name != 'del'), ('Unexpected delete tag: %r' % next)
if (name == finding_name):
unbalanced_start.pop(0)
pre_delete.append(post_delete.pop(0))
else:
break
| [
"def",
"locate_unbalanced_start",
"(",
"unbalanced_start",
",",
"pre_delete",
",",
"post_delete",
")",
":",
"while",
"1",
":",
"if",
"(",
"not",
"unbalanced_start",
")",
":",
"break",
"finding",
"=",
"unbalanced_start",
"[",
"0",
"]",
"finding_name",
"=",
"finding",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
"'<>'",
")",
"if",
"(",
"not",
"post_delete",
")",
":",
"break",
"next",
"=",
"post_delete",
"[",
"0",
"]",
"if",
"(",
"(",
"next",
"is",
"DEL_START",
")",
"or",
"(",
"not",
"next",
".",
"startswith",
"(",
"'<'",
")",
")",
")",
":",
"break",
"if",
"(",
"next",
"[",
"1",
"]",
"==",
"'/'",
")",
":",
"break",
"name",
"=",
"next",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
"'<>'",
")",
"if",
"(",
"name",
"==",
"'ins'",
")",
":",
"break",
"assert",
"(",
"name",
"!=",
"'del'",
")",
",",
"(",
"'Unexpected delete tag: %r'",
"%",
"next",
")",
"if",
"(",
"name",
"==",
"finding_name",
")",
":",
"unbalanced_start",
".",
"pop",
"(",
"0",
")",
"pre_delete",
".",
"append",
"(",
"post_delete",
".",
"pop",
"(",
"0",
")",
")",
"else",
":",
"break"
] | pre_delete and post_delete implicitly point to a place in the document . | train | true |
47,606 | def host_keys(keydir=None, private=True):
if (not keydir):
if (__grains__['kernel'] == 'Linux'):
keydir = '/etc/ssh'
else:
raise SaltInvocationError('ssh.host_keys: Please specify a keydir')
keys = {}
for fn_ in os.listdir(keydir):
if fn_.startswith('ssh_host_'):
if ((fn_.endswith('.pub') is False) and (private is False)):
log.info('Skipping private key file {0} as private is set to False'.format(fn_))
continue
top = fn_.split('.')
comps = top[0].split('_')
kname = comps[2]
if (len(top) > 1):
kname += '.{0}'.format(top[1])
try:
with salt.utils.fopen(os.path.join(keydir, fn_), 'r') as _fh:
keys[kname] = _fh.readline()
if (keys[kname] != 'SSH PRIVATE KEY FILE FORMAT 1.1\n'):
keys[kname] += _fh.read()
keys[kname] = keys[kname].strip()
except (IOError, OSError):
keys[kname] = ''
return keys
| [
"def",
"host_keys",
"(",
"keydir",
"=",
"None",
",",
"private",
"=",
"True",
")",
":",
"if",
"(",
"not",
"keydir",
")",
":",
"if",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"==",
"'Linux'",
")",
":",
"keydir",
"=",
"'/etc/ssh'",
"else",
":",
"raise",
"SaltInvocationError",
"(",
"'ssh.host_keys: Please specify a keydir'",
")",
"keys",
"=",
"{",
"}",
"for",
"fn_",
"in",
"os",
".",
"listdir",
"(",
"keydir",
")",
":",
"if",
"fn_",
".",
"startswith",
"(",
"'ssh_host_'",
")",
":",
"if",
"(",
"(",
"fn_",
".",
"endswith",
"(",
"'.pub'",
")",
"is",
"False",
")",
"and",
"(",
"private",
"is",
"False",
")",
")",
":",
"log",
".",
"info",
"(",
"'Skipping private key file {0} as private is set to False'",
".",
"format",
"(",
"fn_",
")",
")",
"continue",
"top",
"=",
"fn_",
".",
"split",
"(",
"'.'",
")",
"comps",
"=",
"top",
"[",
"0",
"]",
".",
"split",
"(",
"'_'",
")",
"kname",
"=",
"comps",
"[",
"2",
"]",
"if",
"(",
"len",
"(",
"top",
")",
">",
"1",
")",
":",
"kname",
"+=",
"'.{0}'",
".",
"format",
"(",
"top",
"[",
"1",
"]",
")",
"try",
":",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"os",
".",
"path",
".",
"join",
"(",
"keydir",
",",
"fn_",
")",
",",
"'r'",
")",
"as",
"_fh",
":",
"keys",
"[",
"kname",
"]",
"=",
"_fh",
".",
"readline",
"(",
")",
"if",
"(",
"keys",
"[",
"kname",
"]",
"!=",
"'SSH PRIVATE KEY FILE FORMAT 1.1\\n'",
")",
":",
"keys",
"[",
"kname",
"]",
"+=",
"_fh",
".",
"read",
"(",
")",
"keys",
"[",
"kname",
"]",
"=",
"keys",
"[",
"kname",
"]",
".",
"strip",
"(",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
":",
"keys",
"[",
"kname",
"]",
"=",
"''",
"return",
"keys"
] | return the minion's host keys . | train | false |
47,607 | def get_latest_episode(series):
session = Session.object_session(series)
episode = session.query(Episode).join(Episode.series).filter((Series.id == series.id)).filter((Episode.season != None)).order_by(desc(Episode.season)).order_by(desc(Episode.number)).first()
if (not episode):
return False
return episode
| [
"def",
"get_latest_episode",
"(",
"series",
")",
":",
"session",
"=",
"Session",
".",
"object_session",
"(",
"series",
")",
"episode",
"=",
"session",
".",
"query",
"(",
"Episode",
")",
".",
"join",
"(",
"Episode",
".",
"series",
")",
".",
"filter",
"(",
"(",
"Series",
".",
"id",
"==",
"series",
".",
"id",
")",
")",
".",
"filter",
"(",
"(",
"Episode",
".",
"season",
"!=",
"None",
")",
")",
".",
"order_by",
"(",
"desc",
"(",
"Episode",
".",
"season",
")",
")",
".",
"order_by",
"(",
"desc",
"(",
"Episode",
".",
"number",
")",
")",
".",
"first",
"(",
")",
"if",
"(",
"not",
"episode",
")",
":",
"return",
"False",
"return",
"episode"
] | return latest known identifier in dict for series name . | train | false |
47,609 | def MakePmfFromHist(hist, label=None):
if (label is None):
label = hist.label
return Pmf(hist, label=label)
| [
"def",
"MakePmfFromHist",
"(",
"hist",
",",
"label",
"=",
"None",
")",
":",
"if",
"(",
"label",
"is",
"None",
")",
":",
"label",
"=",
"hist",
".",
"label",
"return",
"Pmf",
"(",
"hist",
",",
"label",
"=",
"label",
")"
] | makes a normalized pmf from a hist object . | train | false |
47,610 | def generate_aliases_global(fieldfile, **kwargs):
from easy_thumbnails.files import generate_all_aliases
generate_all_aliases(fieldfile, include_global=True)
| [
"def",
"generate_aliases_global",
"(",
"fieldfile",
",",
"**",
"kwargs",
")",
":",
"from",
"easy_thumbnails",
".",
"files",
"import",
"generate_all_aliases",
"generate_all_aliases",
"(",
"fieldfile",
",",
"include_global",
"=",
"True",
")"
] | a saved_file signal handler which generates thumbnails for all of the field's aliases , including project-wide ones . | train | true |
47,612 | def get_members_from_bulk(bulk_data, **additional_fields):
members = []
for data in bulk_data:
data_copy = data.copy()
username = data_copy.pop('username')
try:
validate_email(username)
data_copy['email'] = username
except ValidationError:
user = User.objects.filter(username=username).first()
data_copy['user_id'] = user.id
data_copy.update(additional_fields)
members.append(models.Membership(**data_copy))
return members
| [
"def",
"get_members_from_bulk",
"(",
"bulk_data",
",",
"**",
"additional_fields",
")",
":",
"members",
"=",
"[",
"]",
"for",
"data",
"in",
"bulk_data",
":",
"data_copy",
"=",
"data",
".",
"copy",
"(",
")",
"username",
"=",
"data_copy",
".",
"pop",
"(",
"'username'",
")",
"try",
":",
"validate_email",
"(",
"username",
")",
"data_copy",
"[",
"'email'",
"]",
"=",
"username",
"except",
"ValidationError",
":",
"user",
"=",
"User",
".",
"objects",
".",
"filter",
"(",
"username",
"=",
"username",
")",
".",
"first",
"(",
")",
"data_copy",
"[",
"'user_id'",
"]",
"=",
"user",
".",
"id",
"data_copy",
".",
"update",
"(",
"additional_fields",
")",
"members",
".",
"append",
"(",
"models",
".",
"Membership",
"(",
"**",
"data_copy",
")",
")",
"return",
"members"
] | convert bulk_data into a list of members . | train | false |
47,615 | def test_import_list_empty_string():
x = __import__('testpkg1', {}, {}, [''])
Assert((not ('' in dir(x))))
| [
"def",
"test_import_list_empty_string",
"(",
")",
":",
"x",
"=",
"__import__",
"(",
"'testpkg1'",
",",
"{",
"}",
",",
"{",
"}",
",",
"[",
"''",
"]",
")",
"Assert",
"(",
"(",
"not",
"(",
"''",
"in",
"dir",
"(",
"x",
")",
")",
")",
")"
] | importing w/ an empty string in the from list should be ignored . | train | false |
47,616 | def does_not_modify_other_keys_in_errors_dict(validator):
def call_and_assert(key, data, errors, context=None):
if (context is None):
context = {}
original_data = copy.deepcopy(data)
original_errors = copy.deepcopy(errors)
original_context = copy.deepcopy(context)
result = validator(key, data, errors, context=context)
if (key in errors):
del errors[key]
if (key in original_errors):
del original_errors[key]
assert (errors.keys() == original_errors.keys()), 'Should not add or remove keys from errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'.format(key=key, data=original_data, errors=original_errors, context=original_context)
for key_ in errors:
assert (errors[key_] == original_errors[key_]), 'Should not modify other keys in errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'.format(key=key, data=original_data, errors=original_errors, context=original_context)
return result
return call_and_assert
| [
"def",
"does_not_modify_other_keys_in_errors_dict",
"(",
"validator",
")",
":",
"def",
"call_and_assert",
"(",
"key",
",",
"data",
",",
"errors",
",",
"context",
"=",
"None",
")",
":",
"if",
"(",
"context",
"is",
"None",
")",
":",
"context",
"=",
"{",
"}",
"original_data",
"=",
"copy",
".",
"deepcopy",
"(",
"data",
")",
"original_errors",
"=",
"copy",
".",
"deepcopy",
"(",
"errors",
")",
"original_context",
"=",
"copy",
".",
"deepcopy",
"(",
"context",
")",
"result",
"=",
"validator",
"(",
"key",
",",
"data",
",",
"errors",
",",
"context",
"=",
"context",
")",
"if",
"(",
"key",
"in",
"errors",
")",
":",
"del",
"errors",
"[",
"key",
"]",
"if",
"(",
"key",
"in",
"original_errors",
")",
":",
"del",
"original_errors",
"[",
"key",
"]",
"assert",
"(",
"errors",
".",
"keys",
"(",
")",
"==",
"original_errors",
".",
"keys",
"(",
")",
")",
",",
"'Should not add or remove keys from errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'",
".",
"format",
"(",
"key",
"=",
"key",
",",
"data",
"=",
"original_data",
",",
"errors",
"=",
"original_errors",
",",
"context",
"=",
"original_context",
")",
"for",
"key_",
"in",
"errors",
":",
"assert",
"(",
"errors",
"[",
"key_",
"]",
"==",
"original_errors",
"[",
"key_",
"]",
")",
",",
"'Should not modify other keys in errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'",
".",
"format",
"(",
"key",
"=",
"key",
",",
"data",
"=",
"original_data",
",",
"errors",
"=",
"original_errors",
",",
"context",
"=",
"original_context",
")",
"return",
"result",
"return",
"call_and_assert"
] | a decorator that asserts that the decorated validator doesn't add , remove , or modify other keys in the errors dict . | train | false |
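A usage sketch against a hypothetical well-behaved validator (key/data/errors follow the flattened-dict validator convention):

def noop_validator(key, data, errors, context=None):
    pass  # touches nothing, so every assertion should hold

checked = does_not_modify_other_keys_in_errors_dict(noop_validator)
checked(('name',), {('name',): 'x'}, {('name',): [], ('title',): []})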
47,617 | def EvalNormalPdf(x, mu, sigma):
return stats.norm.pdf(x, mu, sigma)
| [
"def",
"EvalNormalPdf",
"(",
"x",
",",
"mu",
",",
"sigma",
")",
":",
"return",
"stats",
".",
"norm",
".",
"pdf",
"(",
"x",
",",
"mu",
",",
"sigma",
")"
] | computes the unnormalized pdf of the normal distribution . | train | false |