id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
6,217
|
def _get_capa_types():
capa_types = {tag: _get_human_name(registry.get_class_for_tag(tag)) for tag in registry.registered_tags()}
return ([{'value': ANY_CAPA_TYPE_VALUE, 'display_name': _('Any Type')}] + sorted([{'value': capa_type, 'display_name': caption} for (capa_type, caption) in capa_types.items()], key=(lambda item: item.get('display_name'))))
|
[
"def",
"_get_capa_types",
"(",
")",
":",
"capa_types",
"=",
"{",
"tag",
":",
"_get_human_name",
"(",
"registry",
".",
"get_class_for_tag",
"(",
"tag",
")",
")",
"for",
"tag",
"in",
"registry",
".",
"registered_tags",
"(",
")",
"}",
"return",
"(",
"[",
"{",
"'value'",
":",
"ANY_CAPA_TYPE_VALUE",
",",
"'display_name'",
":",
"_",
"(",
"'Any Type'",
")",
"}",
"]",
"+",
"sorted",
"(",
"[",
"{",
"'value'",
":",
"capa_type",
",",
"'display_name'",
":",
"caption",
"}",
"for",
"(",
"capa_type",
",",
"caption",
")",
"in",
"capa_types",
".",
"items",
"(",
")",
"]",
",",
"key",
"=",
"(",
"lambda",
"item",
":",
"item",
".",
"get",
"(",
"'display_name'",
")",
")",
")",
")"
] |
gets capa types tags and labels .
|
train
| false
|
6,218
|
def _best_mime():
supported = []
renders = {}
for mime in _MIME_TYPES:
if app.config.get(mime['tag'], True):
for mime_type in mime['mime']:
supported.append(mime_type)
renders[mime_type] = mime['renderer']
if (len(supported) == 0):
abort(500, description=debug_error_message('Configuration error: no supported mime types'))
best_match = (request.accept_mimetypes.best_match(supported) or supported[0])
return (best_match, renders[best_match])
|
[
"def",
"_best_mime",
"(",
")",
":",
"supported",
"=",
"[",
"]",
"renders",
"=",
"{",
"}",
"for",
"mime",
"in",
"_MIME_TYPES",
":",
"if",
"app",
".",
"config",
".",
"get",
"(",
"mime",
"[",
"'tag'",
"]",
",",
"True",
")",
":",
"for",
"mime_type",
"in",
"mime",
"[",
"'mime'",
"]",
":",
"supported",
".",
"append",
"(",
"mime_type",
")",
"renders",
"[",
"mime_type",
"]",
"=",
"mime",
"[",
"'renderer'",
"]",
"if",
"(",
"len",
"(",
"supported",
")",
"==",
"0",
")",
":",
"abort",
"(",
"500",
",",
"description",
"=",
"debug_error_message",
"(",
"'Configuration error: no supported mime types'",
")",
")",
"best_match",
"=",
"(",
"request",
".",
"accept_mimetypes",
".",
"best_match",
"(",
"supported",
")",
"or",
"supported",
"[",
"0",
"]",
")",
"return",
"(",
"best_match",
",",
"renders",
"[",
"best_match",
"]",
")"
] |
returns the best match between the requested mime type and the ones supported by eve .
|
train
| false
|
6,219
|
def test_refresh_with_callbacks(refresher):
callbacks = [Mock()]
pgexecute_class = Mock()
pgexecute = Mock()
special = Mock()
with patch('pgcli.completion_refresher.PGExecute', pgexecute_class):
refresher.refreshers = {}
refresher.refresh(pgexecute, special, callbacks)
time.sleep(1)
assert (callbacks[0].call_count == 1)
|
[
"def",
"test_refresh_with_callbacks",
"(",
"refresher",
")",
":",
"callbacks",
"=",
"[",
"Mock",
"(",
")",
"]",
"pgexecute_class",
"=",
"Mock",
"(",
")",
"pgexecute",
"=",
"Mock",
"(",
")",
"special",
"=",
"Mock",
"(",
")",
"with",
"patch",
"(",
"'pgcli.completion_refresher.PGExecute'",
",",
"pgexecute_class",
")",
":",
"refresher",
".",
"refreshers",
"=",
"{",
"}",
"refresher",
".",
"refresh",
"(",
"pgexecute",
",",
"special",
",",
"callbacks",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"assert",
"(",
"callbacks",
"[",
"0",
"]",
".",
"call_count",
"==",
"1",
")"
] |
callbacks must be called .
|
train
| false
|
6,220
|
def make_naive(value, timezone):
value = value.astimezone(timezone)
if hasattr(timezone, 'normalize'):
value = timezone.normalize(value)
return value.replace(tzinfo=None)
|
[
"def",
"make_naive",
"(",
"value",
",",
"timezone",
")",
":",
"value",
"=",
"value",
".",
"astimezone",
"(",
"timezone",
")",
"if",
"hasattr",
"(",
"timezone",
",",
"'normalize'",
")",
":",
"value",
"=",
"timezone",
".",
"normalize",
"(",
"value",
")",
"return",
"value",
".",
"replace",
"(",
"tzinfo",
"=",
"None",
")"
] |
makes an aware datetime .
|
train
| true
|
6,221
|
@image_comparison(baseline_images=[u'test_eventplot_defaults'], extensions=[u'png'], remove_text=True)
def test_eventplot_defaults():
np.random.seed(0)
data1 = np.random.random([32, 20]).tolist()
data2 = np.random.random([6, 20]).tolist()
data = (data1 + data2)
fig = plt.figure()
axobj = fig.add_subplot(111)
colls = axobj.eventplot(data)
|
[
"@",
"image_comparison",
"(",
"baseline_images",
"=",
"[",
"u'test_eventplot_defaults'",
"]",
",",
"extensions",
"=",
"[",
"u'png'",
"]",
",",
"remove_text",
"=",
"True",
")",
"def",
"test_eventplot_defaults",
"(",
")",
":",
"np",
".",
"random",
".",
"seed",
"(",
"0",
")",
"data1",
"=",
"np",
".",
"random",
".",
"random",
"(",
"[",
"32",
",",
"20",
"]",
")",
".",
"tolist",
"(",
")",
"data2",
"=",
"np",
".",
"random",
".",
"random",
"(",
"[",
"6",
",",
"20",
"]",
")",
".",
"tolist",
"(",
")",
"data",
"=",
"(",
"data1",
"+",
"data2",
")",
"fig",
"=",
"plt",
".",
"figure",
"(",
")",
"axobj",
"=",
"fig",
".",
"add_subplot",
"(",
"111",
")",
"colls",
"=",
"axobj",
".",
"eventplot",
"(",
"data",
")"
] |
test that eventplot produces the correct output given the default params .
|
train
| false
|
6,224
|
def format_group(group, show_url=True):
out = '\x02{}\x02'.format(group['name'])
if group['description']:
out += ': "{}"'.format(formatting.truncate(group['description']))
out += ' - Owned by \x02{}\x02.'.format(group['creator']['username'])
if show_url:
out += ' - {}'.format(web.try_shorten(group['permalink_url']))
return out
|
[
"def",
"format_group",
"(",
"group",
",",
"show_url",
"=",
"True",
")",
":",
"out",
"=",
"'\\x02{}\\x02'",
".",
"format",
"(",
"group",
"[",
"'name'",
"]",
")",
"if",
"group",
"[",
"'description'",
"]",
":",
"out",
"+=",
"': \"{}\"'",
".",
"format",
"(",
"formatting",
".",
"truncate",
"(",
"group",
"[",
"'description'",
"]",
")",
")",
"out",
"+=",
"' - Owned by \\x02{}\\x02.'",
".",
"format",
"(",
"group",
"[",
"'creator'",
"]",
"[",
"'username'",
"]",
")",
"if",
"show_url",
":",
"out",
"+=",
"' - {}'",
".",
"format",
"(",
"web",
".",
"try_shorten",
"(",
"group",
"[",
"'permalink_url'",
"]",
")",
")",
"return",
"out"
] |
takes a soundcloud group and returns a formatting string .
|
train
| false
|
6,225
|
def salt_spm():
import salt.cli.spm
spm = salt.cli.spm.SPM()
spm.run()
|
[
"def",
"salt_spm",
"(",
")",
":",
"import",
"salt",
".",
"cli",
".",
"spm",
"spm",
"=",
"salt",
".",
"cli",
".",
"spm",
".",
"SPM",
"(",
")",
"spm",
".",
"run",
"(",
")"
] |
the main function for spm .
|
train
| true
|
6,226
|
def _paramsFileHead():
str = (getCopyrightHead() + '\n\n## This file defines parameters for a prediction experiment.\n\n###############################################################################\n# IMPORTANT!!!\n# This params file is dynamically generated by the RunExperimentPermutations\n# script. Any changes made manually will be over-written the next time\n# RunExperimentPermutations is run!!!\n###############################################################################\n\n\nfrom nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription\n\n# the sub-experiment configuration\nconfig ={\n')
return str
|
[
"def",
"_paramsFileHead",
"(",
")",
":",
"str",
"=",
"(",
"getCopyrightHead",
"(",
")",
"+",
"'\\n\\n## This file defines parameters for a prediction experiment.\\n\\n###############################################################################\\n# IMPORTANT!!!\\n# This params file is dynamically generated by the RunExperimentPermutations\\n# script. Any changes made manually will be over-written the next time\\n# RunExperimentPermutations is run!!!\\n###############################################################################\\n\\n\\nfrom nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription\\n\\n# the sub-experiment configuration\\nconfig ={\\n'",
")",
"return",
"str"
] |
this is the first portion of every sub-experiment params file we generate .
|
train
| false
|
6,227
|
def get_profit_data(year, quarter):
if (ct._check_input(year, quarter) is True):
ct._write_head()
data = _get_profit_data(year, quarter, 1, pd.DataFrame())
if (data is not None):
data['code'] = data['code'].map((lambda x: str(x).zfill(6)))
return data
|
[
"def",
"get_profit_data",
"(",
"year",
",",
"quarter",
")",
":",
"if",
"(",
"ct",
".",
"_check_input",
"(",
"year",
",",
"quarter",
")",
"is",
"True",
")",
":",
"ct",
".",
"_write_head",
"(",
")",
"data",
"=",
"_get_profit_data",
"(",
"year",
",",
"quarter",
",",
"1",
",",
"pd",
".",
"DataFrame",
"(",
")",
")",
"if",
"(",
"data",
"is",
"not",
"None",
")",
":",
"data",
"[",
"'code'",
"]",
"=",
"data",
"[",
"'code'",
"]",
".",
"map",
"(",
"(",
"lambda",
"x",
":",
"str",
"(",
"x",
")",
".",
"zfill",
"(",
"6",
")",
")",
")",
"return",
"data"
] |
parameters year:int 年度 e .
|
train
| false
|
6,228
|
def minor_version(best=False):
return _distro.minor_version(best)
|
[
"def",
"minor_version",
"(",
"best",
"=",
"False",
")",
":",
"return",
"_distro",
".",
"minor_version",
"(",
"best",
")"
] |
return the minor version of the current linux distribution .
|
train
| false
|
6,230
|
def getChanges(request, options=None):
payload = json.loads(request.args['payload'][0])
repo_url = ('%s%s' % (payload['canon_url'], payload['repository']['absolute_url']))
project = request.args.get('project', [''])[0]
changes = []
for commit in payload['commits']:
changes.append({'author': commit['raw_author'], 'files': [f['file'] for f in commit['files']], 'comments': commit['message'], 'revision': commit['raw_node'], 'when_timestamp': dateparse(commit['utctimestamp']), 'branch': commit['branch'], 'revlink': ('%scommits/%s' % (repo_url, commit['raw_node'])), 'repository': repo_url, 'project': project})
log.msg(('New revision: %s' % (commit['node'],)))
log.msg(('Received %s changes from bitbucket' % (len(changes),)))
return (changes, payload['repository']['scm'])
|
[
"def",
"getChanges",
"(",
"request",
",",
"options",
"=",
"None",
")",
":",
"payload",
"=",
"json",
".",
"loads",
"(",
"request",
".",
"args",
"[",
"'payload'",
"]",
"[",
"0",
"]",
")",
"repo_url",
"=",
"(",
"'%s%s'",
"%",
"(",
"payload",
"[",
"'canon_url'",
"]",
",",
"payload",
"[",
"'repository'",
"]",
"[",
"'absolute_url'",
"]",
")",
")",
"project",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'project'",
",",
"[",
"''",
"]",
")",
"[",
"0",
"]",
"changes",
"=",
"[",
"]",
"for",
"commit",
"in",
"payload",
"[",
"'commits'",
"]",
":",
"changes",
".",
"append",
"(",
"{",
"'author'",
":",
"commit",
"[",
"'raw_author'",
"]",
",",
"'files'",
":",
"[",
"f",
"[",
"'file'",
"]",
"for",
"f",
"in",
"commit",
"[",
"'files'",
"]",
"]",
",",
"'comments'",
":",
"commit",
"[",
"'message'",
"]",
",",
"'revision'",
":",
"commit",
"[",
"'raw_node'",
"]",
",",
"'when_timestamp'",
":",
"dateparse",
"(",
"commit",
"[",
"'utctimestamp'",
"]",
")",
",",
"'branch'",
":",
"commit",
"[",
"'branch'",
"]",
",",
"'revlink'",
":",
"(",
"'%scommits/%s'",
"%",
"(",
"repo_url",
",",
"commit",
"[",
"'raw_node'",
"]",
")",
")",
",",
"'repository'",
":",
"repo_url",
",",
"'project'",
":",
"project",
"}",
")",
"log",
".",
"msg",
"(",
"(",
"'New revision: %s'",
"%",
"(",
"commit",
"[",
"'node'",
"]",
",",
")",
")",
")",
"log",
".",
"msg",
"(",
"(",
"'Received %s changes from bitbucket'",
"%",
"(",
"len",
"(",
"changes",
")",
",",
")",
")",
")",
"return",
"(",
"changes",
",",
"payload",
"[",
"'repository'",
"]",
"[",
"'scm'",
"]",
")"
] |
reponds only to post events and starts the build process :arguments: request the http request object .
|
train
| false
|
6,234
|
def package_absent(m, name):
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
(name_install, name_remove, urls) = get_want_state(m, name, remove=True)
if name_install:
m.fail_json(msg="Can not combine '+' prefix with state=remove/absent.")
if urls:
m.fail_json(msg='Can not remove via URL.')
if (m.params['type'] == 'patch'):
m.fail_json(msg='Can not remove patches.')
prerun_state = get_installed_state(m, name_remove)
remove_version = [(p + name_remove[p]) for p in name_remove if name_remove[p]]
name_remove = [p for p in name_remove if (p in prerun_state)]
if ((not name_remove) and (not remove_version)):
return (None, retvals)
cmd = get_cmd(m, 'remove')
cmd.extend(name_remove)
cmd.extend(remove_version)
retvals['cmd'] = cmd
(result, retvals['rc'], retvals['stdout'], retvals['stderr']) = parse_zypper_xml(m, cmd)
return (result, retvals)
|
[
"def",
"package_absent",
"(",
"m",
",",
"name",
")",
":",
"retvals",
"=",
"{",
"'rc'",
":",
"0",
",",
"'stdout'",
":",
"''",
",",
"'stderr'",
":",
"''",
"}",
"(",
"name_install",
",",
"name_remove",
",",
"urls",
")",
"=",
"get_want_state",
"(",
"m",
",",
"name",
",",
"remove",
"=",
"True",
")",
"if",
"name_install",
":",
"m",
".",
"fail_json",
"(",
"msg",
"=",
"\"Can not combine '+' prefix with state=remove/absent.\"",
")",
"if",
"urls",
":",
"m",
".",
"fail_json",
"(",
"msg",
"=",
"'Can not remove via URL.'",
")",
"if",
"(",
"m",
".",
"params",
"[",
"'type'",
"]",
"==",
"'patch'",
")",
":",
"m",
".",
"fail_json",
"(",
"msg",
"=",
"'Can not remove patches.'",
")",
"prerun_state",
"=",
"get_installed_state",
"(",
"m",
",",
"name_remove",
")",
"remove_version",
"=",
"[",
"(",
"p",
"+",
"name_remove",
"[",
"p",
"]",
")",
"for",
"p",
"in",
"name_remove",
"if",
"name_remove",
"[",
"p",
"]",
"]",
"name_remove",
"=",
"[",
"p",
"for",
"p",
"in",
"name_remove",
"if",
"(",
"p",
"in",
"prerun_state",
")",
"]",
"if",
"(",
"(",
"not",
"name_remove",
")",
"and",
"(",
"not",
"remove_version",
")",
")",
":",
"return",
"(",
"None",
",",
"retvals",
")",
"cmd",
"=",
"get_cmd",
"(",
"m",
",",
"'remove'",
")",
"cmd",
".",
"extend",
"(",
"name_remove",
")",
"cmd",
".",
"extend",
"(",
"remove_version",
")",
"retvals",
"[",
"'cmd'",
"]",
"=",
"cmd",
"(",
"result",
",",
"retvals",
"[",
"'rc'",
"]",
",",
"retvals",
"[",
"'stdout'",
"]",
",",
"retvals",
"[",
"'stderr'",
"]",
")",
"=",
"parse_zypper_xml",
"(",
"m",
",",
"cmd",
")",
"return",
"(",
"result",
",",
"retvals",
")"
] |
remove the packages in name .
|
train
| false
|
6,235
|
def test_autocall_binops():
ip.magic('autocall 2')
f = (lambda x: x)
ip.user_ns['f'] = f
try:
nt.assert_equal(ip.prefilter('f 1'), 'f(1)')
for t in ['f +1', 'f -1']:
nt.assert_equal(ip.prefilter(t), t)
pm = ip.prefilter_manager
ac = AutocallChecker(shell=pm.shell, prefilter_manager=pm, config=pm.config)
try:
ac.priority = 1
ac.exclude_regexp = '^[,&^\\|\\*/]|^is |^not |^in |^and |^or '
pm.sort_checkers()
nt.assert_equal(ip.prefilter('f -1'), 'f(-1)')
nt.assert_equal(ip.prefilter('f +1'), 'f(+1)')
finally:
pm.unregister_checker(ac)
finally:
ip.magic('autocall 0')
del ip.user_ns['f']
|
[
"def",
"test_autocall_binops",
"(",
")",
":",
"ip",
".",
"magic",
"(",
"'autocall 2'",
")",
"f",
"=",
"(",
"lambda",
"x",
":",
"x",
")",
"ip",
".",
"user_ns",
"[",
"'f'",
"]",
"=",
"f",
"try",
":",
"nt",
".",
"assert_equal",
"(",
"ip",
".",
"prefilter",
"(",
"'f 1'",
")",
",",
"'f(1)'",
")",
"for",
"t",
"in",
"[",
"'f +1'",
",",
"'f -1'",
"]",
":",
"nt",
".",
"assert_equal",
"(",
"ip",
".",
"prefilter",
"(",
"t",
")",
",",
"t",
")",
"pm",
"=",
"ip",
".",
"prefilter_manager",
"ac",
"=",
"AutocallChecker",
"(",
"shell",
"=",
"pm",
".",
"shell",
",",
"prefilter_manager",
"=",
"pm",
",",
"config",
"=",
"pm",
".",
"config",
")",
"try",
":",
"ac",
".",
"priority",
"=",
"1",
"ac",
".",
"exclude_regexp",
"=",
"'^[,&^\\\\|\\\\*/]|^is |^not |^in |^and |^or '",
"pm",
".",
"sort_checkers",
"(",
")",
"nt",
".",
"assert_equal",
"(",
"ip",
".",
"prefilter",
"(",
"'f -1'",
")",
",",
"'f(-1)'",
")",
"nt",
".",
"assert_equal",
"(",
"ip",
".",
"prefilter",
"(",
"'f +1'",
")",
",",
"'f(+1)'",
")",
"finally",
":",
"pm",
".",
"unregister_checker",
"(",
"ac",
")",
"finally",
":",
"ip",
".",
"magic",
"(",
"'autocall 0'",
")",
"del",
"ip",
".",
"user_ns",
"[",
"'f'",
"]"
] |
see URL .
|
train
| false
|
6,237
|
def set_guess(guess):
global _GUESS
_GUESS = guess
|
[
"def",
"set_guess",
"(",
"guess",
")",
":",
"global",
"_GUESS",
"_GUESS",
"=",
"guess"
] |
set the default value of the guess parameter for read() parameters guess : bool new default guess value .
|
train
| false
|
6,238
|
def resolve_stream_name(streams, stream_name):
if ((stream_name in STREAM_SYNONYMS) and (stream_name in streams)):
for (name, stream) in streams.items():
if ((stream is streams[stream_name]) and (name not in STREAM_SYNONYMS)):
return name
return stream_name
|
[
"def",
"resolve_stream_name",
"(",
"streams",
",",
"stream_name",
")",
":",
"if",
"(",
"(",
"stream_name",
"in",
"STREAM_SYNONYMS",
")",
"and",
"(",
"stream_name",
"in",
"streams",
")",
")",
":",
"for",
"(",
"name",
",",
"stream",
")",
"in",
"streams",
".",
"items",
"(",
")",
":",
"if",
"(",
"(",
"stream",
"is",
"streams",
"[",
"stream_name",
"]",
")",
"and",
"(",
"name",
"not",
"in",
"STREAM_SYNONYMS",
")",
")",
":",
"return",
"name",
"return",
"stream_name"
] |
returns the real stream name of a synonym .
|
train
| true
|
6,239
|
def find_all_tests(suite):
suites = [suite]
while suites:
s = suites.pop()
try:
suites.extend(s)
except TypeError:
(yield (s, ('%s.%s.%s' % (s.__class__.__module__, s.__class__.__name__, s._testMethodName))))
|
[
"def",
"find_all_tests",
"(",
"suite",
")",
":",
"suites",
"=",
"[",
"suite",
"]",
"while",
"suites",
":",
"s",
"=",
"suites",
".",
"pop",
"(",
")",
"try",
":",
"suites",
".",
"extend",
"(",
"s",
")",
"except",
"TypeError",
":",
"(",
"yield",
"(",
"s",
",",
"(",
"'%s.%s.%s'",
"%",
"(",
"s",
".",
"__class__",
".",
"__module__",
",",
"s",
".",
"__class__",
".",
"__name__",
",",
"s",
".",
"_testMethodName",
")",
")",
")",
")"
] |
yields all the tests and their names from a given suite .
|
train
| false
|
6,240
|
def get_outbound_pdus(text, recipient):
encoding = 'ucs2'
try:
encoded_text = text.encode('gsm')
encoding = 'gsm'
except:
encoded_text = text
csm_max = MSG_LIMITS[encoding][1]
if (len(encoded_text) > (MAX_CSM_SEGMENTS * csm_max)):
raise ValueError('Message text too long')
if (len(encoded_text) <= MSG_LIMITS[encoding][0]):
return [OutboundGsmPdu(text, recipient)]
with __ref_lock:
if (recipient not in __csm_refs):
__csm_refs[recipient] = 0
csm_ref = (__csm_refs[recipient] % 256)
__csm_refs[recipient] += 1
num = int(math.ceil((len(encoded_text) / float(MSG_LIMITS[encoding][0]))))
pdus = []
for seq in range(num):
i = (seq * csm_max)
seg_txt = encoded_text[i:(i + csm_max)]
if (encoding == 'gsm'):
seg_txt = seg_txt.decode('gsm')
pdus.append(OutboundGsmPdu(seg_txt, recipient, csm_ref=csm_ref, csm_seq=(seq + 1), csm_total=num))
return pdus
|
[
"def",
"get_outbound_pdus",
"(",
"text",
",",
"recipient",
")",
":",
"encoding",
"=",
"'ucs2'",
"try",
":",
"encoded_text",
"=",
"text",
".",
"encode",
"(",
"'gsm'",
")",
"encoding",
"=",
"'gsm'",
"except",
":",
"encoded_text",
"=",
"text",
"csm_max",
"=",
"MSG_LIMITS",
"[",
"encoding",
"]",
"[",
"1",
"]",
"if",
"(",
"len",
"(",
"encoded_text",
")",
">",
"(",
"MAX_CSM_SEGMENTS",
"*",
"csm_max",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Message text too long'",
")",
"if",
"(",
"len",
"(",
"encoded_text",
")",
"<=",
"MSG_LIMITS",
"[",
"encoding",
"]",
"[",
"0",
"]",
")",
":",
"return",
"[",
"OutboundGsmPdu",
"(",
"text",
",",
"recipient",
")",
"]",
"with",
"__ref_lock",
":",
"if",
"(",
"recipient",
"not",
"in",
"__csm_refs",
")",
":",
"__csm_refs",
"[",
"recipient",
"]",
"=",
"0",
"csm_ref",
"=",
"(",
"__csm_refs",
"[",
"recipient",
"]",
"%",
"256",
")",
"__csm_refs",
"[",
"recipient",
"]",
"+=",
"1",
"num",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"(",
"len",
"(",
"encoded_text",
")",
"/",
"float",
"(",
"MSG_LIMITS",
"[",
"encoding",
"]",
"[",
"0",
"]",
")",
")",
")",
")",
"pdus",
"=",
"[",
"]",
"for",
"seq",
"in",
"range",
"(",
"num",
")",
":",
"i",
"=",
"(",
"seq",
"*",
"csm_max",
")",
"seg_txt",
"=",
"encoded_text",
"[",
"i",
":",
"(",
"i",
"+",
"csm_max",
")",
"]",
"if",
"(",
"encoding",
"==",
"'gsm'",
")",
":",
"seg_txt",
"=",
"seg_txt",
".",
"decode",
"(",
"'gsm'",
")",
"pdus",
".",
"append",
"(",
"OutboundGsmPdu",
"(",
"seg_txt",
",",
"recipient",
",",
"csm_ref",
"=",
"csm_ref",
",",
"csm_seq",
"=",
"(",
"seq",
"+",
"1",
")",
",",
"csm_total",
"=",
"num",
")",
")",
"return",
"pdus"
] |
returns a list of pdus to send the provided text to the given recipient .
|
train
| false
|
6,242
|
def __converting_factory(specimen_cls, original_factory):
instrumented_cls = __canned_instrumentation[specimen_cls]
def wrapper():
collection = original_factory()
return instrumented_cls(collection)
wrapper.__name__ = ('%sWrapper' % original_factory.__name__)
wrapper.__doc__ = original_factory.__doc__
return wrapper
|
[
"def",
"__converting_factory",
"(",
"specimen_cls",
",",
"original_factory",
")",
":",
"instrumented_cls",
"=",
"__canned_instrumentation",
"[",
"specimen_cls",
"]",
"def",
"wrapper",
"(",
")",
":",
"collection",
"=",
"original_factory",
"(",
")",
"return",
"instrumented_cls",
"(",
"collection",
")",
"wrapper",
".",
"__name__",
"=",
"(",
"'%sWrapper'",
"%",
"original_factory",
".",
"__name__",
")",
"wrapper",
".",
"__doc__",
"=",
"original_factory",
".",
"__doc__",
"return",
"wrapper"
] |
return a wrapper that converts a "canned" collection like set .
|
train
| false
|
6,243
|
def run_path(path_name, init_globals=None, run_name=None):
if (run_name is None):
run_name = '<run_path>'
importer = _get_importer(path_name)
if isinstance(importer, imp.NullImporter):
code = _get_code_from_file(path_name)
return _run_module_code(code, init_globals, run_name, path_name)
else:
sys.path.insert(0, path_name)
try:
main_name = '__main__'
saved_main = sys.modules[main_name]
del sys.modules[main_name]
try:
(mod_name, loader, code, fname) = _get_main_module_details()
finally:
sys.modules[main_name] = saved_main
pkg_name = ''
with _TempModule(run_name) as temp_module:
with _ModifiedArgv0(path_name):
mod_globals = temp_module.module.__dict__
return _run_code(code, mod_globals, init_globals, run_name, fname, loader, pkg_name).copy()
finally:
try:
sys.path.remove(path_name)
except ValueError:
pass
|
[
"def",
"run_path",
"(",
"path_name",
",",
"init_globals",
"=",
"None",
",",
"run_name",
"=",
"None",
")",
":",
"if",
"(",
"run_name",
"is",
"None",
")",
":",
"run_name",
"=",
"'<run_path>'",
"importer",
"=",
"_get_importer",
"(",
"path_name",
")",
"if",
"isinstance",
"(",
"importer",
",",
"imp",
".",
"NullImporter",
")",
":",
"code",
"=",
"_get_code_from_file",
"(",
"path_name",
")",
"return",
"_run_module_code",
"(",
"code",
",",
"init_globals",
",",
"run_name",
",",
"path_name",
")",
"else",
":",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"path_name",
")",
"try",
":",
"main_name",
"=",
"'__main__'",
"saved_main",
"=",
"sys",
".",
"modules",
"[",
"main_name",
"]",
"del",
"sys",
".",
"modules",
"[",
"main_name",
"]",
"try",
":",
"(",
"mod_name",
",",
"loader",
",",
"code",
",",
"fname",
")",
"=",
"_get_main_module_details",
"(",
")",
"finally",
":",
"sys",
".",
"modules",
"[",
"main_name",
"]",
"=",
"saved_main",
"pkg_name",
"=",
"''",
"with",
"_TempModule",
"(",
"run_name",
")",
"as",
"temp_module",
":",
"with",
"_ModifiedArgv0",
"(",
"path_name",
")",
":",
"mod_globals",
"=",
"temp_module",
".",
"module",
".",
"__dict__",
"return",
"_run_code",
"(",
"code",
",",
"mod_globals",
",",
"init_globals",
",",
"run_name",
",",
"fname",
",",
"loader",
",",
"pkg_name",
")",
".",
"copy",
"(",
")",
"finally",
":",
"try",
":",
"sys",
".",
"path",
".",
"remove",
"(",
"path_name",
")",
"except",
"ValueError",
":",
"pass"
] |
execute code located at the specified filesystem location returns the resulting top level namespace dictionary the file path may refer directly to a python script or else it may refer to a zipfile or directory containing a top level __main__ .
|
train
| true
|
6,244
|
def rank_order(image):
flat_image = image.ravel()
sort_order = flat_image.argsort().astype(np.uint32)
flat_image = flat_image[sort_order]
sort_rank = np.zeros_like(sort_order)
is_different = (flat_image[:(-1)] != flat_image[1:])
np.cumsum(is_different, out=sort_rank[1:])
original_values = np.zeros(((sort_rank[(-1)] + 1),), image.dtype)
original_values[0] = flat_image[0]
original_values[1:] = flat_image[1:][is_different]
int_image = np.zeros_like(sort_order)
int_image[sort_order] = sort_rank
return (int_image.reshape(image.shape), original_values)
|
[
"def",
"rank_order",
"(",
"image",
")",
":",
"flat_image",
"=",
"image",
".",
"ravel",
"(",
")",
"sort_order",
"=",
"flat_image",
".",
"argsort",
"(",
")",
".",
"astype",
"(",
"np",
".",
"uint32",
")",
"flat_image",
"=",
"flat_image",
"[",
"sort_order",
"]",
"sort_rank",
"=",
"np",
".",
"zeros_like",
"(",
"sort_order",
")",
"is_different",
"=",
"(",
"flat_image",
"[",
":",
"(",
"-",
"1",
")",
"]",
"!=",
"flat_image",
"[",
"1",
":",
"]",
")",
"np",
".",
"cumsum",
"(",
"is_different",
",",
"out",
"=",
"sort_rank",
"[",
"1",
":",
"]",
")",
"original_values",
"=",
"np",
".",
"zeros",
"(",
"(",
"(",
"sort_rank",
"[",
"(",
"-",
"1",
")",
"]",
"+",
"1",
")",
",",
")",
",",
"image",
".",
"dtype",
")",
"original_values",
"[",
"0",
"]",
"=",
"flat_image",
"[",
"0",
"]",
"original_values",
"[",
"1",
":",
"]",
"=",
"flat_image",
"[",
"1",
":",
"]",
"[",
"is_different",
"]",
"int_image",
"=",
"np",
".",
"zeros_like",
"(",
"sort_order",
")",
"int_image",
"[",
"sort_order",
"]",
"=",
"sort_rank",
"return",
"(",
"int_image",
".",
"reshape",
"(",
"image",
".",
"shape",
")",
",",
"original_values",
")"
] |
return an image of the same shape where each pixel is the index of the pixel value in the ascending order of the unique values of image .
|
train
| false
|
6,245
|
def get_plus_sign_symbol(locale=LC_NUMERIC):
return Locale.parse(locale).number_symbols.get('plusSign', u'+')
|
[
"def",
"get_plus_sign_symbol",
"(",
"locale",
"=",
"LC_NUMERIC",
")",
":",
"return",
"Locale",
".",
"parse",
"(",
"locale",
")",
".",
"number_symbols",
".",
"get",
"(",
"'plusSign'",
",",
"u'+'",
")"
] |
return the plus sign symbol used by the current locale .
|
train
| false
|
6,247
|
def ports_open(name, ports, proto='tcp', direction='in'):
ports = list(map(str, ports))
diff = False
ret = {'name': ','.join(ports), 'changes': {}, 'result': True, 'comment': 'Ports open.'}
current_ports = __salt__['csf.get_ports'](proto=proto, direction=direction)
direction = direction.upper()
directions = __salt__['csf.build_directions'](direction)
for direction in directions:
print current_ports[direction]
print ports
if (current_ports[direction] != ports):
diff = True
if diff:
result = __salt__['csf.allow_ports'](ports, proto=proto, direction=direction)
ret['changes']['Ports'] = 'Changed'
ret['comment'] = result
return ret
|
[
"def",
"ports_open",
"(",
"name",
",",
"ports",
",",
"proto",
"=",
"'tcp'",
",",
"direction",
"=",
"'in'",
")",
":",
"ports",
"=",
"list",
"(",
"map",
"(",
"str",
",",
"ports",
")",
")",
"diff",
"=",
"False",
"ret",
"=",
"{",
"'name'",
":",
"','",
".",
"join",
"(",
"ports",
")",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"True",
",",
"'comment'",
":",
"'Ports open.'",
"}",
"current_ports",
"=",
"__salt__",
"[",
"'csf.get_ports'",
"]",
"(",
"proto",
"=",
"proto",
",",
"direction",
"=",
"direction",
")",
"direction",
"=",
"direction",
".",
"upper",
"(",
")",
"directions",
"=",
"__salt__",
"[",
"'csf.build_directions'",
"]",
"(",
"direction",
")",
"for",
"direction",
"in",
"directions",
":",
"print",
"current_ports",
"[",
"direction",
"]",
"print",
"ports",
"if",
"(",
"current_ports",
"[",
"direction",
"]",
"!=",
"ports",
")",
":",
"diff",
"=",
"True",
"if",
"diff",
":",
"result",
"=",
"__salt__",
"[",
"'csf.allow_ports'",
"]",
"(",
"ports",
",",
"proto",
"=",
"proto",
",",
"direction",
"=",
"direction",
")",
"ret",
"[",
"'changes'",
"]",
"[",
"'Ports'",
"]",
"=",
"'Changed'",
"ret",
"[",
"'comment'",
"]",
"=",
"result",
"return",
"ret"
] |
ensure ports are open for a protocol .
|
train
| false
|
6,250
|
def _package(package, subdirs=None):
dirs = package.split(u'.')
app_dir = os.path.join(u'share', u'git-cola', u'lib', *dirs)
if subdirs:
dirs = (list(subdirs) + dirs)
src_dir = os.path.join(*dirs)
return (app_dir, glob(os.path.join(src_dir, u'*.py')))
|
[
"def",
"_package",
"(",
"package",
",",
"subdirs",
"=",
"None",
")",
":",
"dirs",
"=",
"package",
".",
"split",
"(",
"u'.'",
")",
"app_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"u'share'",
",",
"u'git-cola'",
",",
"u'lib'",
",",
"*",
"dirs",
")",
"if",
"subdirs",
":",
"dirs",
"=",
"(",
"list",
"(",
"subdirs",
")",
"+",
"dirs",
")",
"src_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"*",
"dirs",
")",
"return",
"(",
"app_dir",
",",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"src_dir",
",",
"u'*.py'",
")",
")",
")"
] |
collect python files for a given python "package" name .
|
train
| false
|
6,252
|
@pytest.mark.xfail(reason='pickle does not produce same error')
@pytest.mark.parametrize('serial', all_formats)
def test_builtin_403_exception(iris_server, serial):
t = symbol('t', discover(iris))
for name in ('map', 'apply'):
func = getattr(t.species, name)
expr = func(eval, 'int')
query = {'expr': to_tree(expr)}
response = iris_server.post('/compute', data=serial.dumps(query), headers=mimetype(serial))
assert ('403 FORBIDDEN'.lower() in response.status.lower())
|
[
"@",
"pytest",
".",
"mark",
".",
"xfail",
"(",
"reason",
"=",
"'pickle does not produce same error'",
")",
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'serial'",
",",
"all_formats",
")",
"def",
"test_builtin_403_exception",
"(",
"iris_server",
",",
"serial",
")",
":",
"t",
"=",
"symbol",
"(",
"'t'",
",",
"discover",
"(",
"iris",
")",
")",
"for",
"name",
"in",
"(",
"'map'",
",",
"'apply'",
")",
":",
"func",
"=",
"getattr",
"(",
"t",
".",
"species",
",",
"name",
")",
"expr",
"=",
"func",
"(",
"eval",
",",
"'int'",
")",
"query",
"=",
"{",
"'expr'",
":",
"to_tree",
"(",
"expr",
")",
"}",
"response",
"=",
"iris_server",
".",
"post",
"(",
"'/compute'",
",",
"data",
"=",
"serial",
".",
"dumps",
"(",
"query",
")",
",",
"headers",
"=",
"mimetype",
"(",
"serial",
")",
")",
"assert",
"(",
"'403 FORBIDDEN'",
".",
"lower",
"(",
")",
"in",
"response",
".",
"status",
".",
"lower",
"(",
")",
")"
] |
ensure exception is raised when both map and apply are invoked .
|
train
| false
|
6,253
|
def test_mixed_mro_respected():
class OC:
abc = 3
class OC2(OC, ):
pass
class NC(object, ):
pass
class NC2(object, ):
abc = 5
class MC(NC, OC2, NC2, OC, ):
pass
AreEqual(MC.abc, 5)
|
[
"def",
"test_mixed_mro_respected",
"(",
")",
":",
"class",
"OC",
":",
"abc",
"=",
"3",
"class",
"OC2",
"(",
"OC",
",",
")",
":",
"pass",
"class",
"NC",
"(",
"object",
",",
")",
":",
"pass",
"class",
"NC2",
"(",
"object",
",",
")",
":",
"abc",
"=",
"5",
"class",
"MC",
"(",
"NC",
",",
"OC2",
",",
"NC2",
",",
"OC",
",",
")",
":",
"pass",
"AreEqual",
"(",
"MC",
".",
"abc",
",",
"5",
")"
] |
creates a class with an mro of "mc .
|
train
| false
|
6,254
|
def copytree_hardlink(source, dest):
copy2 = shutil.copy2
try:
shutil.copy2 = os.link
shutil.copytree(source, dest)
finally:
shutil.copy2 = copy2
|
[
"def",
"copytree_hardlink",
"(",
"source",
",",
"dest",
")",
":",
"copy2",
"=",
"shutil",
".",
"copy2",
"try",
":",
"shutil",
".",
"copy2",
"=",
"os",
".",
"link",
"shutil",
".",
"copytree",
"(",
"source",
",",
"dest",
")",
"finally",
":",
"shutil",
".",
"copy2",
"=",
"copy2"
] |
recursively copy a directory ala shutils .
|
train
| true
|
6,255
|
def assert_identical(a, b):
assert_equal(a, b)
if (type(b) is str):
assert_equal(type(a), type(b))
else:
assert_equal(np.asarray(a).dtype.type, np.asarray(b).dtype.type)
|
[
"def",
"assert_identical",
"(",
"a",
",",
"b",
")",
":",
"assert_equal",
"(",
"a",
",",
"b",
")",
"if",
"(",
"type",
"(",
"b",
")",
"is",
"str",
")",
":",
"assert_equal",
"(",
"type",
"(",
"a",
")",
",",
"type",
"(",
"b",
")",
")",
"else",
":",
"assert_equal",
"(",
"np",
".",
"asarray",
"(",
"a",
")",
".",
"dtype",
".",
"type",
",",
"np",
".",
"asarray",
"(",
"b",
")",
".",
"dtype",
".",
"type",
")"
] |
assert whether value and type are the same .
|
train
| false
|
6,258
|
def _sanitize_params(prefix, suffix, dir):
output_type = _infer_return_type(prefix, suffix, dir)
if (suffix is None):
suffix = output_type()
if (prefix is None):
if (output_type is str):
prefix = template
else:
prefix = _os.fsencode(template)
if (dir is None):
if (output_type is str):
dir = gettempdir()
else:
dir = gettempdirb()
return (prefix, suffix, dir, output_type)
|
[
"def",
"_sanitize_params",
"(",
"prefix",
",",
"suffix",
",",
"dir",
")",
":",
"output_type",
"=",
"_infer_return_type",
"(",
"prefix",
",",
"suffix",
",",
"dir",
")",
"if",
"(",
"suffix",
"is",
"None",
")",
":",
"suffix",
"=",
"output_type",
"(",
")",
"if",
"(",
"prefix",
"is",
"None",
")",
":",
"if",
"(",
"output_type",
"is",
"str",
")",
":",
"prefix",
"=",
"template",
"else",
":",
"prefix",
"=",
"_os",
".",
"fsencode",
"(",
"template",
")",
"if",
"(",
"dir",
"is",
"None",
")",
":",
"if",
"(",
"output_type",
"is",
"str",
")",
":",
"dir",
"=",
"gettempdir",
"(",
")",
"else",
":",
"dir",
"=",
"gettempdirb",
"(",
")",
"return",
"(",
"prefix",
",",
"suffix",
",",
"dir",
",",
"output_type",
")"
] |
common parameter processing for most apis in this module .
|
train
| false
|
6,259
|
def _flag_default_rules(rules):
if (len(rules) >= len(DEFAULT_POLICY_RULES)):
rules_suffix = tuple(rules[(- len(DEFAULT_POLICY_RULES)):])
if (rules_suffix == DEFAULT_POLICY_RULES):
for rule in rules_suffix:
rule._is_default_suffix = True
|
[
"def",
"_flag_default_rules",
"(",
"rules",
")",
":",
"if",
"(",
"len",
"(",
"rules",
")",
">=",
"len",
"(",
"DEFAULT_POLICY_RULES",
")",
")",
":",
"rules_suffix",
"=",
"tuple",
"(",
"rules",
"[",
"(",
"-",
"len",
"(",
"DEFAULT_POLICY_RULES",
")",
")",
":",
"]",
")",
"if",
"(",
"rules_suffix",
"==",
"DEFAULT_POLICY_RULES",
")",
":",
"for",
"rule",
"in",
"rules_suffix",
":",
"rule",
".",
"_is_default_suffix",
"=",
"True"
] |
determine if part of our policy ends with the defaultly appended suffix .
|
train
| false
|
6,260
|
def _tconfint_generic(mean, std_mean, dof, alpha, alternative):
if (alternative in ['two-sided', '2-sided', '2s']):
tcrit = stats.t.ppf((1 - (alpha / 2.0)), dof)
lower = (mean - (tcrit * std_mean))
upper = (mean + (tcrit * std_mean))
elif (alternative in ['larger', 'l']):
tcrit = stats.t.ppf(alpha, dof)
lower = (mean + (tcrit * std_mean))
upper = np.inf
elif (alternative in ['smaller', 's']):
tcrit = stats.t.ppf((1 - alpha), dof)
lower = (- np.inf)
upper = (mean + (tcrit * std_mean))
else:
raise ValueError('invalid alternative')
return (lower, upper)
|
[
"def",
"_tconfint_generic",
"(",
"mean",
",",
"std_mean",
",",
"dof",
",",
"alpha",
",",
"alternative",
")",
":",
"if",
"(",
"alternative",
"in",
"[",
"'two-sided'",
",",
"'2-sided'",
",",
"'2s'",
"]",
")",
":",
"tcrit",
"=",
"stats",
".",
"t",
".",
"ppf",
"(",
"(",
"1",
"-",
"(",
"alpha",
"/",
"2.0",
")",
")",
",",
"dof",
")",
"lower",
"=",
"(",
"mean",
"-",
"(",
"tcrit",
"*",
"std_mean",
")",
")",
"upper",
"=",
"(",
"mean",
"+",
"(",
"tcrit",
"*",
"std_mean",
")",
")",
"elif",
"(",
"alternative",
"in",
"[",
"'larger'",
",",
"'l'",
"]",
")",
":",
"tcrit",
"=",
"stats",
".",
"t",
".",
"ppf",
"(",
"alpha",
",",
"dof",
")",
"lower",
"=",
"(",
"mean",
"+",
"(",
"tcrit",
"*",
"std_mean",
")",
")",
"upper",
"=",
"np",
".",
"inf",
"elif",
"(",
"alternative",
"in",
"[",
"'smaller'",
",",
"'s'",
"]",
")",
":",
"tcrit",
"=",
"stats",
".",
"t",
".",
"ppf",
"(",
"(",
"1",
"-",
"alpha",
")",
",",
"dof",
")",
"lower",
"=",
"(",
"-",
"np",
".",
"inf",
")",
"upper",
"=",
"(",
"mean",
"+",
"(",
"tcrit",
"*",
"std_mean",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'invalid alternative'",
")",
"return",
"(",
"lower",
",",
"upper",
")"
] |
generic t-confint to save typing .
|
train
| false
|
6,261
|
def count(session, query):
counts = query.selectable.with_only_columns([func.count()])
num_results = session.execute(counts.order_by(None)).scalar()
if ((num_results is None) or (query._limit is not None)):
return query.order_by(None).count()
return num_results
|
[
"def",
"count",
"(",
"session",
",",
"query",
")",
":",
"counts",
"=",
"query",
".",
"selectable",
".",
"with_only_columns",
"(",
"[",
"func",
".",
"count",
"(",
")",
"]",
")",
"num_results",
"=",
"session",
".",
"execute",
"(",
"counts",
".",
"order_by",
"(",
"None",
")",
")",
".",
"scalar",
"(",
")",
"if",
"(",
"(",
"num_results",
"is",
"None",
")",
"or",
"(",
"query",
".",
"_limit",
"is",
"not",
"None",
")",
")",
":",
"return",
"query",
".",
"order_by",
"(",
"None",
")",
".",
"count",
"(",
")",
"return",
"num_results"
] |
get the count of properties for a namespace .
|
train
| false
|
6,262
|
def get_fallback_languages(language, site_id=None):
try:
language = get_language_object(language, site_id)
except LanguageError:
language = get_languages(site_id)[0]
return language.get('fallbacks', [])
|
[
"def",
"get_fallback_languages",
"(",
"language",
",",
"site_id",
"=",
"None",
")",
":",
"try",
":",
"language",
"=",
"get_language_object",
"(",
"language",
",",
"site_id",
")",
"except",
"LanguageError",
":",
"language",
"=",
"get_languages",
"(",
"site_id",
")",
"[",
"0",
"]",
"return",
"language",
".",
"get",
"(",
"'fallbacks'",
",",
"[",
"]",
")"
] |
returns a list of fallback languages for the given language .
|
train
| false
|
6,263
|
def _build_gecos(gecos_dict):
return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''), gecos_dict.get('roomnumber', ''), gecos_dict.get('workphone', ''), gecos_dict.get('homephone', ''))
|
[
"def",
"_build_gecos",
"(",
"gecos_dict",
")",
":",
"return",
"'{0},{1},{2},{3}'",
".",
"format",
"(",
"gecos_dict",
".",
"get",
"(",
"'fullname'",
",",
"''",
")",
",",
"gecos_dict",
".",
"get",
"(",
"'roomnumber'",
",",
"''",
")",
",",
"gecos_dict",
".",
"get",
"(",
"'workphone'",
",",
"''",
")",
",",
"gecos_dict",
".",
"get",
"(",
"'homephone'",
",",
"''",
")",
")"
] |
accepts a dictionary entry containing gecos field names and their values .
|
train
| true
|
6,264
|
def parse_snmp(src_ip_port, dst_ip_port, snmp_layer):
if (type(snmp_layer.community.val) == str):
ver = snmp_layer.version.val
msg = ('SNMPv%d community string: %s' % (ver, snmp_layer.community.val))
printer(src_ip_port, dst_ip_port, msg)
return True
|
[
"def",
"parse_snmp",
"(",
"src_ip_port",
",",
"dst_ip_port",
",",
"snmp_layer",
")",
":",
"if",
"(",
"type",
"(",
"snmp_layer",
".",
"community",
".",
"val",
")",
"==",
"str",
")",
":",
"ver",
"=",
"snmp_layer",
".",
"version",
".",
"val",
"msg",
"=",
"(",
"'SNMPv%d community string: %s'",
"%",
"(",
"ver",
",",
"snmp_layer",
".",
"community",
".",
"val",
")",
")",
"printer",
"(",
"src_ip_port",
",",
"dst_ip_port",
",",
"msg",
")",
"return",
"True"
] |
parse out the snmp version and community string .
|
train
| false
|
6,265
|
def getNewRepository():
return ExportRepository()
|
[
"def",
"getNewRepository",
"(",
")",
":",
"return",
"ExportRepository",
"(",
")"
] |
get new repository .
|
train
| false
|
6,266
|
def test_pypi_xml_transformation():
pypi_hits = [{'name': 'foo', 'summary': 'foo summary', 'version': '1.0'}, {'name': 'foo', 'summary': 'foo summary v2', 'version': '2.0'}, {'_pypi_ordering': 50, 'name': 'bar', 'summary': 'bar summary', 'version': '1.0'}]
expected = [{'versions': ['1.0', '2.0'], 'name': 'foo', 'summary': 'foo summary v2'}, {'versions': ['1.0'], 'name': 'bar', 'summary': 'bar summary'}]
assert (transform_hits(pypi_hits) == expected)
|
[
"def",
"test_pypi_xml_transformation",
"(",
")",
":",
"pypi_hits",
"=",
"[",
"{",
"'name'",
":",
"'foo'",
",",
"'summary'",
":",
"'foo summary'",
",",
"'version'",
":",
"'1.0'",
"}",
",",
"{",
"'name'",
":",
"'foo'",
",",
"'summary'",
":",
"'foo summary v2'",
",",
"'version'",
":",
"'2.0'",
"}",
",",
"{",
"'_pypi_ordering'",
":",
"50",
",",
"'name'",
":",
"'bar'",
",",
"'summary'",
":",
"'bar summary'",
",",
"'version'",
":",
"'1.0'",
"}",
"]",
"expected",
"=",
"[",
"{",
"'versions'",
":",
"[",
"'1.0'",
",",
"'2.0'",
"]",
",",
"'name'",
":",
"'foo'",
",",
"'summary'",
":",
"'foo summary v2'",
"}",
",",
"{",
"'versions'",
":",
"[",
"'1.0'",
"]",
",",
"'name'",
":",
"'bar'",
",",
"'summary'",
":",
"'bar summary'",
"}",
"]",
"assert",
"(",
"transform_hits",
"(",
"pypi_hits",
")",
"==",
"expected",
")"
] |
test transformation of data structures .
|
train
| false
|
6,267
|
def get_role_assignments_for_user(user_db):
result = UserRoleAssignment.query(user=user_db.name)
return result
|
[
"def",
"get_role_assignments_for_user",
"(",
"user_db",
")",
":",
"result",
"=",
"UserRoleAssignment",
".",
"query",
"(",
"user",
"=",
"user_db",
".",
"name",
")",
"return",
"result"
] |
retrieve all the userroleassignmentdb objects for a particular user .
|
train
| false
|
6,268
|
def _cmp_by_reachable_nh(path1, path2):
return None
|
[
"def",
"_cmp_by_reachable_nh",
"(",
"path1",
",",
"path2",
")",
":",
"return",
"None"
] |
compares given paths and selects best path based on reachable next-hop .
|
train
| false
|
6,269
|
def read_csv_file(path):
from csv import reader
with codecs.open(path, u'r', u'utf-8') as msgfile:
data = msgfile.read()
data = data.replace(chr(28), u'').replace(chr(29), u'')
data = reader([r.encode(u'utf-8') for r in data.splitlines()])
newdata = [[unicode(val, u'utf-8') for val in row] for row in data]
return newdata
|
[
"def",
"read_csv_file",
"(",
"path",
")",
":",
"from",
"csv",
"import",
"reader",
"with",
"codecs",
".",
"open",
"(",
"path",
",",
"u'r'",
",",
"u'utf-8'",
")",
"as",
"msgfile",
":",
"data",
"=",
"msgfile",
".",
"read",
"(",
")",
"data",
"=",
"data",
".",
"replace",
"(",
"chr",
"(",
"28",
")",
",",
"u''",
")",
".",
"replace",
"(",
"chr",
"(",
"29",
")",
",",
"u''",
")",
"data",
"=",
"reader",
"(",
"[",
"r",
".",
"encode",
"(",
"u'utf-8'",
")",
"for",
"r",
"in",
"data",
".",
"splitlines",
"(",
")",
"]",
")",
"newdata",
"=",
"[",
"[",
"unicode",
"(",
"val",
",",
"u'utf-8'",
")",
"for",
"val",
"in",
"row",
"]",
"for",
"row",
"in",
"data",
"]",
"return",
"newdata"
] |
read csv file and return as list of list .
|
train
| false
|
6,270
|
def getMaximumByComplexPaths(paths):
maximum = complex((-9.876543219876543e+17), (-9.876543219876543e+17))
for path in paths:
for point in path:
maximum = getMaximum(maximum, point)
return maximum
|
[
"def",
"getMaximumByComplexPaths",
"(",
"paths",
")",
":",
"maximum",
"=",
"complex",
"(",
"(",
"-",
"9.876543219876543e+17",
")",
",",
"(",
"-",
"9.876543219876543e+17",
")",
")",
"for",
"path",
"in",
"paths",
":",
"for",
"point",
"in",
"path",
":",
"maximum",
"=",
"getMaximum",
"(",
"maximum",
",",
"point",
")",
"return",
"maximum"
] |
get a complex with each component the maximum of the respective components of complex paths .
|
train
| false
|
6,271
|
def make_transient(instance):
state = attributes.instance_state(instance)
s = _state_session(state)
if s:
s._expunge_states([state])
state.expired_attributes.clear()
if state.callables:
del state.callables
if state.key:
del state.key
if state._deleted:
del state._deleted
|
[
"def",
"make_transient",
"(",
"instance",
")",
":",
"state",
"=",
"attributes",
".",
"instance_state",
"(",
"instance",
")",
"s",
"=",
"_state_session",
"(",
"state",
")",
"if",
"s",
":",
"s",
".",
"_expunge_states",
"(",
"[",
"state",
"]",
")",
"state",
".",
"expired_attributes",
".",
"clear",
"(",
")",
"if",
"state",
".",
"callables",
":",
"del",
"state",
".",
"callables",
"if",
"state",
".",
"key",
":",
"del",
"state",
".",
"key",
"if",
"state",
".",
"_deleted",
":",
"del",
"state",
".",
"_deleted"
] |
make the given instance transient .
|
train
| false
|
6,272
|
def _path_for_test_id(test_id, max_segment_length=32):
if (test_id.count('.') < 2):
raise ValueError(('Must have at least three components (e.g. foo.bar.baz), got: %r' % (test_id,)))
return '/'.join((segment[:max_segment_length] for segment in test_id.rsplit('.', 2)))
|
[
"def",
"_path_for_test_id",
"(",
"test_id",
",",
"max_segment_length",
"=",
"32",
")",
":",
"if",
"(",
"test_id",
".",
"count",
"(",
"'.'",
")",
"<",
"2",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Must have at least three components (e.g. foo.bar.baz), got: %r'",
"%",
"(",
"test_id",
",",
")",
")",
")",
"return",
"'/'",
".",
"join",
"(",
"(",
"segment",
"[",
":",
"max_segment_length",
"]",
"for",
"segment",
"in",
"test_id",
".",
"rsplit",
"(",
"'.'",
",",
"2",
")",
")",
")"
] |
get the temporary directory path for a test id .
|
train
| false
|
6,273
|
def addSphere(elementNode, faces, radius, vertexes):
bottom = (- radius.z)
sides = evaluate.getSidesMinimumThreeBasedOnPrecision(elementNode, max(radius.x, radius.y, radius.z))
sphereSlices = max((sides / 2), 2)
equator = euclidean.getComplexPolygonByComplexRadius(complex(radius.x, radius.y), sides)
polygons = [triangle_mesh.getAddIndexedLoop([complex()], vertexes, bottom)]
zIncrement = ((radius.z + radius.z) / float(sphereSlices))
z = bottom
for sphereSlice in xrange(1, sphereSlices):
z += zIncrement
zPortion = (abs(z) / radius.z)
multipliedPath = euclidean.getComplexPathByMultiplier(math.sqrt((1.0 - (zPortion * zPortion))), equator)
polygons.append(triangle_mesh.getAddIndexedLoop(multipliedPath, vertexes, z))
polygons.append(triangle_mesh.getAddIndexedLoop([complex()], vertexes, radius.z))
triangle_mesh.addPillarByLoops(faces, polygons)
|
[
"def",
"addSphere",
"(",
"elementNode",
",",
"faces",
",",
"radius",
",",
"vertexes",
")",
":",
"bottom",
"=",
"(",
"-",
"radius",
".",
"z",
")",
"sides",
"=",
"evaluate",
".",
"getSidesMinimumThreeBasedOnPrecision",
"(",
"elementNode",
",",
"max",
"(",
"radius",
".",
"x",
",",
"radius",
".",
"y",
",",
"radius",
".",
"z",
")",
")",
"sphereSlices",
"=",
"max",
"(",
"(",
"sides",
"/",
"2",
")",
",",
"2",
")",
"equator",
"=",
"euclidean",
".",
"getComplexPolygonByComplexRadius",
"(",
"complex",
"(",
"radius",
".",
"x",
",",
"radius",
".",
"y",
")",
",",
"sides",
")",
"polygons",
"=",
"[",
"triangle_mesh",
".",
"getAddIndexedLoop",
"(",
"[",
"complex",
"(",
")",
"]",
",",
"vertexes",
",",
"bottom",
")",
"]",
"zIncrement",
"=",
"(",
"(",
"radius",
".",
"z",
"+",
"radius",
".",
"z",
")",
"/",
"float",
"(",
"sphereSlices",
")",
")",
"z",
"=",
"bottom",
"for",
"sphereSlice",
"in",
"xrange",
"(",
"1",
",",
"sphereSlices",
")",
":",
"z",
"+=",
"zIncrement",
"zPortion",
"=",
"(",
"abs",
"(",
"z",
")",
"/",
"radius",
".",
"z",
")",
"multipliedPath",
"=",
"euclidean",
".",
"getComplexPathByMultiplier",
"(",
"math",
".",
"sqrt",
"(",
"(",
"1.0",
"-",
"(",
"zPortion",
"*",
"zPortion",
")",
")",
")",
",",
"equator",
")",
"polygons",
".",
"append",
"(",
"triangle_mesh",
".",
"getAddIndexedLoop",
"(",
"multipliedPath",
",",
"vertexes",
",",
"z",
")",
")",
"polygons",
".",
"append",
"(",
"triangle_mesh",
".",
"getAddIndexedLoop",
"(",
"[",
"complex",
"(",
")",
"]",
",",
"vertexes",
",",
"radius",
".",
"z",
")",
")",
"triangle_mesh",
".",
"addPillarByLoops",
"(",
"faces",
",",
"polygons",
")"
] |
add sphere by radius .
|
train
| false
|
6,274
|
def maskMatrix(matrix, shape='circle', radius=1.0, center=(0.0, 0.0)):
alphaMask = makeMask(matrix.shape[0], shape, radius, center=(0.0, 0.0), range=[0, 1])
return (matrix * alphaMask)
|
[
"def",
"maskMatrix",
"(",
"matrix",
",",
"shape",
"=",
"'circle'",
",",
"radius",
"=",
"1.0",
",",
"center",
"=",
"(",
"0.0",
",",
"0.0",
")",
")",
":",
"alphaMask",
"=",
"makeMask",
"(",
"matrix",
".",
"shape",
"[",
"0",
"]",
",",
"shape",
",",
"radius",
",",
"center",
"=",
"(",
"0.0",
",",
"0.0",
")",
",",
"range",
"=",
"[",
"0",
",",
"1",
"]",
")",
"return",
"(",
"matrix",
"*",
"alphaMask",
")"
] |
make and apply a mask to an input matrix .
|
train
| false
|
6,275
|
def get_str_resource_ref_from_model(model):
return get_resource_ref_from_model(model).ref
|
[
"def",
"get_str_resource_ref_from_model",
"(",
"model",
")",
":",
"return",
"get_resource_ref_from_model",
"(",
"model",
")",
".",
"ref"
] |
return a resource reference as string given db_model .
|
train
| false
|
6,276
|
def patch_sys(stdin=True, stdout=True, stderr=True):
if PY3:
return
if stdin:
_patch_sys_std('stdin')
if stdout:
_patch_sys_std('stdout')
if stderr:
_patch_sys_std('stderr')
|
[
"def",
"patch_sys",
"(",
"stdin",
"=",
"True",
",",
"stdout",
"=",
"True",
",",
"stderr",
"=",
"True",
")",
":",
"if",
"PY3",
":",
"return",
"if",
"stdin",
":",
"_patch_sys_std",
"(",
"'stdin'",
")",
"if",
"stdout",
":",
"_patch_sys_std",
"(",
"'stdout'",
")",
"if",
"stderr",
":",
"_patch_sys_std",
"(",
"'stderr'",
")"
] |
patch sys .
|
train
| false
|
6,277
|
def local_url(port):
return 'http://localhost:{}'.format(str(port))
|
[
"def",
"local_url",
"(",
"port",
")",
":",
"return",
"'http://localhost:{}'",
".",
"format",
"(",
"str",
"(",
"port",
")",
")"
] |
generates url for a service when running locally .
|
train
| false
|
6,278
|
def encoding_exists(encoding, _aliases=_ENCODING_ALIASES):
try:
codecs.lookup(resolve_encoding(encoding, _aliases))
except LookupError:
return False
return True
|
[
"def",
"encoding_exists",
"(",
"encoding",
",",
"_aliases",
"=",
"_ENCODING_ALIASES",
")",
":",
"try",
":",
"codecs",
".",
"lookup",
"(",
"resolve_encoding",
"(",
"encoding",
",",
"_aliases",
")",
")",
"except",
"LookupError",
":",
"return",
"False",
"return",
"True"
] |
returns true if encoding is valid .
|
train
| false
|
6,280
|
def colormaps():
return sorted(cm.cmap_d)
|
[
"def",
"colormaps",
"(",
")",
":",
"return",
"sorted",
"(",
"cm",
".",
"cmap_d",
")"
] |
matplotlib provides a number of colormaps .
|
train
| false
|
6,281
|
def dict_to_one(dp_dict={}):
return {x: 1 for x in dp_dict}
|
[
"def",
"dict_to_one",
"(",
"dp_dict",
"=",
"{",
"}",
")",
":",
"return",
"{",
"x",
":",
"1",
"for",
"x",
"in",
"dp_dict",
"}"
] |
input a dictionary .
|
train
| false
|
6,282
|
def pts_to_midstep(x, *args):
steps = np.zeros(((1 + len(args)), (2 * len(x))))
x = np.asanyarray(x)
steps[0, 1:(-1):2] = steps[0, 2::2] = ((x[:(-1)] + x[1:]) / 2)
(steps[(0, 0)], steps[(0, (-1))]) = (x[0], x[(-1)])
steps[1:, 0::2] = args
steps[1:, 1::2] = steps[1:, 0::2]
return steps
|
[
"def",
"pts_to_midstep",
"(",
"x",
",",
"*",
"args",
")",
":",
"steps",
"=",
"np",
".",
"zeros",
"(",
"(",
"(",
"1",
"+",
"len",
"(",
"args",
")",
")",
",",
"(",
"2",
"*",
"len",
"(",
"x",
")",
")",
")",
")",
"x",
"=",
"np",
".",
"asanyarray",
"(",
"x",
")",
"steps",
"[",
"0",
",",
"1",
":",
"(",
"-",
"1",
")",
":",
"2",
"]",
"=",
"steps",
"[",
"0",
",",
"2",
":",
":",
"2",
"]",
"=",
"(",
"(",
"x",
"[",
":",
"(",
"-",
"1",
")",
"]",
"+",
"x",
"[",
"1",
":",
"]",
")",
"/",
"2",
")",
"(",
"steps",
"[",
"(",
"0",
",",
"0",
")",
"]",
",",
"steps",
"[",
"(",
"0",
",",
"(",
"-",
"1",
")",
")",
"]",
")",
"=",
"(",
"x",
"[",
"0",
"]",
",",
"x",
"[",
"(",
"-",
"1",
")",
"]",
")",
"steps",
"[",
"1",
":",
",",
"0",
":",
":",
"2",
"]",
"=",
"args",
"steps",
"[",
"1",
":",
",",
"1",
":",
":",
"2",
"]",
"=",
"steps",
"[",
"1",
":",
",",
"0",
":",
":",
"2",
"]",
"return",
"steps"
] |
convert continuous line to mid-steps .
|
train
| false
|
6,284
|
def removeIdentifiersFromDictionary(dictionary):
euclidean.removeElementsFromDictionary(dictionary, ['id', 'name', 'tags'])
return dictionary
|
[
"def",
"removeIdentifiersFromDictionary",
"(",
"dictionary",
")",
":",
"euclidean",
".",
"removeElementsFromDictionary",
"(",
"dictionary",
",",
"[",
"'id'",
",",
"'name'",
",",
"'tags'",
"]",
")",
"return",
"dictionary"
] |
remove the identifier elements from a dictionary .
|
train
| false
|
6,286
|
def test_compute_debiasing():
rng = np.random.RandomState(42)
G = rng.randn(10, 4)
X = rng.randn(4, 20)
debias_true = np.arange(1, 5, dtype=np.float)
M = np.dot(G, (X * debias_true[:, np.newaxis]))
debias = compute_bias(M, G, X, max_iter=10000, n_orient=1, tol=1e-07)
assert_almost_equal(debias, debias_true, decimal=5)
debias = compute_bias(M, G, X, max_iter=10000, n_orient=2, tol=1e-05)
assert_almost_equal(debias, [1.8, 1.8, 3.72, 3.72], decimal=2)
|
[
"def",
"test_compute_debiasing",
"(",
")",
":",
"rng",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
"42",
")",
"G",
"=",
"rng",
".",
"randn",
"(",
"10",
",",
"4",
")",
"X",
"=",
"rng",
".",
"randn",
"(",
"4",
",",
"20",
")",
"debias_true",
"=",
"np",
".",
"arange",
"(",
"1",
",",
"5",
",",
"dtype",
"=",
"np",
".",
"float",
")",
"M",
"=",
"np",
".",
"dot",
"(",
"G",
",",
"(",
"X",
"*",
"debias_true",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
")",
")",
"debias",
"=",
"compute_bias",
"(",
"M",
",",
"G",
",",
"X",
",",
"max_iter",
"=",
"10000",
",",
"n_orient",
"=",
"1",
",",
"tol",
"=",
"1e-07",
")",
"assert_almost_equal",
"(",
"debias",
",",
"debias_true",
",",
"decimal",
"=",
"5",
")",
"debias",
"=",
"compute_bias",
"(",
"M",
",",
"G",
",",
"X",
",",
"max_iter",
"=",
"10000",
",",
"n_orient",
"=",
"2",
",",
"tol",
"=",
"1e-05",
")",
"assert_almost_equal",
"(",
"debias",
",",
"[",
"1.8",
",",
"1.8",
",",
"3.72",
",",
"3.72",
"]",
",",
"decimal",
"=",
"2",
")"
] |
test source amplitude debiasing .
|
train
| false
|
6,287
|
def set_nodename(facts):
if (('node' in facts) and ('common' in facts)):
if (('cloudprovider' in facts) and (facts['cloudprovider']['kind'] == 'openstack')):
facts['node']['nodename'] = facts['provider']['metadata']['hostname'].replace('.novalocal', '')
else:
facts['node']['nodename'] = facts['common']['hostname'].lower()
return facts
|
[
"def",
"set_nodename",
"(",
"facts",
")",
":",
"if",
"(",
"(",
"'node'",
"in",
"facts",
")",
"and",
"(",
"'common'",
"in",
"facts",
")",
")",
":",
"if",
"(",
"(",
"'cloudprovider'",
"in",
"facts",
")",
"and",
"(",
"facts",
"[",
"'cloudprovider'",
"]",
"[",
"'kind'",
"]",
"==",
"'openstack'",
")",
")",
":",
"facts",
"[",
"'node'",
"]",
"[",
"'nodename'",
"]",
"=",
"facts",
"[",
"'provider'",
"]",
"[",
"'metadata'",
"]",
"[",
"'hostname'",
"]",
".",
"replace",
"(",
"'.novalocal'",
",",
"''",
")",
"else",
":",
"facts",
"[",
"'node'",
"]",
"[",
"'nodename'",
"]",
"=",
"facts",
"[",
"'common'",
"]",
"[",
"'hostname'",
"]",
".",
"lower",
"(",
")",
"return",
"facts"
] |
set nodename .
|
train
| false
|
6,290
|
@requires_pyopengl()
def test_pyopengl():
from vispy.gloo.gl import pyopengl2
_test_function_names(pyopengl2)
_test_constant_names(pyopengl2)
|
[
"@",
"requires_pyopengl",
"(",
")",
"def",
"test_pyopengl",
"(",
")",
":",
"from",
"vispy",
".",
"gloo",
".",
"gl",
"import",
"pyopengl2",
"_test_function_names",
"(",
"pyopengl2",
")",
"_test_constant_names",
"(",
"pyopengl2",
")"
] |
pyopengl backend should have all es 2 .
|
train
| false
|
6,291
|
def get_trigger(trigger):
if callable(trigger):
return trigger
elif (trigger is None):
return _never_fire_trigger
else:
return interval.IntervalTrigger(*trigger)
|
[
"def",
"get_trigger",
"(",
"trigger",
")",
":",
"if",
"callable",
"(",
"trigger",
")",
":",
"return",
"trigger",
"elif",
"(",
"trigger",
"is",
"None",
")",
":",
"return",
"_never_fire_trigger",
"else",
":",
"return",
"interval",
".",
"IntervalTrigger",
"(",
"*",
"trigger",
")"
] |
gets a trigger object .
|
train
| false
|
6,292
|
def get_repository_type_from_tool_shed(app, tool_shed_url, name, owner):
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
params = dict(name=name, owner=owner)
pathspec = ['repository', 'get_repository_type']
repository_type = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
return repository_type
|
[
"def",
"get_repository_type_from_tool_shed",
"(",
"app",
",",
"tool_shed_url",
",",
"name",
",",
"owner",
")",
":",
"tool_shed_url",
"=",
"common_util",
".",
"get_tool_shed_url_from_tool_shed_registry",
"(",
"app",
",",
"tool_shed_url",
")",
"params",
"=",
"dict",
"(",
"name",
"=",
"name",
",",
"owner",
"=",
"owner",
")",
"pathspec",
"=",
"[",
"'repository'",
",",
"'get_repository_type'",
"]",
"repository_type",
"=",
"util",
".",
"url_get",
"(",
"tool_shed_url",
",",
"password_mgr",
"=",
"app",
".",
"tool_shed_registry",
".",
"url_auth",
"(",
"tool_shed_url",
")",
",",
"pathspec",
"=",
"pathspec",
",",
"params",
"=",
"params",
")",
"return",
"repository_type"
] |
send a request to the tool shed to retrieve the type for a repository defined by the combination of a name and owner .
|
train
| false
|
6,293
|
def apply_received_command(event):
device_id = slugify(event.device.id_string.lower())
if (device_id not in RFX_DEVICES):
return
_LOGGER.debug('Device_id: %s device_update. Command: %s', device_id, event.values['Command'])
if ((event.values['Command'] == 'On') or (event.values['Command'] == 'Off')):
is_on = (event.values['Command'] == 'On')
RFX_DEVICES[device_id].update_state(is_on)
elif (hasattr(RFX_DEVICES[device_id], 'brightness') and (event.values['Command'] == 'Set level')):
_brightness = ((event.values['Dim level'] * 255) // 100)
is_on = (_brightness > 0)
RFX_DEVICES[device_id].update_state(is_on, _brightness)
if RFX_DEVICES[device_id].should_fire_event:
RFX_DEVICES[device_id].hass.bus.fire(EVENT_BUTTON_PRESSED, {ATTR_ENTITY_ID: RFX_DEVICES[device_id].entity_id, ATTR_STATE: event.values['Command'].lower()})
_LOGGER.info('Rfxtrx fired event: (event_type: %s, %s: %s, %s: %s)', EVENT_BUTTON_PRESSED, ATTR_ENTITY_ID, RFX_DEVICES[device_id].entity_id, ATTR_STATE, event.values['Command'].lower())
|
[
"def",
"apply_received_command",
"(",
"event",
")",
":",
"device_id",
"=",
"slugify",
"(",
"event",
".",
"device",
".",
"id_string",
".",
"lower",
"(",
")",
")",
"if",
"(",
"device_id",
"not",
"in",
"RFX_DEVICES",
")",
":",
"return",
"_LOGGER",
".",
"debug",
"(",
"'Device_id: %s device_update. Command: %s'",
",",
"device_id",
",",
"event",
".",
"values",
"[",
"'Command'",
"]",
")",
"if",
"(",
"(",
"event",
".",
"values",
"[",
"'Command'",
"]",
"==",
"'On'",
")",
"or",
"(",
"event",
".",
"values",
"[",
"'Command'",
"]",
"==",
"'Off'",
")",
")",
":",
"is_on",
"=",
"(",
"event",
".",
"values",
"[",
"'Command'",
"]",
"==",
"'On'",
")",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"update_state",
"(",
"is_on",
")",
"elif",
"(",
"hasattr",
"(",
"RFX_DEVICES",
"[",
"device_id",
"]",
",",
"'brightness'",
")",
"and",
"(",
"event",
".",
"values",
"[",
"'Command'",
"]",
"==",
"'Set level'",
")",
")",
":",
"_brightness",
"=",
"(",
"(",
"event",
".",
"values",
"[",
"'Dim level'",
"]",
"*",
"255",
")",
"//",
"100",
")",
"is_on",
"=",
"(",
"_brightness",
">",
"0",
")",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"update_state",
"(",
"is_on",
",",
"_brightness",
")",
"if",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"should_fire_event",
":",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"hass",
".",
"bus",
".",
"fire",
"(",
"EVENT_BUTTON_PRESSED",
",",
"{",
"ATTR_ENTITY_ID",
":",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"entity_id",
",",
"ATTR_STATE",
":",
"event",
".",
"values",
"[",
"'Command'",
"]",
".",
"lower",
"(",
")",
"}",
")",
"_LOGGER",
".",
"info",
"(",
"'Rfxtrx fired event: (event_type: %s, %s: %s, %s: %s)'",
",",
"EVENT_BUTTON_PRESSED",
",",
"ATTR_ENTITY_ID",
",",
"RFX_DEVICES",
"[",
"device_id",
"]",
".",
"entity_id",
",",
"ATTR_STATE",
",",
"event",
".",
"values",
"[",
"'Command'",
"]",
".",
"lower",
"(",
")",
")"
] |
apply command from rfxtrx .
|
train
| false
|
6,294
|
def timing(function):
@wraps(function)
def wrapped(*args, **kwargs):
start_time = time.time()
ret = function(*args, **salt.utils.clean_kwargs(**kwargs))
end_time = time.time()
if function.__module__.startswith('salt.loaded.int.'):
mod_name = function.__module__[16:]
else:
mod_name = function.__module__
log.profile('Function {0}.{1} took {2:.20f} seconds to execute'.format(mod_name, function.__name__, (end_time - start_time)))
return ret
return wrapped
|
[
"def",
"timing",
"(",
"function",
")",
":",
"@",
"wraps",
"(",
"function",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"ret",
"=",
"function",
"(",
"*",
"args",
",",
"**",
"salt",
".",
"utils",
".",
"clean_kwargs",
"(",
"**",
"kwargs",
")",
")",
"end_time",
"=",
"time",
".",
"time",
"(",
")",
"if",
"function",
".",
"__module__",
".",
"startswith",
"(",
"'salt.loaded.int.'",
")",
":",
"mod_name",
"=",
"function",
".",
"__module__",
"[",
"16",
":",
"]",
"else",
":",
"mod_name",
"=",
"function",
".",
"__module__",
"log",
".",
"profile",
"(",
"'Function {0}.{1} took {2:.20f} seconds to execute'",
".",
"format",
"(",
"mod_name",
",",
"function",
".",
"__name__",
",",
"(",
"end_time",
"-",
"start_time",
")",
")",
")",
"return",
"ret",
"return",
"wrapped"
] |
timing -> t_total execute a function once .
|
train
| true
|
6,295
|
@with_setup(prepare_stdout)
def test_output_outlines_success_colorless():
runner = Runner(join_path('ru', 'success', 'outlines.feature'), verbosity=3, no_color=True)
runner.run()
assert_stdout_lines(u'\n\u0424\u0443\u043d\u043a\u0446\u0438\u043e\u043d\u0430\u043b: \u041f\u0440\u043e\u0432\u0435\u0440\u0438\u0442\u044c \u0432\u044b\u0432\u043e\u0434 \u0441\u0442\u0440\u0443\u043a\u0442\u0443\u0440\u043d\u043e\u0433\u043e \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u044f # tests/functional/language_specific_features/ru/success/outlines.feature:3\n \u041a\u0430\u043a \u043f\u0440\u043e\u0433\u0440\u0430\u043c\u043c\u0438\u0441\u0442 # tests/functional/language_specific_features/ru/success/outlines.feature:4\n \u0414\u043b\u044f \u0442\u043e\u0433\u043e \u0447\u043e\u0431\u044b lettuce \u0431\u044b\u043b \u043d\u0430\u0434\u0435\u0436\u043d\u044b\u043c # tests/functional/language_specific_features/ru/success/outlines.feature:5\n \u042f \u0445\u043e\u0447\u0443, \u0447\u0442\u043e \u0431\u044b \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u0438 \u0441\u043e \u0441\u0442\u0440\u0443\u043a\u0442\u0443\u0440\u043e\u0439 \u0440\u0430\u0431\u043e\u0442\u0430\u043b\u0438 \u043d\u0430 \u0440\u0443\u0441\u0441\u043a\u043e\u043c # tests/functional/language_specific_features/ru/success/outlines.feature:6\n\n \u0421\u0442\u0440\u0443\u043a\u0442\u0443\u0440\u0430 \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u044f: \u0417\u0430\u043f\u043e\u043b\u043d\u0438\u0442\u044c \u0444\u043e\u0440\u043c\u0443 # tests/functional/language_specific_features/ru/success/outlines.feature:8\n \u041f\u0443\u0441\u043a\u0430\u0439 \u044f \u043e\u0442\u043a\u0440\u044b\u0432\u0430\u044e \u0432 \u0431\u0440\u0430\u0443\u0437\u0435\u0440\u0435 "http://sona-studio.com/contacts/" # tests/functional/language_specific_features/ru/success/outlines_steps.py:12\n \u041a\u043e\u0433\u0434\u0430 \u044f \u0437\u0430\u043f\u043e\u043b\u043d\u044f\u044e \u0432 \u043f\u043e\u043b\u0435 "\u0418\u043c\u044f" "<\u0438\u043c\u044f>" # tests/functional/language_specific_features/ru/success/outlines_steps.py:16\n \u0418 \u044f \u0437\u0430\u043f\u043e\u043b\u043d\u044f\u044e \u0432 \u043f\u043e\u043b\u0435 "Email" "<email>" # tests/functional/language_specific_features/ru/success/outlines_steps.py:24\n \u0418 \u044f \u0437\u0430\u043f\u043e\u043b\u043d\u044f\u044e \u0432 \u043f\u043e\u043b\u0435 "\u0421\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0435" "<\u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0435>" # tests/functional/language_specific_features/ru/success/outlines_steps.py:32\n \u0418 \u044f \u043d\u0430\u0436\u0438\u043c\u0430\u044e "\u041e\u0442\u043f\u0440\u0430\u0432\u0438\u0442\u044c" # tests/functional/language_specific_features/ru/success/outlines_steps.py:40\n \u0422\u043e\u0433\u0434\u0430 \u044f \u043f\u043e\u043b\u0443\u0447\u0430\u044e \u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0435 "\u0421\u043f\u0430\u0441\u0438\u0431\u043e \u0437\u0430 \u0432\u0430\u0448\u0435 \u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0435" # tests/functional/language_specific_features/ru/success/outlines_steps.py:43\n\n \u041f\u0440\u0438\u043c\u0435\u0440\u044b:\n | \u0438\u043c\u044f | email | \u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0435 |\n | \u0412\u0438\u0442\u0430\u043b\u0438\u0439 \u0418\u0433\u043e\u0440\u0435\u0432\u0438\u0447 | john@gmail.org | \u0415\u0441\u0442\u044c \u0438\u043d\u0442\u0435\u0440\u0435\u0441\u043d\u044b\u0439 \u043f\u0440\u043e\u0435\u043a\u0442, \u043d\u0443\u0436\u043d\u043e \u043e\u0431\u0441\u0443\u0434\u0438\u0442\u044c |\n | \u041c\u0430\u0440\u0438\u043d\u0430 \u0411\u0430\u043d\u0440\u0430\u0443\u043b | mary@email.com | \u041c\u043d\u0435 \u043d\u0440\u0430\u0432\u044f\u0442\u0441\u044f \u0432\u0430\u0448\u0438 \u0434\u0438\u0437\u0430\u0439\u043d\u044b, \u0445\u043e\u0447\u0443 \u0441\u0430\u0439\u0442 |\n\n1 feature (1 passed)\n2 scenarios (2 passed)\n12 steps (12 passed)\n')
|
[
"@",
"with_setup",
"(",
"prepare_stdout",
")",
"def",
"test_output_outlines_success_colorless",
"(",
")",
":",
"runner",
"=",
"Runner",
"(",
"join_path",
"(",
"'ru'",
",",
"'success'",
",",
"'outlines.feature'",
")",
",",
"verbosity",
"=",
"3",
",",
"no_color",
"=",
"True",
")",
"runner",
".",
"run",
"(",
")",
"assert_stdout_lines",
"(",
"u'\\n\\u0424\\u0443\\u043d\\u043a\\u0446\\u0438\\u043e\\u043d\\u0430\\u043b: \\u041f\\u0440\\u043e\\u0432\\u0435\\u0440\\u0438\\u0442\\u044c \\u0432\\u044b\\u0432\\u043e\\u0434 \\u0441\\u0442\\u0440\\u0443\\u043a\\u0442\\u0443\\u0440\\u043d\\u043e\\u0433\\u043e \\u0441\\u0446\\u0435\\u043d\\u0430\\u0440\\u0438\\u044f # tests/functional/language_specific_features/ru/success/outlines.feature:3\\n \\u041a\\u0430\\u043a \\u043f\\u0440\\u043e\\u0433\\u0440\\u0430\\u043c\\u043c\\u0438\\u0441\\u0442 # tests/functional/language_specific_features/ru/success/outlines.feature:4\\n \\u0414\\u043b\\u044f \\u0442\\u043e\\u0433\\u043e \\u0447\\u043e\\u0431\\u044b lettuce \\u0431\\u044b\\u043b \\u043d\\u0430\\u0434\\u0435\\u0436\\u043d\\u044b\\u043c # tests/functional/language_specific_features/ru/success/outlines.feature:5\\n \\u042f \\u0445\\u043e\\u0447\\u0443, \\u0447\\u0442\\u043e \\u0431\\u044b \\u0441\\u0446\\u0435\\u043d\\u0430\\u0440\\u0438\\u0438 \\u0441\\u043e \\u0441\\u0442\\u0440\\u0443\\u043a\\u0442\\u0443\\u0440\\u043e\\u0439 \\u0440\\u0430\\u0431\\u043e\\u0442\\u0430\\u043b\\u0438 \\u043d\\u0430 \\u0440\\u0443\\u0441\\u0441\\u043a\\u043e\\u043c # tests/functional/language_specific_features/ru/success/outlines.feature:6\\n\\n \\u0421\\u0442\\u0440\\u0443\\u043a\\u0442\\u0443\\u0440\\u0430 \\u0441\\u0446\\u0435\\u043d\\u0430\\u0440\\u0438\\u044f: \\u0417\\u0430\\u043f\\u043e\\u043b\\u043d\\u0438\\u0442\\u044c \\u0444\\u043e\\u0440\\u043c\\u0443 # tests/functional/language_specific_features/ru/success/outlines.feature:8\\n \\u041f\\u0443\\u0441\\u043a\\u0430\\u0439 \\u044f \\u043e\\u0442\\u043a\\u0440\\u044b\\u0432\\u0430\\u044e \\u0432 \\u0431\\u0440\\u0430\\u0443\\u0437\\u0435\\u0440\\u0435 \"http://sona-studio.com/contacts/\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:12\\n \\u041a\\u043e\\u0433\\u0434\\u0430 \\u044f \\u0437\\u0430\\u043f\\u043e\\u043b\\u043d\\u044f\\u044e \\u0432 \\u043f\\u043e\\u043b\\u0435 \"\\u0418\\u043c\\u044f\" \"<\\u0438\\u043c\\u044f>\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:16\\n \\u0418 \\u044f \\u0437\\u0430\\u043f\\u043e\\u043b\\u043d\\u044f\\u044e \\u0432 \\u043f\\u043e\\u043b\\u0435 \"Email\" \"<email>\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:24\\n \\u0418 \\u044f \\u0437\\u0430\\u043f\\u043e\\u043b\\u043d\\u044f\\u044e \\u0432 \\u043f\\u043e\\u043b\\u0435 \"\\u0421\\u043e\\u043e\\u0431\\u0449\\u0435\\u043d\\u0438\\u0435\" \"<\\u0441\\u043e\\u043e\\u0431\\u0449\\u0435\\u043d\\u0438\\u0435>\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:32\\n \\u0418 \\u044f \\u043d\\u0430\\u0436\\u0438\\u043c\\u0430\\u044e \"\\u041e\\u0442\\u043f\\u0440\\u0430\\u0432\\u0438\\u0442\\u044c\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:40\\n \\u0422\\u043e\\u0433\\u0434\\u0430 \\u044f \\u043f\\u043e\\u043b\\u0443\\u0447\\u0430\\u044e \\u0441\\u043e\\u043e\\u0431\\u0449\\u0435\\u043d\\u0438\\u0435 \"\\u0421\\u043f\\u0430\\u0441\\u0438\\u0431\\u043e \\u0437\\u0430 \\u0432\\u0430\\u0448\\u0435 \\u0441\\u043e\\u043e\\u0431\\u0449\\u0435\\u043d\\u0438\\u0435\" # tests/functional/language_specific_features/ru/success/outlines_steps.py:43\\n\\n \\u041f\\u0440\\u0438\\u043c\\u0435\\u0440\\u044b:\\n | \\u0438\\u043c\\u044f | email | \\u0441\\u043e\\u043e\\u0431\\u0449\\u0435\\u043d\\u0438\\u0435 |\\n | \\u0412\\u0438\\u0442\\u0430\\u043b\\u0438\\u0439 \\u0418\\u0433\\u043e\\u0440\\u0435\\u0432\\u0438\\u0447 | john@gmail.org | \\u0415\\u0441\\u0442\\u044c \\u0438\\u043d\\u0442\\u0435\\u0440\\u0435\\u0441\\u043d\\u044b\\u0439 \\u043f\\u0440\\u043e\\u0435\\u043a\\u0442, \\u043d\\u0443\\u0436\\u043d\\u043e \\u043e\\u0431\\u0441\\u0443\\u0434\\u0438\\u0442\\u044c |\\n | \\u041c\\u0430\\u0440\\u0438\\u043d\\u0430 \\u0411\\u0430\\u043d\\u0440\\u0430\\u0443\\u043b | mary@email.com | \\u041c\\u043d\\u0435 \\u043d\\u0440\\u0430\\u0432\\u044f\\u0442\\u0441\\u044f \\u0432\\u0430\\u0448\\u0438 \\u0434\\u0438\\u0437\\u0430\\u0439\\u043d\\u044b, \\u0445\\u043e\\u0447\\u0443 \\u0441\\u0430\\u0439\\u0442 |\\n\\n1 feature (1 passed)\\n2 scenarios (2 passed)\\n12 steps (12 passed)\\n'",
")"
] |
language: fr -> sucess outlines colorless .
|
train
| false
|
6,296
|
def parse_mtestfile(fname):
with open(fname) as fp:
for line in fp:
if ('--' in line):
line = line[:line.index('--')]
if (not line.strip()):
continue
(lhs, rhs) = line.split('->')
(id, fn, arg) = lhs.split()
rhs_pieces = rhs.split()
exp = rhs_pieces[0]
flags = rhs_pieces[1:]
(yield (id, fn, float(arg), float(exp), flags))
|
[
"def",
"parse_mtestfile",
"(",
"fname",
")",
":",
"with",
"open",
"(",
"fname",
")",
"as",
"fp",
":",
"for",
"line",
"in",
"fp",
":",
"if",
"(",
"'--'",
"in",
"line",
")",
":",
"line",
"=",
"line",
"[",
":",
"line",
".",
"index",
"(",
"'--'",
")",
"]",
"if",
"(",
"not",
"line",
".",
"strip",
"(",
")",
")",
":",
"continue",
"(",
"lhs",
",",
"rhs",
")",
"=",
"line",
".",
"split",
"(",
"'->'",
")",
"(",
"id",
",",
"fn",
",",
"arg",
")",
"=",
"lhs",
".",
"split",
"(",
")",
"rhs_pieces",
"=",
"rhs",
".",
"split",
"(",
")",
"exp",
"=",
"rhs_pieces",
"[",
"0",
"]",
"flags",
"=",
"rhs_pieces",
"[",
"1",
":",
"]",
"(",
"yield",
"(",
"id",
",",
"fn",
",",
"float",
"(",
"arg",
")",
",",
"float",
"(",
"exp",
")",
",",
"flags",
")",
")"
] |
parse a file with test values -- starts a comment blank lines .
|
train
| false
|
6,297
|
def GetServiceVersions(namespace):
def compare(a, b):
if (a == b):
return 0
if (b in parentMap[a]):
return (-1)
if (a in parentMap[b]):
return 1
return ((a > b) - (a < b))
if PY3:
return sorted([v for (v, n) in iteritems(serviceNsMap) if (n == namespace)], key=cmp_to_key(compare))
else:
return sorted([v for (v, n) in iteritems(serviceNsMap) if (n == namespace)], compare)
|
[
"def",
"GetServiceVersions",
"(",
"namespace",
")",
":",
"def",
"compare",
"(",
"a",
",",
"b",
")",
":",
"if",
"(",
"a",
"==",
"b",
")",
":",
"return",
"0",
"if",
"(",
"b",
"in",
"parentMap",
"[",
"a",
"]",
")",
":",
"return",
"(",
"-",
"1",
")",
"if",
"(",
"a",
"in",
"parentMap",
"[",
"b",
"]",
")",
":",
"return",
"1",
"return",
"(",
"(",
"a",
">",
"b",
")",
"-",
"(",
"a",
"<",
"b",
")",
")",
"if",
"PY3",
":",
"return",
"sorted",
"(",
"[",
"v",
"for",
"(",
"v",
",",
"n",
")",
"in",
"iteritems",
"(",
"serviceNsMap",
")",
"if",
"(",
"n",
"==",
"namespace",
")",
"]",
",",
"key",
"=",
"cmp_to_key",
"(",
"compare",
")",
")",
"else",
":",
"return",
"sorted",
"(",
"[",
"v",
"for",
"(",
"v",
",",
"n",
")",
"in",
"iteritems",
"(",
"serviceNsMap",
")",
"if",
"(",
"n",
"==",
"namespace",
")",
"]",
",",
"compare",
")"
] |
get all the versions for the service with specified namespace ordered by compatibility .
|
train
| true
|
6,298
|
def get_suggestion(exploration_id, thread_id):
model = feedback_models.SuggestionModel.get_by_exploration_and_thread_id(exploration_id, thread_id)
return (_get_suggestion_from_model(model) if model else None)
|
[
"def",
"get_suggestion",
"(",
"exploration_id",
",",
"thread_id",
")",
":",
"model",
"=",
"feedback_models",
".",
"SuggestionModel",
".",
"get_by_exploration_and_thread_id",
"(",
"exploration_id",
",",
"thread_id",
")",
"return",
"(",
"_get_suggestion_from_model",
"(",
"model",
")",
"if",
"model",
"else",
"None",
")"
] |
fetches the suggestion for the given thread .
|
train
| false
|
6,300
|
def push_notebook(document=None, state=None, handle=None):
if (state is None):
state = _state
if state.server_enabled:
raise RuntimeError('output_server() has been called, which is incompatible with push_notebook')
if (not document):
document = state.document
if (not document):
warnings.warn('No document to push')
return
if (handle is None):
handle = state.last_comms_handle
if (not handle):
warnings.warn('Cannot find a last shown plot to update. Call output_notebook() and show(..., notebook_handle=True) before push_notebook()')
return
to_json = document.to_json()
if (handle.doc is not document):
msg = dict(doc=to_json)
else:
msg = Document._compute_patch_between_json(handle.json, to_json)
handle.comms.send(json.dumps(msg))
handle.update(document, to_json)
|
[
"def",
"push_notebook",
"(",
"document",
"=",
"None",
",",
"state",
"=",
"None",
",",
"handle",
"=",
"None",
")",
":",
"if",
"(",
"state",
"is",
"None",
")",
":",
"state",
"=",
"_state",
"if",
"state",
".",
"server_enabled",
":",
"raise",
"RuntimeError",
"(",
"'output_server() has been called, which is incompatible with push_notebook'",
")",
"if",
"(",
"not",
"document",
")",
":",
"document",
"=",
"state",
".",
"document",
"if",
"(",
"not",
"document",
")",
":",
"warnings",
".",
"warn",
"(",
"'No document to push'",
")",
"return",
"if",
"(",
"handle",
"is",
"None",
")",
":",
"handle",
"=",
"state",
".",
"last_comms_handle",
"if",
"(",
"not",
"handle",
")",
":",
"warnings",
".",
"warn",
"(",
"'Cannot find a last shown plot to update. Call output_notebook() and show(..., notebook_handle=True) before push_notebook()'",
")",
"return",
"to_json",
"=",
"document",
".",
"to_json",
"(",
")",
"if",
"(",
"handle",
".",
"doc",
"is",
"not",
"document",
")",
":",
"msg",
"=",
"dict",
"(",
"doc",
"=",
"to_json",
")",
"else",
":",
"msg",
"=",
"Document",
".",
"_compute_patch_between_json",
"(",
"handle",
".",
"json",
",",
"to_json",
")",
"handle",
".",
"comms",
".",
"send",
"(",
"json",
".",
"dumps",
"(",
"msg",
")",
")",
"handle",
".",
"update",
"(",
"document",
",",
"to_json",
")"
] |
update bokeh plots in a jupyter notebook output cells with new data or property values .
|
train
| false
|
6,301
|
def _image_present(client, image_uuid):
headers = client.get_image_meta(image_uuid)
return ('status' in headers)
|
[
"def",
"_image_present",
"(",
"client",
",",
"image_uuid",
")",
":",
"headers",
"=",
"client",
".",
"get_image_meta",
"(",
"image_uuid",
")",
"return",
"(",
"'status'",
"in",
"headers",
")"
] |
check if an image is present in glance .
|
train
| false
|
6,302
|
def build_request_with_data(url, data, api_key, method):
http_redirect_with_data_handler = HTTPRedirectWithDataHandler(method=method)
opener = urllib2.build_opener(http_redirect_with_data_handler)
urllib2.install_opener(opener)
url = make_url(url, api_key=api_key, args=None)
request = urllib2.Request(url, headers={'Content-Type': 'application/json'}, data=json.dumps(data))
request_method = request.get_method()
if (request_method != method):
request.get_method = (lambda : method)
return (opener, request)
|
[
"def",
"build_request_with_data",
"(",
"url",
",",
"data",
",",
"api_key",
",",
"method",
")",
":",
"http_redirect_with_data_handler",
"=",
"HTTPRedirectWithDataHandler",
"(",
"method",
"=",
"method",
")",
"opener",
"=",
"urllib2",
".",
"build_opener",
"(",
"http_redirect_with_data_handler",
")",
"urllib2",
".",
"install_opener",
"(",
"opener",
")",
"url",
"=",
"make_url",
"(",
"url",
",",
"api_key",
"=",
"api_key",
",",
"args",
"=",
"None",
")",
"request",
"=",
"urllib2",
".",
"Request",
"(",
"url",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
")",
"request_method",
"=",
"request",
".",
"get_method",
"(",
")",
"if",
"(",
"request_method",
"!=",
"method",
")",
":",
"request",
".",
"get_method",
"=",
"(",
"lambda",
":",
"method",
")",
"return",
"(",
"opener",
",",
"request",
")"
] |
build a request with the received method .
|
train
| false
|
6,304
|
def spArticlesForLang(lang):
if (lang in _SP_ART_CACHE):
return _SP_ART_CACHE[lang]
spArticles = addTrailingSpace(LANG_ARTICLESget(lang, GENERIC_ARTICLES))
_SP_ART_CACHE[lang] = spArticles
return spArticles
|
[
"def",
"spArticlesForLang",
"(",
"lang",
")",
":",
"if",
"(",
"lang",
"in",
"_SP_ART_CACHE",
")",
":",
"return",
"_SP_ART_CACHE",
"[",
"lang",
"]",
"spArticles",
"=",
"addTrailingSpace",
"(",
"LANG_ARTICLESget",
"(",
"lang",
",",
"GENERIC_ARTICLES",
")",
")",
"_SP_ART_CACHE",
"[",
"lang",
"]",
"=",
"spArticles",
"return",
"spArticles"
] |
return lists of articles specific for the given language .
|
train
| false
|
6,306
|
def createFontList(fontfiles, fontext=u'ttf'):
fontlist = []
seen = {}
for fpath in fontfiles:
verbose.report((u'createFontDict: %s' % fpath), u'debug')
fname = os.path.split(fpath)[1]
if (fname in seen):
continue
else:
seen[fname] = 1
if (fontext == u'afm'):
try:
fh = open(fpath, u'rb')
except EnvironmentError:
verbose.report((u'Could not open font file %s' % fpath))
continue
try:
font = afm.AFM(fh)
except RuntimeError:
verbose.report((u'Could not parse font file %s' % fpath))
continue
finally:
fh.close()
try:
prop = afmFontProperty(fpath, font)
except KeyError:
continue
else:
try:
font = ft2font.FT2Font(fpath)
except RuntimeError:
verbose.report((u'Could not open font file %s' % fpath))
continue
except UnicodeError:
verbose.report(u'Cannot handle unicode filenames')
continue
except IOError:
verbose.report((u'IO error - cannot open font file %s' % fpath))
continue
try:
prop = ttfFontProperty(font)
except (KeyError, RuntimeError, ValueError):
continue
fontlist.append(prop)
return fontlist
|
[
"def",
"createFontList",
"(",
"fontfiles",
",",
"fontext",
"=",
"u'ttf'",
")",
":",
"fontlist",
"=",
"[",
"]",
"seen",
"=",
"{",
"}",
"for",
"fpath",
"in",
"fontfiles",
":",
"verbose",
".",
"report",
"(",
"(",
"u'createFontDict: %s'",
"%",
"fpath",
")",
",",
"u'debug'",
")",
"fname",
"=",
"os",
".",
"path",
".",
"split",
"(",
"fpath",
")",
"[",
"1",
"]",
"if",
"(",
"fname",
"in",
"seen",
")",
":",
"continue",
"else",
":",
"seen",
"[",
"fname",
"]",
"=",
"1",
"if",
"(",
"fontext",
"==",
"u'afm'",
")",
":",
"try",
":",
"fh",
"=",
"open",
"(",
"fpath",
",",
"u'rb'",
")",
"except",
"EnvironmentError",
":",
"verbose",
".",
"report",
"(",
"(",
"u'Could not open font file %s'",
"%",
"fpath",
")",
")",
"continue",
"try",
":",
"font",
"=",
"afm",
".",
"AFM",
"(",
"fh",
")",
"except",
"RuntimeError",
":",
"verbose",
".",
"report",
"(",
"(",
"u'Could not parse font file %s'",
"%",
"fpath",
")",
")",
"continue",
"finally",
":",
"fh",
".",
"close",
"(",
")",
"try",
":",
"prop",
"=",
"afmFontProperty",
"(",
"fpath",
",",
"font",
")",
"except",
"KeyError",
":",
"continue",
"else",
":",
"try",
":",
"font",
"=",
"ft2font",
".",
"FT2Font",
"(",
"fpath",
")",
"except",
"RuntimeError",
":",
"verbose",
".",
"report",
"(",
"(",
"u'Could not open font file %s'",
"%",
"fpath",
")",
")",
"continue",
"except",
"UnicodeError",
":",
"verbose",
".",
"report",
"(",
"u'Cannot handle unicode filenames'",
")",
"continue",
"except",
"IOError",
":",
"verbose",
".",
"report",
"(",
"(",
"u'IO error - cannot open font file %s'",
"%",
"fpath",
")",
")",
"continue",
"try",
":",
"prop",
"=",
"ttfFontProperty",
"(",
"font",
")",
"except",
"(",
"KeyError",
",",
"RuntimeError",
",",
"ValueError",
")",
":",
"continue",
"fontlist",
".",
"append",
"(",
"prop",
")",
"return",
"fontlist"
] |
a function to create a font lookup list .
|
train
| false
|
6,307
|
@gof.local_optimizer([sparse.AddSD])
def local_inplace_addsd_ccode(node):
if (isinstance(node.op, sparse.AddSD) and theano.config.cxx):
out_dtype = scalar.upcast(*node.inputs)
if (out_dtype != node.inputs[1].dtype):
return
new_node = AddSD_ccode(format=node.inputs[0].type.format, inplace=True)(*node.inputs)
return [new_node]
return False
|
[
"@",
"gof",
".",
"local_optimizer",
"(",
"[",
"sparse",
".",
"AddSD",
"]",
")",
"def",
"local_inplace_addsd_ccode",
"(",
"node",
")",
":",
"if",
"(",
"isinstance",
"(",
"node",
".",
"op",
",",
"sparse",
".",
"AddSD",
")",
"and",
"theano",
".",
"config",
".",
"cxx",
")",
":",
"out_dtype",
"=",
"scalar",
".",
"upcast",
"(",
"*",
"node",
".",
"inputs",
")",
"if",
"(",
"out_dtype",
"!=",
"node",
".",
"inputs",
"[",
"1",
"]",
".",
"dtype",
")",
":",
"return",
"new_node",
"=",
"AddSD_ccode",
"(",
"format",
"=",
"node",
".",
"inputs",
"[",
"0",
"]",
".",
"type",
".",
"format",
",",
"inplace",
"=",
"True",
")",
"(",
"*",
"node",
".",
"inputs",
")",
"return",
"[",
"new_node",
"]",
"return",
"False"
] |
optimization to insert inplace versions of addsd .
|
train
| false
|
6,308
|
def _item_to_project(iterator, resource):
return Project.from_api_repr(resource)
|
[
"def",
"_item_to_project",
"(",
"iterator",
",",
"resource",
")",
":",
"return",
"Project",
".",
"from_api_repr",
"(",
"resource",
")"
] |
convert a json project to the native object .
|
train
| false
|
6,309
|
@api_versions.wraps('2.35')
@utils.arg('--user', metavar='<user-id>', default=None, help=_('List key-pairs of specified user ID (Admin only).'))
@utils.arg('--marker', dest='marker', metavar='<marker>', default=None, help=_('The last keypair of the previous page; displays list of keypairs after "marker".'))
@utils.arg('--limit', dest='limit', metavar='<limit>', type=int, default=None, help=_("Maximum number of keypairs to display. If limit is bigger than 'CONF.api.max_limit' option of Nova API, limit 'CONF.api.max_limit' will be used instead."))
def do_keypair_list(cs, args):
keypairs = cs.keypairs.list(args.user, args.marker, args.limit)
columns = _get_keypairs_list_columns(cs, args)
utils.print_list(keypairs, columns)
|
[
"@",
"api_versions",
".",
"wraps",
"(",
"'2.35'",
")",
"@",
"utils",
".",
"arg",
"(",
"'--user'",
",",
"metavar",
"=",
"'<user-id>'",
",",
"default",
"=",
"None",
",",
"help",
"=",
"_",
"(",
"'List key-pairs of specified user ID (Admin only).'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'--marker'",
",",
"dest",
"=",
"'marker'",
",",
"metavar",
"=",
"'<marker>'",
",",
"default",
"=",
"None",
",",
"help",
"=",
"_",
"(",
"'The last keypair of the previous page; displays list of keypairs after \"marker\".'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'--limit'",
",",
"dest",
"=",
"'limit'",
",",
"metavar",
"=",
"'<limit>'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"None",
",",
"help",
"=",
"_",
"(",
"\"Maximum number of keypairs to display. If limit is bigger than 'CONF.api.max_limit' option of Nova API, limit 'CONF.api.max_limit' will be used instead.\"",
")",
")",
"def",
"do_keypair_list",
"(",
"cs",
",",
"args",
")",
":",
"keypairs",
"=",
"cs",
".",
"keypairs",
".",
"list",
"(",
"args",
".",
"user",
",",
"args",
".",
"marker",
",",
"args",
".",
"limit",
")",
"columns",
"=",
"_get_keypairs_list_columns",
"(",
"cs",
",",
"args",
")",
"utils",
".",
"print_list",
"(",
"keypairs",
",",
"columns",
")"
] |
print a list of keypairs for a user .
|
train
| false
|
6,310
|
def _BuildArgList(fdesc, names):
numArgs = max(fdesc[6], len(fdesc[2]))
names = list(names)
while (None in names):
i = names.index(None)
names[i] = ('arg%d' % (i,))
names = list(map(MakePublicAttributeName, names[1:]))
name_num = 0
while (len(names) < numArgs):
names.append(('arg%d' % (len(names),)))
for i in range(0, len(names), 5):
names[i] = (names[i] + '\n DCTB DCTB DCTB ')
return (',' + ', '.join(names))
|
[
"def",
"_BuildArgList",
"(",
"fdesc",
",",
"names",
")",
":",
"numArgs",
"=",
"max",
"(",
"fdesc",
"[",
"6",
"]",
",",
"len",
"(",
"fdesc",
"[",
"2",
"]",
")",
")",
"names",
"=",
"list",
"(",
"names",
")",
"while",
"(",
"None",
"in",
"names",
")",
":",
"i",
"=",
"names",
".",
"index",
"(",
"None",
")",
"names",
"[",
"i",
"]",
"=",
"(",
"'arg%d'",
"%",
"(",
"i",
",",
")",
")",
"names",
"=",
"list",
"(",
"map",
"(",
"MakePublicAttributeName",
",",
"names",
"[",
"1",
":",
"]",
")",
")",
"name_num",
"=",
"0",
"while",
"(",
"len",
"(",
"names",
")",
"<",
"numArgs",
")",
":",
"names",
".",
"append",
"(",
"(",
"'arg%d'",
"%",
"(",
"len",
"(",
"names",
")",
",",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"names",
")",
",",
"5",
")",
":",
"names",
"[",
"i",
"]",
"=",
"(",
"names",
"[",
"i",
"]",
"+",
"'\\n DCTB DCTB DCTB '",
")",
"return",
"(",
"','",
"+",
"', '",
".",
"join",
"(",
"names",
")",
")"
] |
builds list of args to the underlying invoke method .
|
train
| false
|
6,314
|
def shells():
shells_fn = '/etc/shells'
ret = []
if os.path.exists(shells_fn):
try:
with salt.utils.fopen(shells_fn, 'r') as shell_fp:
lines = shell_fp.read().splitlines()
for line in lines:
line = line.strip()
if line.startswith('#'):
continue
elif (not line):
continue
else:
ret.append(line)
except OSError:
log.error("File '{0}' was not found".format(shells_fn))
return ret
|
[
"def",
"shells",
"(",
")",
":",
"shells_fn",
"=",
"'/etc/shells'",
"ret",
"=",
"[",
"]",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"shells_fn",
")",
":",
"try",
":",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"shells_fn",
",",
"'r'",
")",
"as",
"shell_fp",
":",
"lines",
"=",
"shell_fp",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"for",
"line",
"in",
"lines",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"line",
".",
"startswith",
"(",
"'#'",
")",
":",
"continue",
"elif",
"(",
"not",
"line",
")",
":",
"continue",
"else",
":",
"ret",
".",
"append",
"(",
"line",
")",
"except",
"OSError",
":",
"log",
".",
"error",
"(",
"\"File '{0}' was not found\"",
".",
"format",
"(",
"shells_fn",
")",
")",
"return",
"ret"
] |
lists the valid shells on this system via the /etc/shells file .
|
train
| false
|
6,315
|
def _urlmatcher_for_gcs_stub(url):
(_, host, _, _, _) = urlparse.urlsplit(url)
return (host == common.LOCAL_API_HOST)
|
[
"def",
"_urlmatcher_for_gcs_stub",
"(",
"url",
")",
":",
"(",
"_",
",",
"host",
",",
"_",
",",
"_",
",",
"_",
")",
"=",
"urlparse",
".",
"urlsplit",
"(",
"url",
")",
"return",
"(",
"host",
"==",
"common",
".",
"LOCAL_API_HOST",
")"
] |
determines whether a url should be handled by gcs stub .
|
train
| false
|
6,318
|
def get_power(**kwargs):
with _IpmiCommand(**kwargs) as s:
return s.get_power()['powerstate']
|
[
"def",
"get_power",
"(",
"**",
"kwargs",
")",
":",
"with",
"_IpmiCommand",
"(",
"**",
"kwargs",
")",
"as",
"s",
":",
"return",
"s",
".",
"get_power",
"(",
")",
"[",
"'powerstate'",
"]"
] |
get current power state the response .
|
train
| false
|
6,321
|
def _url_as_string(url):
if isinstance(url, Request_):
return url.get_full_url()
elif isinstance(url, str):
return url
else:
raise TypeError(('Expected type %r or %r' % (str, Request_)))
|
[
"def",
"_url_as_string",
"(",
"url",
")",
":",
"if",
"isinstance",
"(",
"url",
",",
"Request_",
")",
":",
"return",
"url",
".",
"get_full_url",
"(",
")",
"elif",
"isinstance",
"(",
"url",
",",
"str",
")",
":",
"return",
"url",
"else",
":",
"raise",
"TypeError",
"(",
"(",
"'Expected type %r or %r'",
"%",
"(",
"str",
",",
"Request_",
")",
")",
")"
] |
returns the url string from a url value that is either a string or urllib2 .
|
train
| false
|
6,322
|
def retrieve_seq_length_op2(data):
return tf.reduce_sum(tf.cast(tf.greater(data, tf.zeros_like(data)), tf.int32), 1)
|
[
"def",
"retrieve_seq_length_op2",
"(",
"data",
")",
":",
"return",
"tf",
".",
"reduce_sum",
"(",
"tf",
".",
"cast",
"(",
"tf",
".",
"greater",
"(",
"data",
",",
"tf",
".",
"zeros_like",
"(",
"data",
")",
")",
",",
"tf",
".",
"int32",
")",
",",
"1",
")"
] |
an op to compute the length of a sequence .
|
train
| true
|
6,324
|
def _create_ansi_color_dict(color_cls):
return {u'ansidefault': color_cls.BLACK, u'ansiblack': color_cls.BLACK, u'ansidarkgray': (color_cls.BLACK | color_cls.INTENSITY), u'ansilightgray': color_cls.GRAY, u'ansiwhite': (color_cls.GRAY | color_cls.INTENSITY), u'ansidarkred': color_cls.RED, u'ansidarkgreen': color_cls.GREEN, u'ansibrown': color_cls.YELLOW, u'ansidarkblue': color_cls.BLUE, u'ansipurple': color_cls.MAGENTA, u'ansiteal': color_cls.CYAN, u'ansired': (color_cls.RED | color_cls.INTENSITY), u'ansigreen': (color_cls.GREEN | color_cls.INTENSITY), u'ansiyellow': (color_cls.YELLOW | color_cls.INTENSITY), u'ansiblue': (color_cls.BLUE | color_cls.INTENSITY), u'ansifuchsia': (color_cls.MAGENTA | color_cls.INTENSITY), u'ansiturquoise': (color_cls.CYAN | color_cls.INTENSITY)}
|
[
"def",
"_create_ansi_color_dict",
"(",
"color_cls",
")",
":",
"return",
"{",
"u'ansidefault'",
":",
"color_cls",
".",
"BLACK",
",",
"u'ansiblack'",
":",
"color_cls",
".",
"BLACK",
",",
"u'ansidarkgray'",
":",
"(",
"color_cls",
".",
"BLACK",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansilightgray'",
":",
"color_cls",
".",
"GRAY",
",",
"u'ansiwhite'",
":",
"(",
"color_cls",
".",
"GRAY",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansidarkred'",
":",
"color_cls",
".",
"RED",
",",
"u'ansidarkgreen'",
":",
"color_cls",
".",
"GREEN",
",",
"u'ansibrown'",
":",
"color_cls",
".",
"YELLOW",
",",
"u'ansidarkblue'",
":",
"color_cls",
".",
"BLUE",
",",
"u'ansipurple'",
":",
"color_cls",
".",
"MAGENTA",
",",
"u'ansiteal'",
":",
"color_cls",
".",
"CYAN",
",",
"u'ansired'",
":",
"(",
"color_cls",
".",
"RED",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansigreen'",
":",
"(",
"color_cls",
".",
"GREEN",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansiyellow'",
":",
"(",
"color_cls",
".",
"YELLOW",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansiblue'",
":",
"(",
"color_cls",
".",
"BLUE",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansifuchsia'",
":",
"(",
"color_cls",
".",
"MAGENTA",
"|",
"color_cls",
".",
"INTENSITY",
")",
",",
"u'ansiturquoise'",
":",
"(",
"color_cls",
".",
"CYAN",
"|",
"color_cls",
".",
"INTENSITY",
")",
"}"
] |
create a table that maps the 16 named ansi colors to their windows code .
|
train
| false
|
6,325
|
def getPointsFromSegmentTable(segmentTable):
points = []
endpoints = euclidean.getEndpointsFromSegmentTable(segmentTable)
for endpoint in endpoints:
points.append(endpoint.point)
return points
|
[
"def",
"getPointsFromSegmentTable",
"(",
"segmentTable",
")",
":",
"points",
"=",
"[",
"]",
"endpoints",
"=",
"euclidean",
".",
"getEndpointsFromSegmentTable",
"(",
"segmentTable",
")",
"for",
"endpoint",
"in",
"endpoints",
":",
"points",
".",
"append",
"(",
"endpoint",
".",
"point",
")",
"return",
"points"
] |
get the points from the segment table .
|
train
| false
|
6,326
|
def dict_to_xml(metadata_dict):
build = ET.Element('build')
for (k, v) in metadata_dict.iteritems():
node = ET.SubElement(build, k)
node.text = v
return ET.tostring(build)
|
[
"def",
"dict_to_xml",
"(",
"metadata_dict",
")",
":",
"build",
"=",
"ET",
".",
"Element",
"(",
"'build'",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"metadata_dict",
".",
"iteritems",
"(",
")",
":",
"node",
"=",
"ET",
".",
"SubElement",
"(",
"build",
",",
"k",
")",
"node",
".",
"text",
"=",
"v",
"return",
"ET",
".",
"tostring",
"(",
"build",
")"
] |
turn a simple dict of key/value pairs into xml .
|
train
| false
|
6,327
|
def enable_ssh():
_current = global_settings()
if (_current['Global Settings']['SSH_STATUS']['VALUE'] == 'Y'):
return True
_xml = '<RIBCL VERSION="2.0">\n <LOGIN USER_LOGIN="adminname" PASSWORD="password">\n <RIB_INFO MODE="write">\n <MOD_GLOBAL_SETTINGS>\n <SSH_STATUS value="Yes"/>\n </MOD_GLOBAL_SETTINGS>\n </RIB_INFO>\n </LOGIN>\n </RIBCL>'
return __execute_cmd('Enable_SSH', _xml)
|
[
"def",
"enable_ssh",
"(",
")",
":",
"_current",
"=",
"global_settings",
"(",
")",
"if",
"(",
"_current",
"[",
"'Global Settings'",
"]",
"[",
"'SSH_STATUS'",
"]",
"[",
"'VALUE'",
"]",
"==",
"'Y'",
")",
":",
"return",
"True",
"_xml",
"=",
"'<RIBCL VERSION=\"2.0\">\\n <LOGIN USER_LOGIN=\"adminname\" PASSWORD=\"password\">\\n <RIB_INFO MODE=\"write\">\\n <MOD_GLOBAL_SETTINGS>\\n <SSH_STATUS value=\"Yes\"/>\\n </MOD_GLOBAL_SETTINGS>\\n </RIB_INFO>\\n </LOGIN>\\n </RIBCL>'",
"return",
"__execute_cmd",
"(",
"'Enable_SSH'",
",",
"_xml",
")"
] |
enable the ssh daemon cli example: .
|
train
| false
|
6,328
|
def relpath_to_config_or_make(filename):
prefix = _find_prefix(filename)
return os.path.relpath(os.path.dirname(filename), prefix)
|
[
"def",
"relpath_to_config_or_make",
"(",
"filename",
")",
":",
"prefix",
"=",
"_find_prefix",
"(",
"filename",
")",
"return",
"os",
".",
"path",
".",
"relpath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"filename",
")",
",",
"prefix",
")"
] |
the following is refactored out of hook-sysconfig and hook-distutils .
|
train
| false
|
6,329
|
def send_notification(device_name):
current_time = datetime.now()
sender = 'sender@domain.com'
recipient = 'recipient@domain.com'
subject = 'Device {0} was modified'.format(device_name)
message = '\nThe running configuration of {0} was modified. \n\nThis change was detected at: {1}\n\n'.format(device_name, current_time)
if send_mail(recipient, subject, message, sender):
print 'Email notification sent to {}'.format(recipient)
return True
|
[
"def",
"send_notification",
"(",
"device_name",
")",
":",
"current_time",
"=",
"datetime",
".",
"now",
"(",
")",
"sender",
"=",
"'sender@domain.com'",
"recipient",
"=",
"'recipient@domain.com'",
"subject",
"=",
"'Device {0} was modified'",
".",
"format",
"(",
"device_name",
")",
"message",
"=",
"'\\nThe running configuration of {0} was modified. \\n\\nThis change was detected at: {1}\\n\\n'",
".",
"format",
"(",
"device_name",
",",
"current_time",
")",
"if",
"send_mail",
"(",
"recipient",
",",
"subject",
",",
"message",
",",
"sender",
")",
":",
"print",
"'Email notification sent to {}'",
".",
"format",
"(",
"recipient",
")",
"return",
"True"
] |
notify concerned persons about recurring document generation .
|
train
| false
|
6,330
|
@register.filter
def can_write(obj, user):
return obj.can_write(user)
|
[
"@",
"register",
".",
"filter",
"def",
"can_write",
"(",
"obj",
",",
"user",
")",
":",
"return",
"obj",
".",
"can_write",
"(",
"user",
")"
] |
takes article or related to article model .
|
train
| false
|
6,331
|
def getProfileBaseNameSynonym(repository):
if (repository.getProfileDirectory == None):
return repository.baseNameSynonym
return os.path.join(repository.getProfileDirectory(), repository.baseNameSynonym)
|
[
"def",
"getProfileBaseNameSynonym",
"(",
"repository",
")",
":",
"if",
"(",
"repository",
".",
"getProfileDirectory",
"==",
"None",
")",
":",
"return",
"repository",
".",
"baseNameSynonym",
"return",
"os",
".",
"path",
".",
"join",
"(",
"repository",
".",
"getProfileDirectory",
"(",
")",
",",
"repository",
".",
"baseNameSynonym",
")"
] |
get the profile base file name synonym .
|
train
| false
|
6,332
|
def delete_(*keyname):
mdata = _check_mdata_delete()
valid_keynames = list_()
ret = {}
for k in keyname:
if (mdata and (k in valid_keynames)):
cmd = '{0} {1}'.format(mdata, k)
ret[k] = (__salt__['cmd.run_all'](cmd)['retcode'] == 0)
else:
ret[k] = True
return ret
|
[
"def",
"delete_",
"(",
"*",
"keyname",
")",
":",
"mdata",
"=",
"_check_mdata_delete",
"(",
")",
"valid_keynames",
"=",
"list_",
"(",
")",
"ret",
"=",
"{",
"}",
"for",
"k",
"in",
"keyname",
":",
"if",
"(",
"mdata",
"and",
"(",
"k",
"in",
"valid_keynames",
")",
")",
":",
"cmd",
"=",
"'{0} {1}'",
".",
"format",
"(",
"mdata",
",",
"k",
")",
"ret",
"[",
"k",
"]",
"=",
"(",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"[",
"'retcode'",
"]",
"==",
"0",
")",
"else",
":",
"ret",
"[",
"k",
"]",
"=",
"True",
"return",
"ret"
] |
delete metadata prop : string name of property cli example: .
|
train
| true
|
6,333
|
def organisation():
def prep(r):
query = (FS('organisation_id:po_referral_organisation.id') != None)
r.resource.add_filter(query)
onaccept = s3db.get_config('org_organisation', 'onaccept')
s3db.configure('org_organisation', onaccept=(onaccept, s3db.po_organisation_onaccept))
if (r.record and (r.component_name == 'organisation_household')):
atable = s3db.po_organisation_area
query = ((atable.organisation_id == r.id) & (atable.deleted != True))
rows = db(query).select(atable.area_id)
if rows:
area_ids = [row.area_id for row in rows]
area_ids.append(None)
table = r.component.table
table.household_id.requires.set_filter(filterby='area_id', filter_opts=area_ids)
elif (not r.component):
list_fields = ['name', 'name', (T('Type'), 'organisation_organisation_type.organisation_type_id'), (T('Areas'), 'organisation_area.area_id'), 'website']
s3db.configure('org_organisation', list_fields=list_fields)
if r.interactive:
s3.crud_strings['org_organisation'].update({'label_create': T('Create Agency'), 'title_list': T('Referral Agencies'), 'title_display': T('Agency Details'), 'title_update': T('Edit Agency Details'), 'label_delete_button': T('Delete Agency')})
if (r.component_name == 'area'):
s3.crud_strings['po_organisation_area'].update({'label_create': T('Add Area')})
return True
s3.prep = prep
return s3_rest_controller('org', 'organisation', rheader=s3db.po_rheader)
|
[
"def",
"organisation",
"(",
")",
":",
"def",
"prep",
"(",
"r",
")",
":",
"query",
"=",
"(",
"FS",
"(",
"'organisation_id:po_referral_organisation.id'",
")",
"!=",
"None",
")",
"r",
".",
"resource",
".",
"add_filter",
"(",
"query",
")",
"onaccept",
"=",
"s3db",
".",
"get_config",
"(",
"'org_organisation'",
",",
"'onaccept'",
")",
"s3db",
".",
"configure",
"(",
"'org_organisation'",
",",
"onaccept",
"=",
"(",
"onaccept",
",",
"s3db",
".",
"po_organisation_onaccept",
")",
")",
"if",
"(",
"r",
".",
"record",
"and",
"(",
"r",
".",
"component_name",
"==",
"'organisation_household'",
")",
")",
":",
"atable",
"=",
"s3db",
".",
"po_organisation_area",
"query",
"=",
"(",
"(",
"atable",
".",
"organisation_id",
"==",
"r",
".",
"id",
")",
"&",
"(",
"atable",
".",
"deleted",
"!=",
"True",
")",
")",
"rows",
"=",
"db",
"(",
"query",
")",
".",
"select",
"(",
"atable",
".",
"area_id",
")",
"if",
"rows",
":",
"area_ids",
"=",
"[",
"row",
".",
"area_id",
"for",
"row",
"in",
"rows",
"]",
"area_ids",
".",
"append",
"(",
"None",
")",
"table",
"=",
"r",
".",
"component",
".",
"table",
"table",
".",
"household_id",
".",
"requires",
".",
"set_filter",
"(",
"filterby",
"=",
"'area_id'",
",",
"filter_opts",
"=",
"area_ids",
")",
"elif",
"(",
"not",
"r",
".",
"component",
")",
":",
"list_fields",
"=",
"[",
"'name'",
",",
"'name'",
",",
"(",
"T",
"(",
"'Type'",
")",
",",
"'organisation_organisation_type.organisation_type_id'",
")",
",",
"(",
"T",
"(",
"'Areas'",
")",
",",
"'organisation_area.area_id'",
")",
",",
"'website'",
"]",
"s3db",
".",
"configure",
"(",
"'org_organisation'",
",",
"list_fields",
"=",
"list_fields",
")",
"if",
"r",
".",
"interactive",
":",
"s3",
".",
"crud_strings",
"[",
"'org_organisation'",
"]",
".",
"update",
"(",
"{",
"'label_create'",
":",
"T",
"(",
"'Create Agency'",
")",
",",
"'title_list'",
":",
"T",
"(",
"'Referral Agencies'",
")",
",",
"'title_display'",
":",
"T",
"(",
"'Agency Details'",
")",
",",
"'title_update'",
":",
"T",
"(",
"'Edit Agency Details'",
")",
",",
"'label_delete_button'",
":",
"T",
"(",
"'Delete Agency'",
")",
"}",
")",
"if",
"(",
"r",
".",
"component_name",
"==",
"'area'",
")",
":",
"s3",
".",
"crud_strings",
"[",
"'po_organisation_area'",
"]",
".",
"update",
"(",
"{",
"'label_create'",
":",
"T",
"(",
"'Add Area'",
")",
"}",
")",
"return",
"True",
"s3",
".",
"prep",
"=",
"prep",
"return",
"s3_rest_controller",
"(",
"'org'",
",",
"'organisation'",
",",
"rheader",
"=",
"s3db",
".",
"po_rheader",
")"
] |
restful crud controller .
|
train
| false
|
6,334
|
def getBranchMatrixSetElementNode(elementNode):
branchMatrix = getBranchMatrix(elementNode)
setElementNodeDictionaryMatrix(elementNode, branchMatrix)
return branchMatrix
|
[
"def",
"getBranchMatrixSetElementNode",
"(",
"elementNode",
")",
":",
"branchMatrix",
"=",
"getBranchMatrix",
"(",
"elementNode",
")",
"setElementNodeDictionaryMatrix",
"(",
"elementNode",
",",
"branchMatrix",
")",
"return",
"branchMatrix"
] |
get matrix starting from the object if it exists .
|
train
| false
|
6,335
|
def add_ops(op_classes):
def f(cls):
for (op_attr_name, op_class) in compat.iteritems(op_classes):
ops = getattr(cls, '{0}_ops'.format(op_attr_name))
ops_map = getattr(cls, '{0}_op_nodes_map'.format(op_attr_name))
for op in ops:
op_node = ops_map[op]
if (op_node is not None):
made_op = _op_maker(op_class, op)
setattr(cls, 'visit_{0}'.format(op_node), made_op)
return cls
return f
|
[
"def",
"add_ops",
"(",
"op_classes",
")",
":",
"def",
"f",
"(",
"cls",
")",
":",
"for",
"(",
"op_attr_name",
",",
"op_class",
")",
"in",
"compat",
".",
"iteritems",
"(",
"op_classes",
")",
":",
"ops",
"=",
"getattr",
"(",
"cls",
",",
"'{0}_ops'",
".",
"format",
"(",
"op_attr_name",
")",
")",
"ops_map",
"=",
"getattr",
"(",
"cls",
",",
"'{0}_op_nodes_map'",
".",
"format",
"(",
"op_attr_name",
")",
")",
"for",
"op",
"in",
"ops",
":",
"op_node",
"=",
"ops_map",
"[",
"op",
"]",
"if",
"(",
"op_node",
"is",
"not",
"None",
")",
":",
"made_op",
"=",
"_op_maker",
"(",
"op_class",
",",
"op",
")",
"setattr",
"(",
"cls",
",",
"'visit_{0}'",
".",
"format",
"(",
"op_node",
")",
",",
"made_op",
")",
"return",
"cls",
"return",
"f"
] |
decorator to add default implementation of ops .
|
train
| false
|
6,336
|
def _ipv4_to_bits(ipaddr):
return ''.join([bin(int(x))[2:].rjust(8, '0') for x in ipaddr.split('.')])
|
[
"def",
"_ipv4_to_bits",
"(",
"ipaddr",
")",
":",
"return",
"''",
".",
"join",
"(",
"[",
"bin",
"(",
"int",
"(",
"x",
")",
")",
"[",
"2",
":",
"]",
".",
"rjust",
"(",
"8",
",",
"'0'",
")",
"for",
"x",
"in",
"ipaddr",
".",
"split",
"(",
"'.'",
")",
"]",
")"
] |
accepts an ipv4 dotted quad and returns a string representing its binary counterpart .
|
train
| true
|
6,337
|
def succeed_with_changes(name):
ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'Success!'}
ret['changes'] = {'testing': {'old': 'Unchanged', 'new': 'Something pretended to change'}}
if __opts__['test']:
ret['result'] = None
ret['comment'] = "If we weren't testing, this would be successful with changes"
return ret
|
[
"def",
"succeed_with_changes",
"(",
"name",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"True",
",",
"'comment'",
":",
"'Success!'",
"}",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'testing'",
":",
"{",
"'old'",
":",
"'Unchanged'",
",",
"'new'",
":",
"'Something pretended to change'",
"}",
"}",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"'result'",
"]",
"=",
"None",
"ret",
"[",
"'comment'",
"]",
"=",
"\"If we weren't testing, this would be successful with changes\"",
"return",
"ret"
] |
returns successful and changes is not empty .
|
train
| false
|
6,338
|
def get_generator_names_descriptions():
descs = []
for language in registered_languages:
for generator in language.html_generators:
description = getattr(generator, 'description', None)
if (description is None):
description = generator.name
descs.append((generator.name, description))
return descs
|
[
"def",
"get_generator_names_descriptions",
"(",
")",
":",
"descs",
"=",
"[",
"]",
"for",
"language",
"in",
"registered_languages",
":",
"for",
"generator",
"in",
"language",
".",
"html_generators",
":",
"description",
"=",
"getattr",
"(",
"generator",
",",
"'description'",
",",
"None",
")",
"if",
"(",
"description",
"is",
"None",
")",
":",
"description",
"=",
"generator",
".",
"name",
"descs",
".",
"append",
"(",
"(",
"generator",
".",
"name",
",",
"description",
")",
")",
"return",
"descs"
] |
return a tuple of the name and description .
|
train
| false
|
6,340
|
def get_flowgram_ali_exe():
return 'FlowgramAli_4frame'
|
[
"def",
"get_flowgram_ali_exe",
"(",
")",
":",
"return",
"'FlowgramAli_4frame'"
] |
return the executable name of the flowgram alignment prog .
|
train
| false
|
6,341
|
@pytest.mark.django_db
def test_verify_user(member_with_email):
with pytest.raises(EmailAddress.DoesNotExist):
EmailAddress.objects.get(user=member_with_email, verified=True)
accounts.utils.verify_user(member_with_email)
EmailAddress.objects.get(user=member_with_email, email='member_with_email@this.test', primary=True, verified=True)
|
[
"@",
"pytest",
".",
"mark",
".",
"django_db",
"def",
"test_verify_user",
"(",
"member_with_email",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"EmailAddress",
".",
"DoesNotExist",
")",
":",
"EmailAddress",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"member_with_email",
",",
"verified",
"=",
"True",
")",
"accounts",
".",
"utils",
".",
"verify_user",
"(",
"member_with_email",
")",
"EmailAddress",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"member_with_email",
",",
"email",
"=",
"'member_with_email@this.test'",
",",
"primary",
"=",
"True",
",",
"verified",
"=",
"True",
")"
] |
test verifying user using verify_user function .
|
train
| false
|
6,342
|
def pretty_atom(atom_name, default=None):
if _use_unicode:
return atoms_table[atom_name]
else:
if (default is not None):
return default
raise KeyError('only unicode')
|
[
"def",
"pretty_atom",
"(",
"atom_name",
",",
"default",
"=",
"None",
")",
":",
"if",
"_use_unicode",
":",
"return",
"atoms_table",
"[",
"atom_name",
"]",
"else",
":",
"if",
"(",
"default",
"is",
"not",
"None",
")",
":",
"return",
"default",
"raise",
"KeyError",
"(",
"'only unicode'",
")"
] |
return pretty representation of an atom .
|
train
| false
|
6,344
|
def PRGA(S):
i = 0
j = 0
while True:
i = ((i + 1) % 256)
j = ((j + S[i]) % 256)
(S[i], S[j]) = (S[j], S[i])
K = S[((S[i] + S[j]) % 256)]
(yield K)
|
[
"def",
"PRGA",
"(",
"S",
")",
":",
"i",
"=",
"0",
"j",
"=",
"0",
"while",
"True",
":",
"i",
"=",
"(",
"(",
"i",
"+",
"1",
")",
"%",
"256",
")",
"j",
"=",
"(",
"(",
"j",
"+",
"S",
"[",
"i",
"]",
")",
"%",
"256",
")",
"(",
"S",
"[",
"i",
"]",
",",
"S",
"[",
"j",
"]",
")",
"=",
"(",
"S",
"[",
"j",
"]",
",",
"S",
"[",
"i",
"]",
")",
"K",
"=",
"S",
"[",
"(",
"(",
"S",
"[",
"i",
"]",
"+",
"S",
"[",
"j",
"]",
")",
"%",
"256",
")",
"]",
"(",
"yield",
"K",
")"
] |
run pseudo-random generation algorithm .
|
train
| true
|
6,345
|
def _rewrite_sin(m_n, s, a, b):
from sympy import expand_mul, pi, ceiling, gamma
(m, n) = m_n
m = expand_mul((m / pi))
n = expand_mul((n / pi))
r = ceiling((((- m) * a) - n.as_real_imag()[0]))
return (gamma((((m * s) + n) + r)), gamma((((1 - n) - r) - (m * s))), (((-1) ** r) * pi))
|
[
"def",
"_rewrite_sin",
"(",
"m_n",
",",
"s",
",",
"a",
",",
"b",
")",
":",
"from",
"sympy",
"import",
"expand_mul",
",",
"pi",
",",
"ceiling",
",",
"gamma",
"(",
"m",
",",
"n",
")",
"=",
"m_n",
"m",
"=",
"expand_mul",
"(",
"(",
"m",
"/",
"pi",
")",
")",
"n",
"=",
"expand_mul",
"(",
"(",
"n",
"/",
"pi",
")",
")",
"r",
"=",
"ceiling",
"(",
"(",
"(",
"(",
"-",
"m",
")",
"*",
"a",
")",
"-",
"n",
".",
"as_real_imag",
"(",
")",
"[",
"0",
"]",
")",
")",
"return",
"(",
"gamma",
"(",
"(",
"(",
"(",
"m",
"*",
"s",
")",
"+",
"n",
")",
"+",
"r",
")",
")",
",",
"gamma",
"(",
"(",
"(",
"(",
"1",
"-",
"n",
")",
"-",
"r",
")",
"-",
"(",
"m",
"*",
"s",
")",
")",
")",
",",
"(",
"(",
"(",
"-",
"1",
")",
"**",
"r",
")",
"*",
"pi",
")",
")"
] |
re-write the sine function sin as gamma functions .
|
train
| false
|
6,348
|
def isunauthenticated(func):
return getattr(func, 'unauthenticated', False)
|
[
"def",
"isunauthenticated",
"(",
"func",
")",
":",
"return",
"getattr",
"(",
"func",
",",
"'unauthenticated'",
",",
"False",
")"
] |
checks to see if the function is marked as not requiring authentication with the @unauthenticated decorator .
|
train
| false
|
6,349
|
def convert_db_torrent_to_json(torrent, include_rel_score=False):
torrent_name = torrent[2]
if ((torrent_name is None) or (len(torrent_name.strip()) == 0)):
torrent_name = 'Unnamed torrent'
res_json = {'id': torrent[0], 'infohash': torrent[1].encode('hex'), 'name': torrent_name, 'size': torrent[3], 'category': torrent[4], 'num_seeders': (torrent[5] or 0), 'num_leechers': (torrent[6] or 0), 'last_tracker_check': (torrent[7] or 0)}
if include_rel_score:
res_json['relevance_score'] = torrent[9]
return res_json
|
[
"def",
"convert_db_torrent_to_json",
"(",
"torrent",
",",
"include_rel_score",
"=",
"False",
")",
":",
"torrent_name",
"=",
"torrent",
"[",
"2",
"]",
"if",
"(",
"(",
"torrent_name",
"is",
"None",
")",
"or",
"(",
"len",
"(",
"torrent_name",
".",
"strip",
"(",
")",
")",
"==",
"0",
")",
")",
":",
"torrent_name",
"=",
"'Unnamed torrent'",
"res_json",
"=",
"{",
"'id'",
":",
"torrent",
"[",
"0",
"]",
",",
"'infohash'",
":",
"torrent",
"[",
"1",
"]",
".",
"encode",
"(",
"'hex'",
")",
",",
"'name'",
":",
"torrent_name",
",",
"'size'",
":",
"torrent",
"[",
"3",
"]",
",",
"'category'",
":",
"torrent",
"[",
"4",
"]",
",",
"'num_seeders'",
":",
"(",
"torrent",
"[",
"5",
"]",
"or",
"0",
")",
",",
"'num_leechers'",
":",
"(",
"torrent",
"[",
"6",
"]",
"or",
"0",
")",
",",
"'last_tracker_check'",
":",
"(",
"torrent",
"[",
"7",
"]",
"or",
"0",
")",
"}",
"if",
"include_rel_score",
":",
"res_json",
"[",
"'relevance_score'",
"]",
"=",
"torrent",
"[",
"9",
"]",
"return",
"res_json"
] |
this method converts a torrent in the database to a json dictionary .
|
train
| false
|
6,350
|
def taggedsent_to_conll(sentence):
for (i, (word, tag)) in enumerate(sentence, start=1):
input_str = [str(i), word, '_', tag, tag, '_', '0', 'a', '_', '_']
input_str = (' DCTB '.join(input_str) + '\n')
(yield input_str)
|
[
"def",
"taggedsent_to_conll",
"(",
"sentence",
")",
":",
"for",
"(",
"i",
",",
"(",
"word",
",",
"tag",
")",
")",
"in",
"enumerate",
"(",
"sentence",
",",
"start",
"=",
"1",
")",
":",
"input_str",
"=",
"[",
"str",
"(",
"i",
")",
",",
"word",
",",
"'_'",
",",
"tag",
",",
"tag",
",",
"'_'",
",",
"'0'",
",",
"'a'",
",",
"'_'",
",",
"'_'",
"]",
"input_str",
"=",
"(",
"' DCTB '",
".",
"join",
"(",
"input_str",
")",
"+",
"'\\n'",
")",
"(",
"yield",
"input_str",
")"
] |
a module to convert a single pos tagged sentence into conll format .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.