id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
def _est_time_to_hour(cluster_summary, now=None):
    """Estimate how long until the cluster completes its current full
    hour of runtime (a billing-hour boundary).

    :param cluster_summary: object whose ``status.timeline.creationdatetime``
        attribute (if present) is an ISO8601 creation timestamp.
    :param now: reference time; defaults to ``datetime.utcnow()``.
    :returns: a ``timedelta`` in the half-open range (0, 1 hour]; a full
        hour when the creation time is unavailable.
    """
    if (now is None):
        now = datetime.utcnow()
    # Defensive attribute chain: any missing link yields None.
    timeline = getattr(getattr(cluster_summary, 'status', None), 'timeline', None)
    creationdatetime = getattr(timeline, 'creationdatetime', None)
    if creationdatetime:
        start = iso8601_to_datetime(creationdatetime)
    else:
        # No creation time available; assume a full hour remains.
        return timedelta(minutes=60)
    run_time = (now - start)
    # For a positive run_time, (-run_time).seconds normalizes to
    # 86400 - run_time.seconds, so taking it mod 3600 gives the number of
    # seconds remaining until the next full hour of runtime; 'or 3600.0'
    # maps an exact hour boundary (0 remaining) to a whole hour.
    return timedelta(seconds=(((- run_time).seconds % 3600.0) or 3600.0))
| [
"def",
"_est_time_to_hour",
"(",
"cluster_summary",
",",
"now",
"=",
"None",
")",
":",
"if",
"(",
"now",
"is",
"None",
")",
":",
"now",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"timeline",
"=",
"getattr",
"(",
"getattr",
"(",
"cluster_summary",
",",
"... | how long before job reaches the end of the next full hour since it began . | train | false |
def modify(name, **kwargs):
    """Modify the schedule job called *name* with the given options.

    Returns a result dict with 'comment', 'changes' (a unified diff of
    the old vs. new job definition) and 'result' keys.
    """
    ret = {'comment': '', 'changes': {}, 'result': True}
    # Fixed-interval options cannot be combined with 'when' or 'cron'.
    time_conflict = False
    for item in ['seconds', 'minutes', 'hours', 'days']:
        if ((item in kwargs) and ('when' in kwargs)):
            time_conflict = True
        if ((item in kwargs) and ('cron' in kwargs)):
            time_conflict = True
    if time_conflict:
        ret['result'] = False
        ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" option.'
        return ret
    if (('when' in kwargs) and ('cron' in kwargs)):
        ret['result'] = False
        ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
        return ret
    current_schedule = list_(show_all=True, return_yaml=False)
    if (name not in current_schedule):
        ret['comment'] = 'Job {0} does not exist in schedule.'.format(name)
        ret['result'] = False
        return ret
    _current = current_schedule[name]
    # Normalize the internal '_seconds' key so comparison and diffing
    # operate on the public 'seconds' name.
    if ('_seconds' in _current):
        _current['seconds'] = _current['_seconds']
        del _current['_seconds']
    _new = build_schedule_item(name, **kwargs)
    if (_new == _current):
        ret['comment'] = 'Job {0} in correct state'.format(name)
        return ret
    # Human-readable diff of sorted key:value lines for the 'changes' key.
    _current_lines = ['{0}:{1}\n'.format(key, value) for (key, value) in sorted(_current.items())]
    _new_lines = ['{0}:{1}\n'.format(key, value) for (key, value) in sorted(_new.items())]
    _diff = difflib.unified_diff(_current_lines, _new_lines)
    ret['changes']['diff'] = ''.join(_diff)
    if (('test' in kwargs) and kwargs['test']):
        # Dry-run: report what would change without firing the event.
        ret['comment'] = 'Job: {0} would be modified in schedule.'.format(name)
    else:
        persist = True
        if ('persist' in kwargs):
            persist = kwargs['persist']
        # Jobs sourced from pillar are never persisted locally.
        if (name in list_(show_all=True, where='opts', return_yaml=False)):
            event_data = {'name': name, 'schedule': _new, 'func': 'modify', 'persist': persist}
        elif (name in list_(show_all=True, where='pillar', return_yaml=False)):
            event_data = {'name': name, 'schedule': _new, 'where': 'pillar', 'func': 'modify', 'persist': False}
        # NOTE(review): if *name* appears in neither the 'opts' nor the
        # 'pillar' listing, event_data would be unbound here -- presumably
        # the combined list_() check above guarantees one of the two;
        # confirm against list_'s implementation.
        out = __salt__['event.fire'](event_data, 'manage_schedule')
        if out:
            ret['comment'] = 'Modified job: {0} in schedule.'.format(name)
        else:
            ret['comment'] = 'Failed to modify job {0} in schedule.'.format(name)
            ret['result'] = False
    return ret
| [
"def",
"modify",
"(",
"name",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"{",
"'comment'",
":",
"''",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"True",
"}",
"time_conflict",
"=",
"False",
"for",
"item",
"in",
"[",
"'seconds'",
",",
"'m... | modify section 9 . | train | true |
@register.simple_tag
def locale_dir():
    """Return the text direction of the active locale: 'rtl' or 'ltr'."""
    if trans_real.get_language_bidi():
        return 'rtl'
    return 'ltr'
| [
"@",
"register",
".",
"simple_tag",
"def",
"locale_dir",
"(",
")",
":",
"return",
"(",
"(",
"trans_real",
".",
"get_language_bidi",
"(",
")",
"and",
"'rtl'",
")",
"or",
"'ltr'",
")"
] | returns current locales direction . | train | false |
def run_remotely(username, address, commands, port=22, log_command_filter=identity):
    """Build an Effect that runs *commands* on a remote host.

    Wraps the parameters in a RunRemotely intent; the actual execution
    is performed by whatever performer handles that intent.
    """
    intent = RunRemotely(
        username=username,
        address=address,
        commands=commands,
        port=port,
        log_command_filter=log_command_filter,
    )
    return Effect(intent)
| [
"def",
"run_remotely",
"(",
"username",
",",
"address",
",",
"commands",
",",
"port",
"=",
"22",
",",
"log_command_filter",
"=",
"identity",
")",
":",
"return",
"Effect",
"(",
"RunRemotely",
"(",
"username",
"=",
"username",
",",
"address",
"=",
"address",
... | run some commands on a remote host . | train | false |
def generate_conflicting_plot_options_with_json_writes_of_config():
    """Attach one generated test method per conflicting plot-option set.

    Each generated test writes the conflicting options into the user's
    on-disk config file and asserts that ``py._plot_option_logic`` raises
    ``PlotlyError``.
    """
    def gen_test(plot_options):
        # Factory so *plot_options* is bound per test method (avoids the
        # late-binding closure pitfall in the enumerate loop below).
        def test(self):
            config = _json.load(open(CONFIG_FILE))
            # Rewrite the config file with the conflicting options merged in.
            with open(CONFIG_FILE, 'w') as f:
                config.update(plot_options)
                f.write(_json.dumps(config))
            self.assertRaises(PlotlyError, py._plot_option_logic, {})
        return test
    for (i, plot_options) in enumerate(TestPlotOptionLogic.conflicting_option_set):
        setattr(TestPlotOptionLogic, 'test_conflicting_plot_options_with_json_writes_of_config{}'.format(i), gen_test(plot_options))
| [
"def",
"generate_conflicting_plot_options_with_json_writes_of_config",
"(",
")",
":",
"def",
"gen_test",
"(",
"plot_options",
")",
":",
"def",
"test",
"(",
"self",
")",
":",
"config",
"=",
"_json",
".",
"load",
"(",
"open",
"(",
"CONFIG_FILE",
")",
")",
"with"... | if the user wrote their own options in the config file . | train | false |
def legalize_path(path, replacements, length, extension, fragment):
    """Legalize *path* (replacements + truncation), in two stages.

    The second stage re-legalizes the first stage's output with the
    extension stripped, since truncation can re-introduce text that the
    replacements should have removed. Returns a tuple
    ``(legalized_path, retruncated)``.
    """
    if fragment:
        # Fragments are handled as text; decode the extension bytes.
        extension = extension.decode('utf-8', 'ignore')
    (first_stage_path, _) = _legalize_stage(path, replacements, length, extension, fragment)
    # Convert to a displayable string and drop the extension so the
    # second stage operates on the bare path.
    (first_stage_path, _) = os.path.splitext(displayable_path(first_stage_path))
    (second_stage_path, retruncated) = _legalize_stage(first_stage_path, replacements, length, extension, fragment)
    if retruncated:
        # Replacements and truncation interacted; redo the second stage
        # without replacements -- presumably to avoid an endless
        # replace/truncate cycle.
        (second_stage_path, _) = _legalize_stage(first_stage_path, None, length, extension, fragment)
    return (second_stage_path, retruncated)
| [
"def",
"legalize_path",
"(",
"path",
",",
"replacements",
",",
"length",
",",
"extension",
",",
"fragment",
")",
":",
"if",
"fragment",
":",
"extension",
"=",
"extension",
".",
"decode",
"(",
"'utf-8'",
",",
"'ignore'",
")",
"(",
"first_stage_path",
",",
"... | given a path-like unicode string . | train | false |
def get_config_path(home_dir=None):
    """Return the full path of the standard mne-python JSON config file.

    :param home_dir: optional override for the user's home directory,
        forwarded to _get_extra_data_path.
    """
    return op.join(_get_extra_data_path(home_dir=home_dir), 'mne-python.json')
| [
"def",
"get_config_path",
"(",
"home_dir",
"=",
"None",
")",
":",
"val",
"=",
"op",
".",
"join",
"(",
"_get_extra_data_path",
"(",
"home_dir",
"=",
"home_dir",
")",
",",
"'mne-python.json'",
")",
"return",
"val"
] | get path to standard mne-python config file . | train | false |
def get_required_packages(file_contents):
    """Extract required package names from setup.py source text.

    Expects exactly one ``<INST_REQS_KWARG>=<REQ_VAR>`` assignment (the
    exact names come from the module-level constants). Returns each
    requirement's first whitespace-separated token, i.e. the bare package
    name with any version specifier dropped.

    :raises ValueError: when the keyword or keyword=variable statement
        does not appear exactly once.
    """
    if (file_contents.count(INST_REQS_KWARG) != 1):
        raise ValueError('Expected only one use of keyword', INST_REQS_KWARG, file_contents)
    keyword_stmt = ((INST_REQS_KWARG + '=') + REQ_VAR)
    if (file_contents.count(keyword_stmt) != 1):
        raise ValueError('Expected keyword to be set with variable', INST_REQS_KWARG, REQ_VAR, file_contents)
    # NOTE(review): the 3-way unpack assumes REQ_VAR occurs exactly twice
    # in the file (its definition and the keyword use) -- confirm.
    (_, reqs_section, _) = file_contents.split(REQ_VAR)
    # Take the literal list between the first '[' and ']' after REQ_VAR
    # and parse it safely (no code execution).
    reqs_begin = reqs_section.index('[')
    reqs_end = (reqs_section.index(']') + 1)
    reqs_list_text = reqs_section[reqs_begin:reqs_end]
    requirements = ast.literal_eval(reqs_list_text)
    result = []
    for required in requirements:
        parts = required.split()
        result.append(parts[0])
    return result
| [
"def",
"get_required_packages",
"(",
"file_contents",
")",
":",
"if",
"(",
"file_contents",
".",
"count",
"(",
"INST_REQS_KWARG",
")",
"!=",
"1",
")",
":",
"raise",
"ValueError",
"(",
"'Expected only one use of keyword'",
",",
"INST_REQS_KWARG",
",",
"file_contents"... | get required packages from a setup . | train | false |
@asyncio.coroutine
@hook.irc_raw('PART')
def on_join(conn, chan, target):
    """Raw-line hook fired on IRC PART lines.

    When the parting nick is the bot itself, notifies
    bot_joined_channel for the channel.

    NOTE(review): despite being named 'on_join', this handler is
    registered for the raw 'PART' command and calls bot_joined_channel --
    the naming looks inconsistent; confirm intended behavior.
    """
    if (target == conn.nick):
        bot_joined_channel(conn, chan)
| [
"@",
"asyncio",
".",
"coroutine",
"@",
"hook",
".",
"irc_raw",
"(",
"'PART'",
")",
"def",
"on_join",
"(",
"conn",
",",
"chan",
",",
"target",
")",
":",
"if",
"(",
"target",
"==",
"conn",
".",
"nick",
")",
":",
"bot_joined_channel",
"(",
"conn",
",",
... | this is user code triggered when a session was created on top of a connection . | train | false |
def close_remote_debugger(rpcclt):
    """Shut down the subprocess debugger and detach the IDLE side.

    Requests that the RPC server close the subprocess-side debugger,
    then unregisters the GUI adapter object from the RPC client.
    """
    close_subprocess_debugger(rpcclt)
    rpcclt.unregister(gui_adap_oid)
| [
"def",
"close_remote_debugger",
"(",
"rpcclt",
")",
":",
"close_subprocess_debugger",
"(",
"rpcclt",
")",
"rpcclt",
".",
"unregister",
"(",
"gui_adap_oid",
")"
] | shut down subprocess debugger and idle side of debugger rpc link request that the rpcserver shut down the subprocess debugger and link . | train | false |
def MACDFIX(ds, count, signalperiod=(- (2 ** 31))):
    """Moving Average Convergence/Divergence Fix 12/26.

    Delegates to TA-Lib's MACDFIX through call_talib_with_ds. When the
    helper returns None the result is normalized to (None, None, None)
    so callers can always unpack three values.
    """
    outcome = call_talib_with_ds(ds, count, talib.MACDFIX, signalperiod)
    if outcome is None:
        return (None, None, None)
    return outcome
| [
"def",
"MACDFIX",
"(",
"ds",
",",
"count",
",",
"signalperiod",
"=",
"(",
"-",
"(",
"2",
"**",
"31",
")",
")",
")",
":",
"ret",
"=",
"call_talib_with_ds",
"(",
"ds",
",",
"count",
",",
"talib",
".",
"MACDFIX",
",",
"signalperiod",
")",
"if",
"(",
... | moving average convergence/divergence fix 12/26 . | train | false |
def get_key_reference(scope, name, user=None):
    """Resolve a key name to its reference for the given scope.

    System-scoped keys are referenced by their plain name; user-scoped
    keys are qualified with *user* via UserKeyReference.

    :raises InvalidUserException: user scope requested without a user.
    :raises InvalidScopeException: unknown scope value.
    """
    if scope in (SYSTEM_SCOPE, FULL_SYSTEM_SCOPE):
        return name
    if scope in (USER_SCOPE, FULL_USER_SCOPE):
        if not user:
            raise InvalidUserException('A valid user must be specified for user key ref.')
        return UserKeyReference(name=name, user=user).ref
    raise InvalidScopeException(('Scope "%s" is not valid. Allowed scopes are %s.' % (scope, ALLOWED_SCOPES)))
| [
"def",
"get_key_reference",
"(",
"scope",
",",
"name",
",",
"user",
"=",
"None",
")",
":",
"if",
"(",
"(",
"scope",
"==",
"SYSTEM_SCOPE",
")",
"or",
"(",
"scope",
"==",
"FULL_SYSTEM_SCOPE",
")",
")",
":",
"return",
"name",
"elif",
"(",
"(",
"scope",
... | given a key name and user this method returns a new name to address the key value pair in the context of that user . | train | false |
def generate_authors():
    """(Re)create the AUTHORS file from git commit history.

    Skipped entirely when the SKIP_GENERATE_AUTHORS environment variable
    is set; outside a git checkout an empty AUTHORS file is written.
    """
    # CI account to exclude from the generated author list (egrep pattern).
    jenkins_email = 'jenkins@review.(openstack|stackforge).org'
    old_authors = 'AUTHORS.in'
    new_authors = 'AUTHORS'
    if (not os.getenv('SKIP_GENERATE_AUTHORS')):
        if os.path.isdir('.git'):
            # Unique "Name <email>" lines from git history, minus Jenkins.
            git_log_cmd = (("git log --format='%aN <%aE>' | sort -u | egrep -v '" + jenkins_email) + "'")
            changelog = _run_shell_command(git_log_cmd)
            mailmap = parse_mailmap()
            with open(new_authors, 'w') as new_authors_fh:
                new_authors_fh.write(canonicalize_emails(changelog, mailmap))
                # Append any hand-maintained entries from AUTHORS.in.
                if os.path.exists(old_authors):
                    with open(old_authors, 'r') as old_authors_fh:
                        new_authors_fh.write(('\n' + old_authors_fh.read()))
        else:
            # Not a git checkout: just create/truncate an empty AUTHORS.
            open(new_authors, 'w').close()
| [
"def",
"generate_authors",
"(",
")",
":",
"jenkins_email",
"=",
"'jenkins@review.(openstack|stackforge).org'",
"old_authors",
"=",
"'AUTHORS.in'",
"new_authors",
"=",
"'AUTHORS'",
"if",
"(",
"not",
"os",
".",
"getenv",
"(",
"'SKIP_GENERATE_AUTHORS'",
")",
")",
":",
... | create authors file using git commits . | train | false |
def _get_system_version():
    """Return the OS X system version as a 'major.minor' string.

    The value is parsed once from SystemVersion.plist and cached in the
    module-level _SYSTEM_VERSION; when the file is missing (non-Mac
    systems) the cached value is the empty string.
    """
    global _SYSTEM_VERSION
    if _SYSTEM_VERSION is None:
        # Cache '' up front so a missing/unparseable file is not retried.
        _SYSTEM_VERSION = ''
        try:
            fp = open('/System/Library/CoreServices/SystemVersion.plist')
        except OSError:
            pass
        else:
            with fp:
                payload = fp.read()
            match = re.search('<key>ProductUserVisibleVersion</key>\\s*<string>(.*?)</string>', payload)
            if match is not None:
                # Keep only the first two (major.minor) components.
                _SYSTEM_VERSION = '.'.join(match.group(1).split('.')[:2])
    return _SYSTEM_VERSION
| [
"def",
"_get_system_version",
"(",
")",
":",
"global",
"_SYSTEM_VERSION",
"if",
"(",
"_SYSTEM_VERSION",
"is",
"None",
")",
":",
"_SYSTEM_VERSION",
"=",
"''",
"try",
":",
"f",
"=",
"open",
"(",
"'/System/Library/CoreServices/SystemVersion.plist'",
")",
"except",
"O... | return the os x system version as a string . | train | false |
@verbose
def lookup_by_userid_demo():
    """Demo: resolve the user IDs in USERIDS to screen names via the
    Twitter REST API and print each user's follower/following counts.
    """
    oauth = credsfromfile()
    client = Query(**oauth)
    user_info = client.user_info_from_id(USERIDS)
    for info in user_info:
        name = info['screen_name']
        followers = info['followers_count']
        following = info['friends_count']
        print('{0}, followers: {1}, following: {2}'.format(name, followers, following))
| [
"@",
"verbose",
"def",
"lookup_by_userid_demo",
"(",
")",
":",
"oauth",
"=",
"credsfromfile",
"(",
")",
"client",
"=",
"Query",
"(",
"**",
"oauth",
")",
"user_info",
"=",
"client",
".",
"user_info_from_id",
"(",
"USERIDS",
")",
"for",
"info",
"in",
"user_i... | use the rest api to convert a userid to a screen name . | train | false |
def get_module(module_name):
    """Import and return the attribute named by a dotted path.

    Splits ``'pkg.mod.Attr'`` into module path and attribute, imports the
    module and returns the attribute (typically a class).

    :raises EAException: when the path cannot be split, imported, or the
        attribute is missing.
    """
    try:
        pkg_path, attr_name = module_name.rsplit('.', 1)
        container = __import__(pkg_path, globals(), locals(), [attr_name])
        return getattr(container, attr_name)
    except (ImportError, AttributeError, ValueError) as e:
        raise EAException(('Could not import module %s: %s' % (module_name, e)))
| [
"def",
"get_module",
"(",
"module_name",
")",
":",
"try",
":",
"(",
"module_path",
",",
"module_class",
")",
"=",
"module_name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"base_module",
"=",
"__import__",
"(",
"module_path",
",",
"globals",
"(",
")",
","... | get an instance of the xmodule class identified by location . | train | false |
def _dmp_ff_trivial_gcd(f, g, u, K):
    """Handle trivial cases of multivariate polynomial GCD over a field.

    Returns a tuple ``(h, cff, cfg)`` -- the gcd and both cofactors --
    for the zero/simplifiable cases, or ``None`` when no trivial case
    applies and the caller must run the full algorithm.
    """
    zero_f = dmp_zero_p(f, u)
    zero_g = dmp_zero_p(g, u)
    if (zero_f and zero_g):
        # gcd(0, 0): zero gcd with zero cofactors.
        return tuple(dmp_zeros(3, u, K))
    elif zero_f:
        # gcd(0, g): monic g, with g's leading coefficient as cofactor.
        return (dmp_ground_monic(g, u, K), dmp_zero(u), dmp_ground(dmp_ground_LC(g, u, K), u))
    elif zero_g:
        # Symmetric to the zero_f case.
        return (dmp_ground_monic(f, u, K), dmp_ground(dmp_ground_LC(f, u, K), u), dmp_zero(u))
    elif query('USE_SIMPLIFY_GCD'):
        # Optional simplification pass controlled by global configuration.
        return _dmp_simplify_gcd(f, g, u, K)
    else:
        return None
| [
"def",
"_dmp_ff_trivial_gcd",
"(",
"f",
",",
"g",
",",
"u",
",",
"K",
")",
":",
"zero_f",
"=",
"dmp_zero_p",
"(",
"f",
",",
"u",
")",
"zero_g",
"=",
"dmp_zero_p",
"(",
"g",
",",
"u",
")",
"if",
"(",
"zero_f",
"and",
"zero_g",
")",
":",
"return",
... | handle trivial cases in gcd algorithm over a field . | train | false |
def get_transport(conf, url=None, optional=False, cache=True):
    """Initialise (and optionally cache) an oslo.messaging transport.

    :param conf: oslo config object forwarded to oslo_messaging.
    :param url: transport URL; the cache key falls back to DEFAULT_URL.
    :param optional: when True and no explicit url was given, load
        failures return None instead of raising.
    :param cache: when False a fresh transport is always built and not
        stored; when True the module-level TRANSPORTS cache is consulted
        and updated.
    """
    global TRANSPORTS, DEFAULT_URL
    cache_key = (url or DEFAULT_URL)
    transport = TRANSPORTS.get(cache_key)
    if ((not transport) or (not cache)):
        try:
            transport = oslo_messaging.get_transport(conf, url)
        except (oslo_messaging.InvalidTransportURL, oslo_messaging.DriverLoadFailure):
            # Explicit URLs and non-optional callers surface the error.
            if ((not optional) or url):
                raise
            return None
        else:
            if cache:
                TRANSPORTS[cache_key] = transport
    return transport
| [
"def",
"get_transport",
"(",
"conf",
",",
"url",
"=",
"None",
",",
"optional",
"=",
"False",
",",
"cache",
"=",
"True",
")",
":",
"global",
"TRANSPORTS",
",",
"DEFAULT_URL",
"cache_key",
"=",
"(",
"url",
"or",
"DEFAULT_URL",
")",
"transport",
"=",
"TRANS... | initialise the oslo_messaging layer . | train | false |
def CheckColor(color, web_dir):
    """Validate a color-scheme name against the installed stylesheets.

    Returns *color* when a matching stylesheet exists under
    ``web_dir/static/stylesheets/colorschemes/`` (with or without a
    '.css' suffix); otherwise returns ''.
    """
    if not color:
        return ''
    # Check the '.css'-suffixed name first, then the bare name.
    for suffix in ('.css', ''):
        candidate = os.path.join(web_dir, (('static/stylesheets/colorschemes/' + color) + suffix))
        if os.path.exists(candidate):
            return color
    return ''
| [
"def",
"CheckColor",
"(",
"color",
",",
"web_dir",
")",
":",
"if",
"(",
"color",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"web_dir",
",",
"(",
"(",
"'static/stylesheets/colorschemes/'",
"+",
"color",
")",
"+",
... | check existence of color-scheme . | train | false |
def salt_ssh():
    """Entry point: execute the salt-ssh CLI client.

    Removes '' (the current directory) from sys.path before running --
    presumably so local files cannot shadow installed modules -- installs
    signal handlers, and routes SaltClientError through the standard
    interrupt/hard-crash handling.
    """
    import salt.cli.ssh
    if ('' in sys.path):
        sys.path.remove('')
    try:
        client = salt.cli.ssh.SaltSSH()
        _install_signal_handlers(client)
        client.run()
    except SaltClientError as err:
        trace = traceback.format_exc()
        try:
            hardcrash = client.options.hard_crash
        except (AttributeError, KeyError):
            # The client may have failed before options were parsed.
            hardcrash = False
        _handle_interrupt(SystemExit(err), err, hardcrash, trace=trace)
| [
"def",
"salt_ssh",
"(",
")",
":",
"import",
"salt",
".",
"cli",
".",
"ssh",
"if",
"(",
"''",
"in",
"sys",
".",
"path",
")",
":",
"sys",
".",
"path",
".",
"remove",
"(",
"''",
")",
"try",
":",
"client",
"=",
"salt",
".",
"cli",
".",
"ssh",
"."... | execute the salt-ssh system . | train | true |
31,389 | def _initial_image_sum(width, height, channels):
if (channels == 1):
return np.zeros((height, width), np.float64)
else:
return np.zeros((height, width, channels), np.float64)
| [
"def",
"_initial_image_sum",
"(",
"width",
",",
"height",
",",
"channels",
")",
":",
"if",
"(",
"channels",
"==",
"1",
")",
":",
"return",
"np",
".",
"zeros",
"(",
"(",
"height",
",",
"width",
")",
",",
"np",
".",
"float64",
")",
"else",
":",
"retu... | returns an array of zeros that will be used to store the accumulated sum of images . | train | false |
@require_POST
@csrf_protect
def reposync(request):
    """Kick off a background reposync task for configured repos.

    Redirects to the login page when the session is not authenticated;
    otherwise fires the cobbler background task and redirects to the
    task-created page.
    """
    if (not test_user_authenticated(request)):
        return login(request, next='/cobbler_web/reposync', expired=True)
    # 'names': '' presumably selects all repos; each gets up to 3 tries.
    remote.background_reposync({'names': '', 'tries': 3}, request.session['token'])
    return HttpResponseRedirect('/cobbler_web/task_created')
| [
"@",
"require_POST",
"@",
"csrf_protect",
"def",
"reposync",
"(",
"request",
")",
":",
"if",
"(",
"not",
"test_user_authenticated",
"(",
"request",
")",
")",
":",
"return",
"login",
"(",
"request",
",",
"next",
"=",
"'/cobbler_web/reposync'",
",",
"expired",
... | syncs all repos that are configured to be synced . | train | false |
def parse_name_and_version(p):
    """Split a name/version string into a ``(name, version)`` tuple.

    The name is normalized (stripped, lower-cased); the version is taken
    verbatim from the regex group.

    :raises DistlibException: when the string does not match
        NAME_VERSION_RE.
    """
    match = NAME_VERSION_RE.match(p)
    if match is None:
        raise DistlibException(("Ill-formed name/version string: '%s'" % p))
    groups = match.groupdict()
    normalized_name = groups['name'].strip().lower()
    return (normalized_name, groups['ver'])
| [
"def",
"parse_name_and_version",
"(",
"p",
")",
":",
"m",
"=",
"NAME_VERSION_RE",
".",
"match",
"(",
"p",
")",
"if",
"(",
"not",
"m",
")",
":",
"raise",
"DistlibException",
"(",
"(",
"\"Ill-formed name/version string: '%s'\"",
"%",
"p",
")",
")",
"d",
"=",... | a utility method used to get name and version from a string . | train | true |
def parsetree(s, *args, **kwargs):
    """Return a parsed Text (parse tree) for the given string.

    The input is coerced to unicode (Python 2 style) first; any extra
    arguments are forwarded unchanged to parse().
    """
    return Text(parse(unicode(s), *args, **kwargs))
| [
"def",
"parsetree",
"(",
"s",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"Text",
"(",
"parse",
"(",
"unicode",
"(",
"s",
")",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
")"
] | returns a parsed text from the given string . | train | false |
def _connections_for_states(base_mapper, uowtransaction, states):
    """Yield ``(state, state.dict, mapper, connection)`` for each state.

    When the session has a connection_callable (per-instance connection
    routing), it is consulted for every state; otherwise a single
    connection is resolved lazily from the current transaction on first
    use and reused for all states.
    """
    if uowtransaction.session.connection_callable:
        connection_callable = uowtransaction.session.connection_callable
    else:
        connection = None
        connection_callable = None
    for state in _sort_states(states):
        if connection_callable:
            # Per-instance routing: ask the callable for each object.
            connection = connection_callable(base_mapper, state.obj())
        elif (not connection):
            # Lazy one-time resolution from the transaction.
            connection = uowtransaction.transaction.connection(base_mapper)
        mapper = _state_mapper(state)
        (yield (state, state.dict, mapper, connection))
| [
"def",
"_connections_for_states",
"(",
"base_mapper",
",",
"uowtransaction",
",",
"states",
")",
":",
"if",
"uowtransaction",
".",
"session",
".",
"connection_callable",
":",
"connection_callable",
"=",
"uowtransaction",
".",
"session",
".",
"connection_callable",
"el... | return an iterator of . | train | false |
def GetTensorOpName(x):
    """Return the name of the op that produced tensor *x*.

    Tensor names look like 'op_name:output_index'; everything before the
    last ':' is the op name. Names without a ':' are returned unchanged.
    """
    # rsplit with maxsplit=1 returns the whole name when no ':' exists,
    # so taking element 0 covers both cases.
    return x.name.rsplit(':', 1)[0]
| [
"def",
"GetTensorOpName",
"(",
"x",
")",
":",
"t",
"=",
"x",
".",
"name",
".",
"rsplit",
"(",
"':'",
",",
"1",
")",
"if",
"(",
"len",
"(",
"t",
")",
"==",
"1",
")",
":",
"return",
"x",
".",
"name",
"else",
":",
"return",
"t",
"[",
"0",
"]"
... | get the name of the op that created a tensor . | train | false |
def app_available(app_name):
    """Check whether *app_name* can be executed from the user's PATH.

    Attempts to run the binary with output discarded; an OSError from
    the launch (command not found) means it is unavailable.

    Note: this actually executes the binary once as a side effect.
    """
    with open(devnull, 'w') as sink:
        try:
            call([app_name], stdout=sink, stderr=sink)
        except OSError:
            return False
    return True
| [
"def",
"app_available",
"(",
"app_name",
")",
":",
"devnull_fd",
"=",
"open",
"(",
"devnull",
",",
"'w'",
")",
"output",
"=",
"True",
"try",
":",
"call",
"(",
"[",
"app_name",
"]",
",",
"stdout",
"=",
"devnull_fd",
",",
"stderr",
"=",
"devnull_fd",
")"... | check if a binary is available and on the user path inputs: app_name: name of the binary . | train | false |
def getTransformedPathsByKey(key, xmlElement):
    """Get the transformed paths referenced by *key* on an XML element.

    Returns [] when the key is absent or the link cannot be resolved to
    an element object. Dict/list link values are converted in place to
    transformed paths and flattened via getPathsByLists.
    """
    if (key not in xmlElement.attributeDictionary):
        return []
    word = str(xmlElement.attributeDictionary[key]).strip()
    evaluatedLinkValue = getEvaluatedLinkValue(word, xmlElement)
    if ((evaluatedLinkValue.__class__ == dict) or (evaluatedLinkValue.__class__ == list)):
        # Container values: convert in place, then flatten to paths.
        convertToTransformedPaths(evaluatedLinkValue)
        return getPathsByLists(evaluatedLinkValue)
    xmlElementObject = getXMLElementObject(evaluatedLinkValue)
    if (xmlElementObject == None):
        return []
    return xmlElementObject.getTransformedPaths()
| [
"def",
"getTransformedPathsByKey",
"(",
"key",
",",
"xmlElement",
")",
":",
"if",
"(",
"key",
"not",
"in",
"xmlElement",
".",
"attributeDictionary",
")",
":",
"return",
"[",
"]",
"word",
"=",
"str",
"(",
"xmlElement",
".",
"attributeDictionary",
"[",
"key",
... | get transformed paths by key . | train | false |
def install_windowsfeatures(name):
    """Install a Windows feature through Chocolatey's 'windowsfeatures'
    source (DISM-backed). Thin wrapper around install().
    """
    return install(name, source='windowsfeatures')
| [
"def",
"install_windowsfeatures",
"(",
"name",
")",
":",
"return",
"install",
"(",
"name",
",",
"source",
"=",
"'windowsfeatures'",
")"
] | instructs chocolatey to install a windows feature via the deployment image servicing and management tool . | train | false |
def test_replicate_value_error():
    """Time.replicate must raise ValueError for an unknown format name."""
    t1 = Time('2007:001', scale='tai')
    with pytest.raises(ValueError) as err:
        t1.replicate(format='definitely_not_a_valid_format')
    # The message should enumerate the valid formats.
    assert ('format must be one of' in str(err))
| [
"def",
"test_replicate_value_error",
"(",
")",
":",
"t1",
"=",
"Time",
"(",
"'2007:001'",
",",
"scale",
"=",
"'tai'",
")",
"with",
"pytest",
".",
"raises",
"(",
"ValueError",
")",
"as",
"err",
":",
"t1",
".",
"replicate",
"(",
"format",
"=",
"'definitely... | passing a bad format to replicate should raise valueerror . | train | false |
def all_issues(issues):
    """Yield issues whose 'title' has not been seen before.

    Deduplicates lazily: the first issue bearing each title is yielded
    in input order, later duplicates are dropped.
    """
    logging.info('finding issues...')
    emitted_titles = set()
    for entry in issues:
        title = entry['title']
        if title in emitted_titles:
            continue
        emitted_titles.add(title)
        (yield entry)
| [
"def",
"all_issues",
"(",
"issues",
")",
":",
"logging",
".",
"info",
"(",
"'finding issues...'",
")",
"seen",
"=",
"set",
"(",
")",
"for",
"issue",
"in",
"issues",
":",
"if",
"(",
"issue",
"[",
"'title'",
"]",
"not",
"in",
"seen",
")",
":",
"seen",
... | yields unique set of issues given a list of issues . | train | true |
def is_systemd_distribution(distribution):
    """Return whether the named distribution uses systemd as its init
    system: CentOS/RHEL, plus Ubuntu 16.04.
    """
    return (is_centos_or_rhel(distribution) or (distribution == 'ubuntu-16.04'))
| [
"def",
"is_systemd_distribution",
"(",
"distribution",
")",
":",
"return",
"(",
"is_centos_or_rhel",
"(",
"distribution",
")",
"or",
"(",
"distribution",
"==",
"'ubuntu-16.04'",
")",
")"
] | determine whether the named distribution uses systemd . | train | false |
def check_explicit_underscore_import(logical_line, filename):
    """Style check: flag use of _() without an explicit import of _.

    Files observed to import _ (or a project-specific variant) are
    remembered in the module-level UNDERSCORE_IMPORT_FILES list and
    skipped on subsequent lines. Yields (offset, message) tuples in the
    pep8/hacking check convention.
    """
    # Skip files already known to import _ explicitly.
    for file in UNDERSCORE_IMPORT_FILES:
        if (file in filename):
            return
    if (underscore_import_check.match(logical_line) or underscore_import_check_multi.match(logical_line) or custom_underscore_check.match(logical_line)):
        # Record the import; later lines in this file will be skipped.
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or string_translation.match(logical_line)):
        (yield (0, 'N323: Found use of _() without explicit import of _ !'))
| [
"def",
"check_explicit_underscore_import",
"(",
"logical_line",
",",
"filename",
")",
":",
"for",
"file",
"in",
"UNDERSCORE_IMPORT_FILES",
":",
"if",
"(",
"file",
"in",
"filename",
")",
":",
"return",
"if",
"(",
"underscore_import_check",
".",
"match",
"(",
"log... | check for explicit import of the _ function we need to ensure that any files that are using the _() function to translate logs are explicitly importing the _ function . | train | false |
def s_static(value, name=None):
    """Push a static (non-fuzzable) value onto the current block stack."""
    static = primitives.static(value, name)
    blocks.CURRENT.push(static)
| [
"def",
"s_static",
"(",
"value",
",",
"name",
"=",
"None",
")",
":",
"static",
"=",
"primitives",
".",
"static",
"(",
"value",
",",
"name",
")",
"blocks",
".",
"CURRENT",
".",
"push",
"(",
"static",
")"
] | push a static value onto the current block stack . | train | false |
def theme_project():
    """RESTful CRUD controller -- not normally exposed to users via a menu."""
    return s3_rest_controller()
| [
"def",
"theme_project",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] | restful crud controller - not normally exposed to users via a menu . | train | false |
31,407 | def _volume_admin_metadata_get(context, volume_id):
return {'fake_key': 'fake_value'}
| [
"def",
"_volume_admin_metadata_get",
"(",
"context",
",",
"volume_id",
")",
":",
"return",
"{",
"'fake_key'",
":",
"'fake_value'",
"}"
] | return dummy admin metadata . | train | false |
def get_suitable_downloader(info_dict):
    """Pick the downloader class able to handle *info_dict*.

    Dispatches on the URL scheme, the declared 'protocol' value, and the
    file extension; HttpFD is the fallback.
    """
    url = info_dict[u'url']
    protocol = info_dict.get(u'protocol')
    if url.startswith(u'rtmp'):
        return RtmpFD
    if (protocol == u'm3u8_native'):
        return NativeHlsFD
    # m3u8 is detected either from the explicit protocol or, when no
    # protocol is declared, from the URL extension.
    if ((protocol == u'm3u8') or ((protocol is None) and (determine_ext(url) == u'm3u8'))):
        return HlsFD
    if (url.startswith(u'mms') or url.startswith(u'rtsp')):
        return MplayerFD
    if (determine_ext(url) == u'f4m'):
        return F4mFD
    else:
        return HttpFD
| [
"def",
"get_suitable_downloader",
"(",
"info_dict",
")",
":",
"url",
"=",
"info_dict",
"[",
"u'url'",
"]",
"protocol",
"=",
"info_dict",
".",
"get",
"(",
"u'protocol'",
")",
"if",
"url",
".",
"startswith",
"(",
"u'rtmp'",
")",
":",
"return",
"RtmpFD",
"if"... | get the downloader class that can handle the info dict . | train | false |
@handle_response_format
@treeio_login_required
def set_edit(request, set_id, response_format='html'):
    """ChangeSet edit view.

    Requires write permission on the changeset's object or changes-admin
    rights. On a valid POST the form is saved and the user is redirected
    to the detail view; otherwise the edit form is (re-)rendered.
    """
    changeset = get_object_or_404(ChangeSet, pk=set_id)
    if ((not request.user.profile.has_permission(changeset.object, mode='w')) and (not request.user.profile.is_admin('treeio.changes'))):
        return user_denied(request, "You don't have access to this Change Set.", response_format=response_format)
    if request.POST:
        form = ChangeSetForm(request.user.profile, request.POST, instance=changeset)
        if form.is_valid():
            changeset = form.save()
            return HttpResponseRedirect(reverse('changes_set_view', args=[changeset.id]))
    else:
        form = ChangeSetForm(request.user.profile, instance=changeset)
    context = _get_default_context(request)
    context.update({'changeset': changeset, 'form': form})
    return render_to_response('changes/set_edit', context, context_instance=RequestContext(request), response_format=response_format)
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"set_edit",
"(",
"request",
",",
"set_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"changeset",
"=",
"get_object_or_404",
"(",
"ChangeSet",
",",
"pk",
"=",
"set_id",
")",
"if",
"(",
"... | changeset edit . | train | false |
def _load_settings(validate=True):
    """Load workflow settings, merging the on-disk JSON file over defaults.

    Settings files without a 'version' key are migrated in place before
    merging.

    :param validate: when True, require 'service' and 'location' to be
        present and raise SetupError otherwise.
    """
    settings = {'units': DEFAULT_UNITS, 'icons': DEFAULT_ICONS, 'days': 3, 'version': 2}
    if os.path.exists(settings_file):
        with open(settings_file, 'rt') as sf:
            s = json.load(sf)
            # Pre-versioned settings files need a one-time migration.
            if ('version' not in s):
                _migrate_settings(s)
            settings.update(s)
    if validate:
        if ('service' not in settings):
            raise SetupError('You need to set your weather service', 'Use the "wset service" command.')
        if ('location' not in settings):
            raise SetupError('Missing default location', 'You must specify a default location with the "wset location" command')
    return settings
| [
"def",
"_load_settings",
"(",
"validate",
"=",
"True",
")",
":",
"settings",
"=",
"{",
"'units'",
":",
"DEFAULT_UNITS",
",",
"'icons'",
":",
"DEFAULT_ICONS",
",",
"'days'",
":",
"3",
",",
"'version'",
":",
"2",
"}",
"if",
"os",
".",
"path",
".",
"exist... | get an the location and units to use . | train | false |
def _concat_categorical(to_concat, axis=0):
    """Concatenate an array of arrays that may contain Categoricals.

    When every input is categorical with an identical dtype (same
    categories/ordering), their union is returned via
    union_categoricals; otherwise everything is coerced to a flat object
    ndarray and concatenated.
    """
    def _concat_asobject(to_concat):
        # Coerce each piece to a flat array of values, then concatenate.
        to_concat = [(x.get_values() if is_categorical_dtype(x.dtype) else x.ravel()) for x in to_concat]
        res = _concat_compat(to_concat)
        if (axis == 1):
            # Preserve 2-D orientation for axis=1 concatenation.
            return res.reshape(1, len(res))
        else:
            return res
    categoricals = [x for x in to_concat if is_categorical_dtype(x.dtype)]
    if (len(categoricals) != len(to_concat)):
        # Mixed categorical / non-categorical inputs: fall through to
        # the object-dtype path below.
        pass
    else:
        first = to_concat[0]
        if all((first.is_dtype_equal(other) for other in to_concat[1:])):
            return union_categoricals(categoricals)
    return _concat_asobject(to_concat)
| [
"def",
"_concat_categorical",
"(",
"to_concat",
",",
"axis",
"=",
"0",
")",
":",
"def",
"_concat_asobject",
"(",
"to_concat",
")",
":",
"to_concat",
"=",
"[",
"(",
"x",
".",
"get_values",
"(",
")",
"if",
"is_categorical_dtype",
"(",
"x",
".",
"dtype",
")... | concatenate an object/categorical array of arrays . | train | false |
31,414 | def GetParentDeviceByType(device, parent_type):
parent_type = parent_type.encode('mac_roman')
while (IORegistryEntryGetName(device) != parent_type):
parent = ctypes.c_void_p()
response = iokit.IORegistryEntryGetParentEntry(device, 'IOService'.encode('mac_roman'), ctypes.byref(parent))
if (response != 0):
return None
device = parent
return device
| [
"def",
"GetParentDeviceByType",
"(",
"device",
",",
"parent_type",
")",
":",
"parent_type",
"=",
"parent_type",
".",
"encode",
"(",
"'mac_roman'",
")",
"while",
"(",
"IORegistryEntryGetName",
"(",
"device",
")",
"!=",
"parent_type",
")",
":",
"parent",
"=",
"c... | find the first parent of a device that implements the parent_type . | train | false |
31,415 | def single_source_bellman_ford_path(G, source, cutoff=None, weight='weight'):
(length, path) = single_source_bellman_ford(G, source, cutoff=cutoff, weight=weight)
return path
| [
"def",
"single_source_bellman_ford_path",
"(",
"G",
",",
"source",
",",
"cutoff",
"=",
"None",
",",
"weight",
"=",
"'weight'",
")",
":",
"(",
"length",
",",
"path",
")",
"=",
"single_source_bellman_ford",
"(",
"G",
",",
"source",
",",
"cutoff",
"=",
"cutof... | compute shortest path between source and all other reachable nodes for a weighted graph . | train | false |
31,416 | def select_functions(methods, data):
s = set()
for (condition, funcs) in methods:
if match(condition, data):
s |= funcs
return dict(((name(func), func) for func in s))
| [
"def",
"select_functions",
"(",
"methods",
",",
"data",
")",
":",
"s",
"=",
"set",
"(",
")",
"for",
"(",
"condition",
",",
"funcs",
")",
"in",
"methods",
":",
"if",
"match",
"(",
"condition",
",",
"data",
")",
":",
"s",
"|=",
"funcs",
"return",
"di... | select appropriate functions given types and predicates . | train | false |
31,417 | def robustApply(receiver, *arguments, **named):
(receiver, codeObject, startIndex) = function(receiver)
acceptable = codeObject.co_varnames[(startIndex + len(arguments)):codeObject.co_argcount]
for name in codeObject.co_varnames[startIndex:(startIndex + len(arguments))]:
if named.has_key(name):
raise TypeError(('Argument %r specified both positionally and as a keyword for calling %r' % (name, receiver)))
if (not (codeObject.co_flags & 8)):
for arg in named.keys():
if (arg not in acceptable):
del named[arg]
return receiver(*arguments, **named)
| [
"def",
"robustApply",
"(",
"receiver",
",",
"*",
"arguments",
",",
"**",
"named",
")",
":",
"(",
"receiver",
",",
"codeObject",
",",
"startIndex",
")",
"=",
"function",
"(",
"receiver",
")",
"acceptable",
"=",
"codeObject",
".",
"co_varnames",
"[",
"(",
... | call receiver with arguments and an appropriate subset of named . | train | false |
31,418 | def hue_light(registry, xml_parent, data):
hue_light = XML.SubElement(xml_parent, 'org.jenkinsci.plugins.hue__light.LightNotifier')
hue_light.set('plugin', 'hue-light')
lightId = XML.SubElement(hue_light, 'lightId')
id_mapping = [('light-id', 'string', None)]
helpers.convert_mapping_to_xml(lightId, data, id_mapping, fail_required=True)
build_mapping = [('pre-build', 'preBuild', 'blue'), ('good-build', 'goodBuild', 'green'), ('unstable-build', 'unstableBuild', 'yellow'), ('bad-build', 'badBuild', 'red')]
helpers.convert_mapping_to_xml(hue_light, data, build_mapping, fail_required=True)
| [
"def",
"hue_light",
"(",
"registry",
",",
"xml_parent",
",",
"data",
")",
":",
"hue_light",
"=",
"XML",
".",
"SubElement",
"(",
"xml_parent",
",",
"'org.jenkinsci.plugins.hue__light.LightNotifier'",
")",
"hue_light",
".",
"set",
"(",
"'plugin'",
",",
"'hue-light'"... | yaml: hue-light this plugin shows the state of your builds using the awesome philips hue lights . | train | false |
31,419 | def isTestFile(filename):
basename = os.path.basename(filename)
return (basename.startswith('test_') and (os.path.splitext(basename)[1] == '.py'))
| [
"def",
"isTestFile",
"(",
"filename",
")",
":",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"return",
"(",
"basename",
".",
"startswith",
"(",
"'test_'",
")",
"and",
"(",
"os",
".",
"path",
".",
"splitext",
"(",
"basename... | returns true if filename looks like a file containing unit tests . | train | false |
31,420 | def test_can_parse_two_ordinary_steps():
steps = Step.many_from_lines((I_DIE_HAPPY.splitlines() + I_LIKE_VEGETABLES.splitlines()))
assert_equals(len(steps), 2)
assert isinstance(steps[0], Step)
assert isinstance(steps[1], Step)
assert_equals(steps[0].sentence, I_DIE_HAPPY)
assert_equals(steps[1].sentence, I_LIKE_VEGETABLES)
| [
"def",
"test_can_parse_two_ordinary_steps",
"(",
")",
":",
"steps",
"=",
"Step",
".",
"many_from_lines",
"(",
"(",
"I_DIE_HAPPY",
".",
"splitlines",
"(",
")",
"+",
"I_LIKE_VEGETABLES",
".",
"splitlines",
"(",
")",
")",
")",
"assert_equals",
"(",
"len",
"(",
... | it should correctly extract two ordinary steps into an array . | train | false |
31,421 | def patharg(path):
return path.replace('\\', '\\\\\\')
| [
"def",
"patharg",
"(",
"path",
")",
":",
"return",
"path",
".",
"replace",
"(",
"'\\\\'",
",",
"'\\\\\\\\\\\\'",
")"
] | back slashes need to be escaped in item args . | train | false |
31,422 | def gettext_noop(message):
return message
| [
"def",
"gettext_noop",
"(",
"message",
")",
":",
"return",
"message"
] | marks strings for translation but doesnt translate them now . | train | false |
31,423 | def _get_client_and_key(url, user, password, verbose=0):
session = {}
session['client'] = six.moves.xmlrpc_client.Server(url, verbose=verbose, use_datetime=True)
session['key'] = session['client'].auth.login(user, password)
return session
| [
"def",
"_get_client_and_key",
"(",
"url",
",",
"user",
",",
"password",
",",
"verbose",
"=",
"0",
")",
":",
"session",
"=",
"{",
"}",
"session",
"[",
"'client'",
"]",
"=",
"six",
".",
"moves",
".",
"xmlrpc_client",
".",
"Server",
"(",
"url",
",",
"ve... | return the client object and session key for the client . | train | true |
31,424 | def read_hdf5(fname, title='h5io', slash='ignore'):
h5py = _check_h5py()
if (not op.isfile(fname)):
raise IOError(('file "%s" not found' % fname))
if (not isinstance(title, string_types)):
raise ValueError('title must be a string')
with h5py.File(fname, mode='r') as fid:
if (title not in fid):
raise ValueError(('no "%s" data found' % title))
if isinstance(fid[title], h5py.Group):
if ('TITLE' not in fid[title].attrs):
raise ValueError(('no "%s" data found' % title))
data = _triage_read(fid[title], slash=slash)
return data
| [
"def",
"read_hdf5",
"(",
"fname",
",",
"title",
"=",
"'h5io'",
",",
"slash",
"=",
"'ignore'",
")",
":",
"h5py",
"=",
"_check_h5py",
"(",
")",
"if",
"(",
"not",
"op",
".",
"isfile",
"(",
"fname",
")",
")",
":",
"raise",
"IOError",
"(",
"(",
"'file \... | read python object from hdf5 format using h5py parameters fname : str file to load . | train | false |
31,426 | def _is_user_author_or_privileged(cc_content, context):
return (context['is_requester_privileged'] or (context['cc_requester']['id'] == cc_content['user_id']))
| [
"def",
"_is_user_author_or_privileged",
"(",
"cc_content",
",",
"context",
")",
":",
"return",
"(",
"context",
"[",
"'is_requester_privileged'",
"]",
"or",
"(",
"context",
"[",
"'cc_requester'",
"]",
"[",
"'id'",
"]",
"==",
"cc_content",
"[",
"'user_id'",
"]",
... | check if the user is the author of a content object or a privileged user . | train | false |
31,428 | def _teardown_log():
global log
if log.exception_logging_enabled():
log.disable_exception_logging()
if log.warnings_logging_enabled():
log.disable_warnings_logging()
del log
try:
logging._acquireLock()
try:
loggerDict = logging.Logger.manager.loggerDict
for key in loggerDict.keys():
if ((key == 'astropy') or key.startswith('astropy.')):
del loggerDict[key]
finally:
logging._releaseLock()
except Exception:
pass
| [
"def",
"_teardown_log",
"(",
")",
":",
"global",
"log",
"if",
"log",
".",
"exception_logging_enabled",
"(",
")",
":",
"log",
".",
"disable_exception_logging",
"(",
")",
"if",
"log",
".",
"warnings_logging_enabled",
"(",
")",
":",
"log",
".",
"disable_warnings_... | shut down exception and warning logging and clear all astropy loggers from the logging modules cache . | train | false |
31,429 | def _guess_apiserver(apiserver_url=None):
default_config = '/etc/kubernetes/config'
if (apiserver_url is not None):
return apiserver_url
if ('KUBERNETES_MASTER' in os.environ):
apiserver_url = os.environ.get('KUBERNETES_MASTER')
elif __salt__['config.get']('k8s:master'):
apiserver_url = __salt__['config.get']('k8s:master')
elif (os.path.exists(default_config) or __salt__['config.get']('k8s:config', '')):
config = __salt__['config.get']('k8s:config', default_config)
kubeapi_regex = re.compile('KUBE_MASTER=[\'"]--master=(.*)[\'"]', re.MULTILINE)
with salt.utils.fopen(config) as fh_k8s:
for line in fh_k8s.readlines():
match_line = kubeapi_regex.match(line)
if match_line:
apiserver_url = match_line.group(1)
else:
apiserver_url = 'http://127.0.0.1:8080'
log.debug('Discoverd k8s API server address: {0}'.format(apiserver_url))
return apiserver_url
| [
"def",
"_guess_apiserver",
"(",
"apiserver_url",
"=",
"None",
")",
":",
"default_config",
"=",
"'/etc/kubernetes/config'",
"if",
"(",
"apiserver_url",
"is",
"not",
"None",
")",
":",
"return",
"apiserver_url",
"if",
"(",
"'KUBERNETES_MASTER'",
"in",
"os",
".",
"e... | try to guees the kubemaster url from environ . | train | true |
31,430 | def model_save(self, commit=True):
if self.errors:
raise ValueError(("The %s could not be created because the data didn't validate." % self._model._meta.object_name))
return save_instance(self, self._model(), commit)
| [
"def",
"model_save",
"(",
"self",
",",
"commit",
"=",
"True",
")",
":",
"if",
"self",
".",
"errors",
":",
"raise",
"ValueError",
"(",
"(",
"\"The %s could not be created because the data didn't validate.\"",
"%",
"self",
".",
"_model",
".",
"_meta",
".",
"object... | creates and returns model instance according to self . | train | false |
31,431 | @require_GET
@allow_CORS_GET
@xframe_options_exempt
@process_document_path
def raw_code_sample_file(request, document_slug, document_locale, sample_name, attachment_id, filename):
return redirect(full_attachment_url(attachment_id, filename))
| [
"@",
"require_GET",
"@",
"allow_CORS_GET",
"@",
"xframe_options_exempt",
"@",
"process_document_path",
"def",
"raw_code_sample_file",
"(",
"request",
",",
"document_slug",
",",
"document_locale",
",",
"sample_name",
",",
"attachment_id",
",",
"filename",
")",
":",
"re... | a view redirecting to the real file serving view of the attachments app . | train | false |
31,432 | def get_pages(app=None):
pages = {}
frappe.local.flags.in_get_all_pages = True
folders = (frappe.local.flags.web_pages_folders or (u'www', u'templates/pages'))
if app:
apps = [app]
else:
apps = (frappe.local.flags.web_pages_apps or frappe.get_installed_apps())
for app in apps:
app_path = frappe.get_app_path(app)
for start in folders:
path = os.path.join(app_path, start)
pages.update(get_pages_from_path(path, app, app_path))
frappe.local.flags.in_get_all_pages = False
return pages
| [
"def",
"get_pages",
"(",
"app",
"=",
"None",
")",
":",
"pages",
"=",
"{",
"}",
"frappe",
".",
"local",
".",
"flags",
".",
"in_get_all_pages",
"=",
"True",
"folders",
"=",
"(",
"frappe",
".",
"local",
".",
"flags",
".",
"web_pages_folders",
"or",
"(",
... | get all pages . | train | false |
31,434 | def list_new(t):
name = raw_input(light_magenta("New list's name: ", rl=True))
mode = raw_input(light_magenta("New list's mode (public/private): ", rl=True))
description = raw_input(light_magenta("New list's description: ", rl=True))
try:
t.lists.create(name=name, mode=mode, description=description)
printNicely(green((name + ' list is created.')))
except:
debug_option()
printNicely(red('Oops something is wrong with Twitter :('))
| [
"def",
"list_new",
"(",
"t",
")",
":",
"name",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"\"New list's name: \"",
",",
"rl",
"=",
"True",
")",
")",
"mode",
"=",
"raw_input",
"(",
"light_magenta",
"(",
"\"New list's mode (public/private): \"",
",",
"rl",
"="... | create a new list . | train | false |
31,435 | def getTranslateTetragridByTranslation(translation):
return [[1.0, 0.0, 0.0, translation.x], [0.0, 1.0, 0.0, translation.y], [0.0, 0.0, 1.0, translation.z], [0.0, 0.0, 0.0, 1.0]]
| [
"def",
"getTranslateTetragridByTranslation",
"(",
"translation",
")",
":",
"return",
"[",
"[",
"1.0",
",",
"0.0",
",",
"0.0",
",",
"translation",
".",
"x",
"]",
",",
"[",
"0.0",
",",
"1.0",
",",
"0.0",
",",
"translation",
".",
"y",
"]",
",",
"[",
"0.... | get translate tetragrid by translation . | train | false |
31,436 | def _normparse(text):
return _normexpr(functemplate._parse(text))
| [
"def",
"_normparse",
"(",
"text",
")",
":",
"return",
"_normexpr",
"(",
"functemplate",
".",
"_parse",
"(",
"text",
")",
")"
] | parse a template and then normalize the resulting expression . | train | false |
31,438 | @export_as_api
@ExecutionContext.enforce_phase(EXECUTION_PHASE.BEFORE_TRADING, EXECUTION_PHASE.HANDLE_BAR, EXECUTION_PHASE.SCHEDULED)
def get_order(order_id):
return get_simu_exchange().get_order(order_id)
| [
"@",
"export_as_api",
"@",
"ExecutionContext",
".",
"enforce_phase",
"(",
"EXECUTION_PHASE",
".",
"BEFORE_TRADING",
",",
"EXECUTION_PHASE",
".",
"HANDLE_BAR",
",",
"EXECUTION_PHASE",
".",
"SCHEDULED",
")",
"def",
"get_order",
"(",
"order_id",
")",
":",
"return",
"... | get a specified order by the unique order_id . | train | false |
31,439 | def vec_to_dict(vector, var_offsets, var_sizes):
val_dict = {}
for (id_, offset) in var_offsets.items():
size = var_sizes[id_]
value = np.zeros(size)
offset = var_offsets[id_]
for col in range(size[1]):
value[:, col] = vector[offset:(size[0] + offset)]
offset += size[0]
val_dict[id_] = value
return val_dict
| [
"def",
"vec_to_dict",
"(",
"vector",
",",
"var_offsets",
",",
"var_sizes",
")",
":",
"val_dict",
"=",
"{",
"}",
"for",
"(",
"id_",
",",
"offset",
")",
"in",
"var_offsets",
".",
"items",
"(",
")",
":",
"size",
"=",
"var_sizes",
"[",
"id_",
"]",
"value... | converts a vector to a map of variable id to value . | train | false |
31,440 | def get_downloaded_sites(jsonurl=u'http://data.astropy.org/coordinates/sites.json'):
jsondb = json.loads(get_file_contents(jsonurl, show_progress=False))
return SiteRegistry.from_json(jsondb)
| [
"def",
"get_downloaded_sites",
"(",
"jsonurl",
"=",
"u'http://data.astropy.org/coordinates/sites.json'",
")",
":",
"jsondb",
"=",
"json",
".",
"loads",
"(",
"get_file_contents",
"(",
"jsonurl",
",",
"show_progress",
"=",
"False",
")",
")",
"return",
"SiteRegistry",
... | load observatory database from data . | train | false |
31,441 | def ode_1st_exact(eq, func, order, match):
x = func.args[0]
f = func.func
r = match
e = r[r['e']]
d = r[r['d']]
global y
y = r['y']
C1 = get_numbered_constants(eq, num=1)
sol = (Integral(d, x) + Integral((e - Integral(d, x).diff(y)), y))
return Eq(sol, C1)
| [
"def",
"ode_1st_exact",
"(",
"eq",
",",
"func",
",",
"order",
",",
"match",
")",
":",
"x",
"=",
"func",
".",
"args",
"[",
"0",
"]",
"f",
"=",
"func",
".",
"func",
"r",
"=",
"match",
"e",
"=",
"r",
"[",
"r",
"[",
"'e'",
"]",
"]",
"d",
"=",
... | solves 1st order exact ordinary differential equations . | train | false |
31,443 | def sdm_LM(f):
return f[0][0]
| [
"def",
"sdm_LM",
"(",
"f",
")",
":",
"return",
"f",
"[",
"0",
"]",
"[",
"0",
"]"
] | returns the leading monomial of f . | train | false |
31,444 | def assignmentFailure():
a = TpPd(pd=6)
b = MessageType(mesType=47)
c = RrCause()
packet = ((a / b) / c)
return packet
| [
"def",
"assignmentFailure",
"(",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"6",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"47",
")",
"c",
"=",
"RrCause",
"(",
")",
"packet",
"=",
"(",
"(",
"a",
"/",
"b",
")",
"/",
"c",
")",
"return... | assignment failure section 9 . | train | true |
31,445 | def login_required(handler_method):
def check_login(self, *args, **kwargs):
if (self.request.method != 'GET'):
self.abort(400, detail='The login_required decorator can only be used for GET requests.')
user = users.get_current_user()
if (not user):
return self.redirect(users.create_login_url(self.request.url))
else:
handler_method(self, *args, **kwargs)
return check_login
| [
"def",
"login_required",
"(",
"handler_method",
")",
":",
"def",
"check_login",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"self",
".",
"request",
".",
"method",
"!=",
"'GET'",
")",
":",
"self",
".",
"abort",
"(",
"400",... | like djangos login_required . | train | false |
31,446 | def _open(cgi, params=None, post=None, ecitmatch=False):
delay = 0.333333334
current = time.time()
wait = ((_open.previous + delay) - current)
if (wait > 0):
time.sleep(wait)
_open.previous = (current + wait)
else:
_open.previous = current
params = _construct_params(params)
options = _encode_options(ecitmatch, params)
if ((post is None) and (len(options) > 1000)):
post = True
cgi = _construct_cgi(cgi, post, options)
try:
if post:
handle = _urlopen(cgi, data=_as_bytes(options))
else:
handle = _urlopen(cgi)
except _HTTPError as exception:
raise exception
return _binary_to_string_handle(handle)
| [
"def",
"_open",
"(",
"cgi",
",",
"params",
"=",
"None",
",",
"post",
"=",
"None",
",",
"ecitmatch",
"=",
"False",
")",
":",
"delay",
"=",
"0.333333334",
"current",
"=",
"time",
".",
"time",
"(",
")",
"wait",
"=",
"(",
"(",
"_open",
".",
"previous",... | open a hnadle to scop . | train | false |
31,448 | @pytest.fixture(scope='session')
def test_fs():
import pytest_pootle
class TestFs(object, ):
def path(self, path):
return os.path.join(os.path.dirname(pytest_pootle.__file__), path)
def open(self, paths, *args, **kwargs):
if isinstance(paths, (list, tuple)):
paths = os.path.join(*paths)
return open(self.path(paths), *args, **kwargs)
return TestFs()
| [
"@",
"pytest",
".",
"fixture",
"(",
"scope",
"=",
"'session'",
")",
"def",
"test_fs",
"(",
")",
":",
"import",
"pytest_pootle",
"class",
"TestFs",
"(",
"object",
",",
")",
":",
"def",
"path",
"(",
"self",
",",
"path",
")",
":",
"return",
"os",
".",
... | test fullscreen support . | train | false |
31,449 | def page_sizes(size):
sizes = {'A0': pagesizes.A0, 'A1': pagesizes.A1, 'A2': pagesizes.A2, 'A3': pagesizes.A3, 'A4': pagesizes.A4, 'A5': pagesizes.A5, 'A6': pagesizes.A6, 'B0': pagesizes.B0, 'B1': pagesizes.B1, 'B2': pagesizes.B2, 'B3': pagesizes.B3, 'B4': pagesizes.B4, 'B5': pagesizes.B5, 'B6': pagesizes.B6, 'ELEVENSEVENTEEN': pagesizes.ELEVENSEVENTEEN, 'LEGAL': pagesizes.LEGAL, 'LETTER': pagesizes.LETTER}
try:
return sizes[size]
except:
raise ValueError(('%s not in list of page sizes' % size))
| [
"def",
"page_sizes",
"(",
"size",
")",
":",
"sizes",
"=",
"{",
"'A0'",
":",
"pagesizes",
".",
"A0",
",",
"'A1'",
":",
"pagesizes",
".",
"A1",
",",
"'A2'",
":",
"pagesizes",
".",
"A2",
",",
"'A3'",
":",
"pagesizes",
".",
"A3",
",",
"'A4'",
":",
"p... | convert size string into a reportlab pagesize . | train | false |
31,450 | @skip('win32')
def test_new_thread_is_background():
import thread
global done
done = None
def f():
global done
done = Thread.CurrentThread.IsBackground
thread.start_new_thread(f, ())
while (done == None):
Thread.Sleep(100)
Assert(done)
| [
"@",
"skip",
"(",
"'win32'",
")",
"def",
"test_new_thread_is_background",
"(",
")",
":",
"import",
"thread",
"global",
"done",
"done",
"=",
"None",
"def",
"f",
"(",
")",
":",
"global",
"done",
"done",
"=",
"Thread",
".",
"CurrentThread",
".",
"IsBackground... | verify new threads created during python are background threads . | train | false |
31,451 | def set_middleware_defaults():
cfg.set_defaults(cors.CORS_OPTS, allow_headers=['X-Auth-Token', 'X-Identity-Status', 'X-Roles', 'X-Service-Catalog', 'X-User-Id', 'X-Tenant-Id', 'X-OpenStack-Request-ID', 'X-Trace-Info', 'X-Trace-HMAC', 'OpenStack-API-Version'], expose_headers=['X-Auth-Token', 'X-Subject-Token', 'X-Service-Token', 'X-OpenStack-Request-ID', 'OpenStack-API-Version'], allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH', 'HEAD'])
| [
"def",
"set_middleware_defaults",
"(",
")",
":",
"cfg",
".",
"set_defaults",
"(",
"cors",
".",
"CORS_OPTS",
",",
"allow_headers",
"=",
"[",
"'X-Auth-Token'",
",",
"'X-Identity-Status'",
",",
"'X-Roles'",
",",
"'X-Service-Catalog'",
",",
"'X-User-Id'",
",",
"'X-Ten... | update default configuration options for oslo . | train | false |
31,452 | def plot_really_bad_images(images):
really_bad_image_indices = [324, 582, 659, 726, 846, 956, 1124, 1393, 1773, 1868, 2018, 2109, 2654, 4199, 4201, 4620, 5457, 5642]
n = len(really_bad_image_indices)
really_bad_images = [images[j] for j in really_bad_image_indices]
fig = plt.figure(figsize=(10, 2))
for j in xrange(1, (n + 1)):
ax = fig.add_subplot(2, 9, j)
ax.matshow(really_bad_images[(j - 1)], cmap=matplotlib.cm.binary)
plt.xticks(np.array([]))
plt.yticks(np.array([]))
plt.show()
| [
"def",
"plot_really_bad_images",
"(",
"images",
")",
":",
"really_bad_image_indices",
"=",
"[",
"324",
",",
"582",
",",
"659",
",",
"726",
",",
"846",
",",
"956",
",",
"1124",
",",
"1393",
",",
"1773",
",",
"1868",
",",
"2018",
",",
"2109",
",",
"265... | this takes a list of the worst images from plot_bad_images and turns them into a figure . | train | false |
31,453 | def check_for_updates(application_configuration):
update_server = appengine_rpc.HttpRpcServer(_UPDATE_SERVER, (lambda : ('unused_email', 'unused_password')), _get_user_agent(), _get_source_name())
update_server.authenticated = True
if application_configuration.servers:
update_check = sdk_update_checker.SDKUpdateChecker(update_server, application_configuration.servers)
update_check.CheckSupportedVersion()
if update_check.AllowedToCheckForUpdates():
update_check.CheckForUpdates()
| [
"def",
"check_for_updates",
"(",
"application_configuration",
")",
":",
"update_server",
"=",
"appengine_rpc",
".",
"HttpRpcServer",
"(",
"_UPDATE_SERVER",
",",
"(",
"lambda",
":",
"(",
"'unused_email'",
",",
"'unused_password'",
")",
")",
",",
"_get_user_agent",
"(... | checks for updates to the sdk . | train | false |
31,454 | def percentileRank(frame, column=None, kind='mean'):
fun = (lambda xs, score: percentileofscore(remove_na(xs), score, kind=kind))
results = {}
framet = frame.T
if (column is not None):
if isinstance(column, Series):
for (date, xs) in compat.iteritems(frame.T):
results[date] = fun(xs, column.get(date, NaN))
else:
for (date, xs) in compat.iteritems(frame.T):
results[date] = fun(xs, xs[column])
results = Series(results)
else:
for column in frame.columns:
for (date, xs) in compat.iteritems(framet):
results.setdefault(date, {})[column] = fun(xs, xs[column])
results = DataFrame(results).T
return results
| [
"def",
"percentileRank",
"(",
"frame",
",",
"column",
"=",
"None",
",",
"kind",
"=",
"'mean'",
")",
":",
"fun",
"=",
"(",
"lambda",
"xs",
",",
"score",
":",
"percentileofscore",
"(",
"remove_na",
"(",
"xs",
")",
",",
"score",
",",
"kind",
"=",
"kind"... | return score at percentile for each point in time parameters frame: dataframe column: string or series . | train | false |
31,455 | def test_profile_create_ipython_dir():
with TemporaryDirectory() as td:
getoutput([sys.executable, '-m', 'IPython', 'profile', 'create', 'foo', ('--ipython-dir=%s' % td)])
profile_dir = os.path.join(td, 'profile_foo')
assert os.path.exists(profile_dir)
ipython_config = os.path.join(profile_dir, 'ipython_config.py')
assert os.path.exists(ipython_config)
| [
"def",
"test_profile_create_ipython_dir",
"(",
")",
":",
"with",
"TemporaryDirectory",
"(",
")",
"as",
"td",
":",
"getoutput",
"(",
"[",
"sys",
".",
"executable",
",",
"'-m'",
",",
"'IPython'",
",",
"'profile'",
",",
"'create'",
",",
"'foo'",
",",
"(",
"'-... | ipython profile create respects --ipython-dir . | train | false |
31,456 | @instrumented_task(name='sentry.tasks.process_buffer.process_incr')
def process_incr(**kwargs):
from sentry import app
app.buffer.process(**kwargs)
| [
"@",
"instrumented_task",
"(",
"name",
"=",
"'sentry.tasks.process_buffer.process_incr'",
")",
"def",
"process_incr",
"(",
"**",
"kwargs",
")",
":",
"from",
"sentry",
"import",
"app",
"app",
".",
"buffer",
".",
"process",
"(",
"**",
"kwargs",
")"
] | processes a buffer event . | train | false |
31,458 | def set_remote_url(git_path, module, repo, dest, remote):
remote_url = get_remote_url(git_path, module, dest, remote)
if ((remote_url == repo) or (remote_url == unfrackgitpath(repo))):
return False
command = [git_path, 'remote', 'set-url', remote, repo]
(rc, out, err) = module.run_command(command, cwd=dest)
if (rc != 0):
label = ('set a new url %s for %s' % (repo, remote))
module.fail_json(msg=('Failed to %s: %s %s' % (label, out, err)))
return (remote_url is not None)
| [
"def",
"set_remote_url",
"(",
"git_path",
",",
"module",
",",
"repo",
",",
"dest",
",",
"remote",
")",
":",
"remote_url",
"=",
"get_remote_url",
"(",
"git_path",
",",
"module",
",",
"dest",
",",
"remote",
")",
"if",
"(",
"(",
"remote_url",
"==",
"repo",
... | updates repo from remote sources . | train | false |
31,459 | def get_provide_specs_and_objects(category):
return _load_provide_objects(category).copy()
| [
"def",
"get_provide_specs_and_objects",
"(",
"category",
")",
":",
"return",
"_load_provide_objects",
"(",
"category",
")",
".",
"copy",
"(",
")"
] | get a mapping of provide specs to their loaded objects . | train | false |
31,460 | def _float_to_json(value):
return value
| [
"def",
"_float_to_json",
"(",
"value",
")",
":",
"return",
"value"
] | coerce value to an json-compatible representation . | train | false |
31,461 | def find_images(ami_name=None, executable_by=None, owners=None, image_ids=None, tags=None, region=None, key=None, keyid=None, profile=None, return_objs=False):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
filter_parameters = {'filters': {}}
if image_ids:
filter_parameters['image_ids'] = [image_ids]
if executable_by:
filter_parameters['executable_by'] = [executable_by]
if owners:
filter_parameters['owners'] = [owners]
if ami_name:
filter_parameters['filters']['name'] = ami_name
if tags:
for (tag_name, tag_value) in six.iteritems(tags):
filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value
images = conn.get_all_images(**filter_parameters)
log.debug('The filters criteria {0} matched the following images:{1}'.format(filter_parameters, images))
if images:
if return_objs:
return images
return [image.id for image in images]
else:
return False
except boto.exception.BotoServerError as exc:
log.error(exc)
return False
| [
"def",
"find_images",
"(",
"ami_name",
"=",
"None",
",",
"executable_by",
"=",
"None",
",",
"owners",
"=",
"None",
",",
"image_ids",
"=",
"None",
",",
"tags",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None"... | given image properties . | train | true |
31,463 | def MutableNamedTupleRow(cursor):
from recordtype import recordtype
attr_names = [x[0] for x in cursor._ColBufferList]
class Row(recordtype('Row', attr_names, rename=True), ):
cursor_description = cursor.description
def __init__(self, iterable):
super(Row, self).__init__(*iterable)
def __iter__(self):
for field_name in self.__slots__:
(yield getattr(self, field_name))
def __getitem__(self, index):
if isinstance(index, slice):
return tuple((getattr(self, x) for x in self.__slots__[index]))
return getattr(self, self.__slots__[index])
def __setitem__(self, index, value):
setattr(self, self.__slots__[index], value)
return Row
| [
"def",
"MutableNamedTupleRow",
"(",
"cursor",
")",
":",
"from",
"recordtype",
"import",
"recordtype",
"attr_names",
"=",
"[",
"x",
"[",
"0",
"]",
"for",
"x",
"in",
"cursor",
".",
"_ColBufferList",
"]",
"class",
"Row",
"(",
"recordtype",
"(",
"'Row'",
",",
... | mutable named tuple to allow attribute to be replaced . | train | false |
31,464 | def _lint(file_or_dir, template_linters, options, summary_results, out):
if ((file_or_dir is not None) and os.path.isfile(file_or_dir)):
_process_file(file_or_dir, template_linters, options, summary_results, out)
else:
directory = '.'
if (file_or_dir is not None):
if os.path.exists(file_or_dir):
directory = file_or_dir
else:
raise ValueError('Path [{}] is not a valid file or directory.'.format(file_or_dir))
_process_os_dirs(directory, template_linters, options, summary_results, out)
summary_results.print_results(options, out)
| [
"def",
"_lint",
"(",
"file_or_dir",
",",
"template_linters",
",",
"options",
",",
"summary_results",
",",
"out",
")",
":",
"if",
"(",
"(",
"file_or_dir",
"is",
"not",
"None",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"file_or_dir",
")",
")",
"... | for each linter . | train | false |
31,465 | @task
@write
def reject_rereview(theme):
storage = LocalFileStorage()
rereview = theme.rereviewqueuetheme_set.all()
reupload = rereview[0]
storage.delete(reupload.header_path)
if reupload.footer:
storage.delete(reupload.footer_path)
rereview.delete()
| [
"@",
"task",
"@",
"write",
"def",
"reject_rereview",
"(",
"theme",
")",
":",
"storage",
"=",
"LocalFileStorage",
"(",
")",
"rereview",
"=",
"theme",
".",
"rereviewqueuetheme_set",
".",
"all",
"(",
")",
"reupload",
"=",
"rereview",
"[",
"0",
"]",
"storage",... | delete pending theme from filesystem . | train | false |
31,467 | def flatten_metadata(metadata):
if metadata:
return dict(((k.replace('.', ':').replace(':', '.', 1), six.text_type(v)) for (k, v) in utils.recursive_keypairs(metadata, separator='.') if (type(v) is not set)))
return {}
| [
"def",
"flatten_metadata",
"(",
"metadata",
")",
":",
"if",
"metadata",
":",
"return",
"dict",
"(",
"(",
"(",
"k",
".",
"replace",
"(",
"'.'",
",",
"':'",
")",
".",
"replace",
"(",
"':'",
",",
"'.'",
",",
"1",
")",
",",
"six",
".",
"text_type",
"... | return flattened resource metadata . | train | false |
31,469 | def create_http_server(host=None, port=0):
try:
http = HTTPServer()
http.bind(host=host, port=port)
except OSError as err:
console.exit('Failed to create HTTP server: {0}', err)
return http
| [
"def",
"create_http_server",
"(",
"host",
"=",
"None",
",",
"port",
"=",
"0",
")",
":",
"try",
":",
"http",
"=",
"HTTPServer",
"(",
")",
"http",
".",
"bind",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"port",
")",
"except",
"OSError",
"as",
"err",... | creates a http server listening on a given host and port . | train | true |
31,471 | def get_privacy_url():
return get_url('PRIVACY')
| [
"def",
"get_privacy_url",
"(",
")",
":",
"return",
"get_url",
"(",
"'PRIVACY'",
")"
] | lookup and return privacy policies page url . | train | false |
31,472 | def log_last_traceback(msg=None, log=logging.error):
if (not log):
log = logging.error
if msg:
log(msg)
(exc_type, exc_value, exc_traceback) = sys.exc_info()
if (not exc_traceback):
log('Requested log_last_traceback but no exception was raised.')
return
log(('Original ' + ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))))
| [
"def",
"log_last_traceback",
"(",
"msg",
"=",
"None",
",",
"log",
"=",
"logging",
".",
"error",
")",
":",
"if",
"(",
"not",
"log",
")",
":",
"log",
"=",
"logging",
".",
"error",
"if",
"msg",
":",
"log",
"(",
"msg",
")",
"(",
"exc_type",
",",
"exc... | writes last traceback into specified log . | train | false |
31,473 | def disk(radius, dtype=np.uint8):
L = np.arange((- radius), (radius + 1))
(X, Y) = np.meshgrid(L, L)
return np.array((((X ** 2) + (Y ** 2)) <= (radius ** 2)), dtype=dtype)
| [
"def",
"disk",
"(",
"radius",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
":",
"L",
"=",
"np",
".",
"arange",
"(",
"(",
"-",
"radius",
")",
",",
"(",
"radius",
"+",
"1",
")",
")",
"(",
"X",
",",
"Y",
")",
"=",
"np",
".",
"meshgrid",
"(",
... | generates a flat . | train | false |
def _launch_threads():
    """Initialize the work queues and start NUM_THREADS worker threads.

    Wraps the C-level launch_threads entry point from the workqueue
    extension in a ctypes prototype before invoking it.
    """
    from ctypes import CFUNCTYPE, c_int
    from . import workqueue as lib
    prototype = CFUNCTYPE(None, c_int)
    prototype(lib.launch_threads)(NUM_THREADS)
| [
"def",
"_launch_threads",
"(",
")",
":",
"from",
".",
"import",
"workqueue",
"as",
"lib",
"from",
"ctypes",
"import",
"CFUNCTYPE",
",",
"c_int",
"launch_threads",
"=",
"CFUNCTYPE",
"(",
"None",
",",
"c_int",
")",
"(",
"lib",
".",
"launch_threads",
")",
"la... | initialize work queues and workers . | train | false |
def list_team_repos(team_name, profile='github', ignore_cache=False):
    """
    Gets the repo details for a given team as a dict from repo_name to
    repo details.

    team_name
        Name of the team whose repos should be listed.
    profile
        Profile configuration to use (default: 'github').
    ignore_cache
        If True, bypass the cached repo list and query GitHub directly.

    Returns False when the team does not exist, [] when the team id
    cannot be resolved on GitHub, otherwise the dict of repos.
    """
    cached_team = get_team(team_name, profile=profile)
    if not cached_team:
        log.error('Team {0} does not exist.'.format(team_name))
        return False
    # Serve from cache unless the caller explicitly asked for fresh data.
    if cached_team.get('repos') and not ignore_cache:
        return cached_team.get('repos')
    try:
        client = _get_client(profile)
        organization = client.get_organization(_get_config_value(profile, 'org_name'))
        team = organization.get_team(cached_team['id'])
    except UnknownObjectException:
        # BUG FIX: previously execution fell through here with 'team'
        # unbound, raising NameError below. Bail out instead.
        log.exception('Resource not found: {0}'.format(cached_team['id']))
        return []
    try:
        repos = {}
        for repo in team.get_repos():
            # Map GitHub's permission flags to the highest level granted.
            permission = 'pull'
            if repo.permissions.admin:
                permission = 'admin'
            elif repo.permissions.push:
                permission = 'push'
            repos[repo.name.lower()] = {'permission': permission}
        cached_team['repos'] = repos
        return repos
    except UnknownObjectException:
        log.exception('Resource not found: {0}'.format(cached_team['id']))
        return []
| [
"def",
"list_team_repos",
"(",
"team_name",
",",
"profile",
"=",
"'github'",
",",
"ignore_cache",
"=",
"False",
")",
":",
"cached_team",
"=",
"get_team",
"(",
"team_name",
",",
"profile",
"=",
"profile",
")",
"if",
"(",
"not",
"cached_team",
")",
":",
"log... | gets the repo details for a given team as a dict from repo_name to repo details . | train | true |
def ImportStateMessage(state):
    """Convert a numeric import-state identifier to a status message.

    Raises KeyError for unknown states.
    """
    messages = {
        STATE_READ: 'Batch read from file.',
        STATE_SENDING: 'Sending batch to server.',
        STATE_SENT: 'Batch successfully sent.',
        STATE_NOT_SENT: 'Error while sending batch.',
    }
    return messages[state]
| [
"def",
"ImportStateMessage",
"(",
"state",
")",
":",
"return",
"{",
"STATE_READ",
":",
"'Batch read from file.'",
",",
"STATE_SENDING",
":",
"'Sending batch to server.'",
",",
"STATE_SENT",
":",
"'Batch successfully sent.'",
",",
"STATE_NOT_SENT",
":",
"'Error while sendi... | converts a numeric state identifier to a status message . | train | false |
def _parse_version(text):
    """Parse *text* into a (major, major2, minor) version tuple.

    The minor component becomes None when it is absent or non-numeric.
    """
    match = VERSION_RE.search(text)
    major, major2, minor = match.groups()
    try:
        parsed = (int(major), int(major2), int(minor))
    except (ValueError, TypeError):
        parsed = (int(major), int(major2), None)
    return parsed
| [
"def",
"_parse_version",
"(",
"text",
")",
":",
"(",
"major",
",",
"major2",
",",
"minor",
")",
"=",
"VERSION_RE",
".",
"search",
"(",
"text",
")",
".",
"groups",
"(",
")",
"try",
":",
"return",
"(",
"int",
"(",
"major",
")",
",",
"int",
"(",
"ma... | internal parsing method . | train | false |
def subscribe_to_basket_action(newsletter):
    """Build an admin action that subscribes selected profiles to *newsletter*."""
    def subscribe_to_basket(modeladmin, request, queryset):
        """Subscribe to Basket or update details of already subscribed."""
        subtasks = []
        for userprofile in queryset:
            subtasks.append(
                subscribe_user_to_basket.subtask(args=[userprofile.id, [newsletter]]))
        TaskSet(subtasks).apply_async()
        messages.success(request, 'Basket update started.')
    subscribe_to_basket.short_description = 'Subscribe to or Update {0}'.format(newsletter)
    subscribe_to_basket.__name__ = 'subscribe_to_basket_{0}'.format(newsletter.replace('-', '_'))
    return subscribe_to_basket
| [
"def",
"subscribe_to_basket_action",
"(",
"newsletter",
")",
":",
"def",
"subscribe_to_basket",
"(",
"modeladmin",
",",
"request",
",",
"queryset",
")",
":",
"ts",
"=",
"[",
"subscribe_user_to_basket",
".",
"subtask",
"(",
"args",
"=",
"[",
"userprofile",
".",
... | subscribe to basket action . | train | false |
def to_arr(this):
    """Return a Python list built from a JS-style array object.

    Elements are fetched via string indices '0'..'len-1' using .get().
    """
    length = len(this)
    return [this.get(str(index)) for index in xrange(length)]
| [
"def",
"to_arr",
"(",
"this",
")",
":",
"return",
"[",
"this",
".",
"get",
"(",
"str",
"(",
"e",
")",
")",
"for",
"e",
"in",
"xrange",
"(",
"len",
"(",
"this",
")",
")",
"]"
] | returns python array from js array . | train | true |
def alarm(bot, job):
    """Send the alarm message to the chat stored in the job's context."""
    chat_id = job.context
    bot.sendMessage(chat_id, text='Beep!')
| [
"def",
"alarm",
"(",
"bot",
",",
"job",
")",
":",
"bot",
".",
"sendMessage",
"(",
"job",
".",
"context",
",",
"text",
"=",
"'Beep!'",
")"
] | function to send the alarm message . | train | false |
def testString():
    """Return a string listing the valid tests and their docstrings."""
    parts = []
    for name, func in testDict().iteritems():
        parts.append('%s <%s>' % (name, func.__doc__))
    return ('valid tests: %s' % ', '.join(parts))
| [
"def",
"testString",
"(",
")",
":",
"tests",
"=",
"[",
"(",
"'%s <%s>'",
"%",
"(",
"name",
",",
"func",
".",
"__doc__",
")",
")",
"for",
"(",
"name",
",",
"func",
")",
"in",
"testDict",
"(",
")",
".",
"iteritems",
"(",
")",
"]",
"return",
"(",
... | return string listing valid tests . | train | false |
def set_chassis_name(name, host=None, admin_username=None, admin_password=None):
    """Set the name of the chassis via the racadm setsysinfo command."""
    cmd = 'setsysinfo -c chassisname {0}'.format(name)
    return __execute_cmd(cmd, host=host,
                         admin_username=admin_username,
                         admin_password=admin_password)
| [
"def",
"set_chassis_name",
"(",
"name",
",",
"host",
"=",
"None",
",",
"admin_username",
"=",
"None",
",",
"admin_password",
"=",
"None",
")",
":",
"return",
"__execute_cmd",
"(",
"'setsysinfo -c chassisname {0}'",
".",
"format",
"(",
"name",
")",
",",
"host",... | set the name of the chassis . | train | true |
def add_libravatar(generator, metadata):
    """Article-generator connector for the Libravatar plugin.

    Stores the author's Libravatar URL in metadata['author_libravatar'],
    derived from the md5 of the lowercased email. Falls back to the
    AUTHOR_EMAIL setting when the article has no 'email' metadata.
    Optional LIBRAVATAR_MISSING / LIBRAVATAR_SIZE settings become the
    'd' and 's' query parameters.
    """
    missing = generator.settings.get('LIBRAVATAR_MISSING')
    size = generator.settings.get('LIBRAVATAR_SIZE')
    if 'email' not in metadata:
        # dict.get never raises, so no try/except is needed here
        # (the original bare `except:` was dead code that could also
        # leave metadata['email'] unset, causing a KeyError below).
        metadata['email'] = generator.settings.get('AUTHOR_EMAIL')
    if metadata.get('email'):
        email = metadata['email'].lower().encode('ascii')
        md5 = hashlib.md5(email).hexdigest()
        url = 'http://cdn.libravatar.org/avatar/' + md5
        # BUG FIX: the old string-building emitted '?&s=N' when only the
        # size was configured; join the parameters properly instead.
        params = []
        if missing:
            params.append('d=' + missing)
        if size:
            params.append('s=' + str(size))
        if params:
            url = url + '?' + '&'.join(params)
        metadata['author_libravatar'] = url
| [
"def",
"add_libravatar",
"(",
"generator",
",",
"metadata",
")",
":",
"missing",
"=",
"generator",
".",
"settings",
".",
"get",
"(",
"'LIBRAVATAR_MISSING'",
")",
"size",
"=",
"generator",
".",
"settings",
".",
"get",
"(",
"'LIBRAVATAR_SIZE'",
")",
"if",
"(",... | article generator connector for the libravatar plugin . | train | true |
def check_dependencies():
    """Check that the admin's dependencies are correctly installed.

    Raises Exception when virtualenv support is unavailable; prints a
    confirmation otherwise.
    """
    if not HAS_VIRTUALENV:
        raise Exception('Virtualenv not found. Try installing python-virtualenv')
    print('done.')
| [
"def",
"check_dependencies",
"(",
")",
":",
"if",
"(",
"not",
"HAS_VIRTUALENV",
")",
":",
"raise",
"Exception",
"(",
"(",
"'Virtualenv not found. '",
"+",
"'Try installing python-virtualenv'",
")",
")",
"print",
"'done.'"
] | check that the admins dependencies are correctly installed . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.