id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
30,836 | def vm_monitoring(name, call=None):
if (call != 'action'):
raise SaltCloudSystemExit('The vm_monitoring action must be called with -a or --action.')
(server, user, password) = _get_xml_rpc()
auth = ':'.join([user, password])
vm_id = int(get_vm_id(kwargs={'name': name}))
response = server.one.vm.monitoring(auth, vm_id)
if (response[0] is False):
log.error("There was an error retrieving the specified VM's monitoring information.")
return {}
else:
info = {}
for vm_ in _get_xml(response[1]):
info[vm_.find('ID').text] = _xml_to_dict(vm_)
return info
| [
"def",
"vm_monitoring",
"(",
"name",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'action'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vm_monitoring action must be called with -a or --action.'",
")",
"(",
"server",
",",
"user",
",",
"... | returns the monitoring records for a given virtual machine . | train | true |
30,837 | def is_url_in_cache(url_key):
try:
(dldir, urlmapfn) = _get_download_cache_locs()
except (IOError, OSError) as e:
msg = u'Remote data cache could not be accessed due to '
estr = (u'' if (len(e.args) < 1) else (u': ' + str(e)))
warn(CacheMissingWarning(((msg + e.__class__.__name__) + estr)))
return False
if (six.PY2 and isinstance(url_key, six.text_type)):
url_key = url_key.encode(u'utf-8')
with _open_shelve(urlmapfn, True) as url2hash:
if (url_key in url2hash):
return True
return False
| [
"def",
"is_url_in_cache",
"(",
"url_key",
")",
":",
"try",
":",
"(",
"dldir",
",",
"urlmapfn",
")",
"=",
"_get_download_cache_locs",
"(",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
"as",
"e",
":",
"msg",
"=",
"u'Remote data cache could not be accessed... | check if a download from url_key is in the cache . | train | false |
30,838 | def trace_cov(trace, vars=None, model=None):
model = modelcontext(model)
if (model is not None):
vars = model.free_RVs
elif (vars is None):
vars = trace.varnames
def flat_t(var):
x = trace[str(var)]
return x.reshape((x.shape[0], np.prod(x.shape[1:], dtype=int)))
return np.cov(np.concatenate(list(map(flat_t, vars)), 1).T)
| [
"def",
"trace_cov",
"(",
"trace",
",",
"vars",
"=",
"None",
",",
"model",
"=",
"None",
")",
":",
"model",
"=",
"modelcontext",
"(",
"model",
")",
"if",
"(",
"model",
"is",
"not",
"None",
")",
":",
"vars",
"=",
"model",
".",
"free_RVs",
"elif",
"(",... | calculate the flattened covariance matrix using a sample trace useful if you want to base your covariance matrix for further sampling on some initial samples . | train | false |
30,839 | def write_renewal_config(o_filename, n_filename, archive_dir, target, relevant_data):
config = configobj.ConfigObj(o_filename)
config['version'] = certbot.__version__
config['archive_dir'] = archive_dir
for kind in ALL_FOUR:
config[kind] = target[kind]
if ('renewalparams' not in config):
config['renewalparams'] = {}
config.comments['renewalparams'] = ['', 'Options used in the renewal process']
config['renewalparams'].update(relevant_data)
for k in config['renewalparams'].keys():
if (k not in relevant_data):
del config['renewalparams'][k]
if ('renew_before_expiry' not in config):
default_interval = constants.RENEWER_DEFAULTS['renew_before_expiry']
config.initial_comment = [('renew_before_expiry = ' + default_interval)]
logger.debug('Writing new config %s.', n_filename)
with open(n_filename, 'wb') as f:
config.write(outfile=f)
return config
| [
"def",
"write_renewal_config",
"(",
"o_filename",
",",
"n_filename",
",",
"archive_dir",
",",
"target",
",",
"relevant_data",
")",
":",
"config",
"=",
"configobj",
".",
"ConfigObj",
"(",
"o_filename",
")",
"config",
"[",
"'version'",
"]",
"=",
"certbot",
".",
... | writes a renewal config file with the specified name and values . | train | false |
30,840 | def get_vis_chains(layer_to_chains, model, dataset):
vis_chains = layer_to_chains[model.visible_layer]
vis_chains = vis_chains.get_value()
print(vis_chains.shape)
if (vis_chains.ndim == 2):
vis_chains = dataset.get_topological_view(vis_chains)
print(vis_chains.shape)
vis_chains = dataset.adjust_for_viewer(vis_chains)
return vis_chains
| [
"def",
"get_vis_chains",
"(",
"layer_to_chains",
",",
"model",
",",
"dataset",
")",
":",
"vis_chains",
"=",
"layer_to_chains",
"[",
"model",
".",
"visible_layer",
"]",
"vis_chains",
"=",
"vis_chains",
".",
"get_value",
"(",
")",
"print",
"(",
"vis_chains",
"."... | get visible chains formatted for the path viewer . | train | false |
30,841 | def retrieve(sha1, **kwargs):
url = build_url(RESOURCE)
params = make_params(sha1=sha1)
return request('get', url, params=params, **kwargs)
| [
"def",
"retrieve",
"(",
"sha1",
",",
"**",
"kwargs",
")",
":",
"url",
"=",
"build_url",
"(",
"RESOURCE",
")",
"params",
"=",
"make_params",
"(",
"sha1",
"=",
"sha1",
")",
"return",
"request",
"(",
"'get'",
",",
"url",
",",
"params",
"=",
"params",
",... | retrieve a general file from plotly . | train | false |
30,842 | def _estimate_rank_meeg_signals(data, info, scalings, tol='auto', return_singular=False):
picks_list = _picks_by_type(info)
_apply_scaling_array(data, picks_list, scalings)
if (data.shape[1] < data.shape[0]):
ValueError("You've got fewer samples than channels, your rank estimate might be inaccurate.")
out = estimate_rank(data, tol=tol, norm=False, return_singular=return_singular)
rank = (out[0] if isinstance(out, tuple) else out)
ch_type = ' + '.join(list(zip(*picks_list))[0])
logger.info(('estimated rank (%s): %d' % (ch_type, rank)))
_undo_scaling_array(data, picks_list, scalings)
return out
| [
"def",
"_estimate_rank_meeg_signals",
"(",
"data",
",",
"info",
",",
"scalings",
",",
"tol",
"=",
"'auto'",
",",
"return_singular",
"=",
"False",
")",
":",
"picks_list",
"=",
"_picks_by_type",
"(",
"info",
")",
"_apply_scaling_array",
"(",
"data",
",",
"picks_... | estimate rank for m/eeg data . | train | false |
30,843 | def use_connection(redis=None):
assert (len(_connection_stack) <= 1), u'You should not mix Connection contexts with use_connection()'
release_local(_connection_stack)
if (redis is None):
redis = StrictRedis()
push_connection(redis)
| [
"def",
"use_connection",
"(",
"redis",
"=",
"None",
")",
":",
"assert",
"(",
"len",
"(",
"_connection_stack",
")",
"<=",
"1",
")",
",",
"u'You should not mix Connection contexts with use_connection()'",
"release_local",
"(",
"_connection_stack",
")",
"if",
"(",
"red... | clears the stack and uses the given connection . | train | false |
30,845 | def _get_spacewalk_configuration(spacewalk_url=''):
spacewalk_config = (__opts__['spacewalk'] if ('spacewalk' in __opts__) else None)
if spacewalk_config:
try:
for (spacewalk_server, service_config) in six.iteritems(spacewalk_config):
username = service_config.get('username', None)
password = service_config.get('password', None)
protocol = service_config.get('protocol', 'https')
if ((not username) or (not password)):
log.error('Username or Password has not been specified in the master configuration for {0}'.format(spacewalk_server))
return False
ret = {'api_url': '{0}://{1}/rpc/api'.format(protocol, spacewalk_server), 'username': username, 'password': password}
if ((not spacewalk_url) or (spacewalk_url == spacewalk_server)):
return ret
except Exception as exc:
log.error('Exception encountered: {0}'.format(exc))
return False
if spacewalk_url:
log.error('Configuration for {0} has not been specified in the master configuration'.format(spacewalk_url))
return False
return False
| [
"def",
"_get_spacewalk_configuration",
"(",
"spacewalk_url",
"=",
"''",
")",
":",
"spacewalk_config",
"=",
"(",
"__opts__",
"[",
"'spacewalk'",
"]",
"if",
"(",
"'spacewalk'",
"in",
"__opts__",
")",
"else",
"None",
")",
"if",
"spacewalk_config",
":",
"try",
":"... | return the configuration read from the master configuration file or directory . | train | true |
30,846 | def set_permissions(username, permissions, uid=None):
privileges = {'login': '0x0000001', 'drac': '0x0000002', 'user_management': '0x0000004', 'clear_logs': '0x0000008', 'server_control_commands': '0x0000010', 'console_redirection': '0x0000020', 'virtual_media': '0x0000040', 'test_alerts': '0x0000080', 'debug_commands': '0x0000100'}
permission = 0
if (uid is None):
user = list_users()
uid = user[username]['index']
for i in permissions.split(','):
perm = i.strip()
if (perm in privileges):
permission += int(privileges[perm], 16)
return __execute_cmd('config -g cfgUserAdmin -o cfgUserAdminPrivilege -i {0} 0x{1:08X}'.format(uid, permission))
| [
"def",
"set_permissions",
"(",
"username",
",",
"permissions",
",",
"uid",
"=",
"None",
")",
":",
"privileges",
"=",
"{",
"'login'",
":",
"'0x0000001'",
",",
"'drac'",
":",
"'0x0000002'",
",",
"'user_management'",
":",
"'0x0000004'",
",",
"'clear_logs'",
":",
... | give folder tree and its files their proper permissions . | train | true |
30,847 | def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, **kwargs):
try:
response_type = kwargs.pop(u'returns', u'text')
stream = kwargs.pop(u'stream', False)
(hooks, cookies, verify, proxies) = request_defaults(kwargs)
if (params and isinstance(params, (list, dict))):
for param in params:
if isinstance(params[param], unicode):
params[param] = params[param].encode(u'utf-8')
if (post_data and isinstance(post_data, (list, dict))):
for param in post_data:
if isinstance(post_data[param], unicode):
post_data[param] = post_data[param].encode(u'utf-8')
resp = session.request((u'POST' if post_data else u'GET'), url, data=(post_data or {}), params=(params or {}), timeout=timeout, allow_redirects=True, hooks=hooks, stream=stream, headers=headers, cookies=cookies, proxies=proxies, verify=verify)
resp.raise_for_status()
except Exception as error:
handle_requests_exception(error)
return None
try:
return (resp if ((response_type == u'response') or (response_type is None)) else (resp.json() if (response_type == u'json') else getattr(resp, response_type, resp)))
except ValueError:
logger.log(u'Requested a json response but response was not json, check the url: {0}'.format(url), logger.DEBUG)
return None
| [
"def",
"getURL",
"(",
"url",
",",
"post_data",
"=",
"None",
",",
"params",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"timeout",
"=",
"30",
",",
"session",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"try",
":",
"response_type",
"=",
"kwargs",
... | convenience method to directly retrieve the contents of a url . | train | false |
30,848 | def accept_suggestion(editor_id, thread_id, exploration_id, commit_message):
if ((not commit_message) or (not commit_message.strip())):
raise Exception('Commit message cannot be empty.')
if _is_suggestion_handled(thread_id, exploration_id):
raise Exception('Suggestion has already been accepted/rejected.')
elif (not _is_suggestion_valid(thread_id, exploration_id)):
raise Exception('Invalid suggestion: The state for which it was made has been removed/renamed.')
else:
suggestion = feedback_services.get_suggestion(exploration_id, thread_id)
suggestion_author_username = suggestion.get_author_name()
change_list = _create_change_list_from_suggestion(suggestion)
update_exploration(editor_id, exploration_id, change_list, _get_commit_message_for_suggestion(suggestion_author_username, commit_message), is_suggestion=True)
feedback_services.create_message(exploration_id, thread_id, editor_id, feedback_models.STATUS_CHOICES_FIXED, None, 'Suggestion accepted.')
| [
"def",
"accept_suggestion",
"(",
"editor_id",
",",
"thread_id",
",",
"exploration_id",
",",
"commit_message",
")",
":",
"if",
"(",
"(",
"not",
"commit_message",
")",
"or",
"(",
"not",
"commit_message",
".",
"strip",
"(",
")",
")",
")",
":",
"raise",
"Excep... | if the suggestion is valid . | train | false |
30,849 | def getPhotometerByName(name):
for photom in getAllPhotometers():
if ((name.lower() in photom.driverFor) or (name == photom.longName)):
return photom
| [
"def",
"getPhotometerByName",
"(",
"name",
")",
":",
"for",
"photom",
"in",
"getAllPhotometers",
"(",
")",
":",
"if",
"(",
"(",
"name",
".",
"lower",
"(",
")",
"in",
"photom",
".",
"driverFor",
")",
"or",
"(",
"name",
"==",
"photom",
".",
"longName",
... | gets a photometer class by name . | train | false |
30,850 | @slow_test
def test_apply_function_verbose():
n_chan = 2
n_times = 3
ch_names = [str(ii) for ii in range(n_chan)]
raw = RawArray(np.zeros((n_chan, n_times)), create_info(ch_names, 1.0, 'mag'))
assert_raises(TypeError, raw.apply_function, bad_1)
assert_raises(ValueError, raw.apply_function, bad_2)
assert_raises(TypeError, raw.apply_function, bad_1, n_jobs=2)
assert_raises(ValueError, raw.apply_function, bad_2, n_jobs=2)
with catch_logging() as sio:
out = raw.apply_function(printer, verbose=False)
assert_equal(len(sio.getvalue()), 0)
assert_true((out is raw))
raw.apply_function(printer, verbose=True)
assert_equal(sio.getvalue().count('\n'), n_chan)
| [
"@",
"slow_test",
"def",
"test_apply_function_verbose",
"(",
")",
":",
"n_chan",
"=",
"2",
"n_times",
"=",
"3",
"ch_names",
"=",
"[",
"str",
"(",
"ii",
")",
"for",
"ii",
"in",
"range",
"(",
"n_chan",
")",
"]",
"raw",
"=",
"RawArray",
"(",
"np",
".",
... | test apply function verbosity . | train | false |
30,851 | def test_slugify_spaces():
assert (utils.slugify(' b ar ') == 'b-ar')
assert (utils.slugify(' b ar ', spaces=True) == 'b ar')
assert (utils.slugify(' b ar ', spaces=True) == 'b ar')
| [
"def",
"test_slugify_spaces",
"(",
")",
":",
"assert",
"(",
"utils",
".",
"slugify",
"(",
"' b ar '",
")",
"==",
"'b-ar'",
")",
"assert",
"(",
"utils",
".",
"slugify",
"(",
"' b ar '",
",",
"spaces",
"=",
"True",
")",
"==",
"'b ar'",
")",
"assert",
"("... | we want slugify to preserve spaces . | train | false |
30,854 | def stable_var(input_, mean=None, axes=[0]):
if (mean is None):
mean = tf.reduce_mean(input_, axes)
res = tf.square((input_ - mean))
max_sqr = tf.reduce_max(res, axes)
res /= max_sqr
res = tf.reduce_mean(res, axes)
res *= max_sqr
return res
| [
"def",
"stable_var",
"(",
"input_",
",",
"mean",
"=",
"None",
",",
"axes",
"=",
"[",
"0",
"]",
")",
":",
"if",
"(",
"mean",
"is",
"None",
")",
":",
"mean",
"=",
"tf",
".",
"reduce_mean",
"(",
"input_",
",",
"axes",
")",
"res",
"=",
"tf",
".",
... | numerically more stable variance computation . | train | false |
30,855 | def neighsol(addr, src, iface, timeout=1, chainCC=0):
nsma = in6_getnsma(inet_pton(socket.AF_INET6, addr))
d = inet_ntop(socket.AF_INET6, nsma)
dm = in6_getnsmac(nsma)
p = (Ether(dst=dm) / IPv6(dst=d, src=src, hlim=255))
p /= ICMPv6ND_NS(tgt=addr)
p /= ICMPv6NDOptSrcLLAddr(lladdr=get_if_hwaddr(iface))
res = srp1(p, type=ETH_P_IPV6, iface=iface, timeout=1, verbose=0, chainCC=chainCC)
return res
| [
"def",
"neighsol",
"(",
"addr",
",",
"src",
",",
"iface",
",",
"timeout",
"=",
"1",
",",
"chainCC",
"=",
"0",
")",
":",
"nsma",
"=",
"in6_getnsma",
"(",
"inet_pton",
"(",
"socket",
".",
"AF_INET6",
",",
"addr",
")",
")",
"d",
"=",
"inet_ntop",
"(",... | sends an icmpv6 neighbor solicitation message to get the mac address of the neighbor with specified ipv6 address addr . | train | true |
30,858 | def splitPath(path, sep='/'):
sepIdx = path.find(sep)
if (sepIdx < 0):
return (path, None)
return (path[:sepIdx], path[(sepIdx + 1):])
| [
"def",
"splitPath",
"(",
"path",
",",
"sep",
"=",
"'/'",
")",
":",
"sepIdx",
"=",
"path",
".",
"find",
"(",
"sep",
")",
"if",
"(",
"sepIdx",
"<",
"0",
")",
":",
"return",
"(",
"path",
",",
"None",
")",
"return",
"(",
"path",
"[",
":",
"sepIdx",... | chops the first part of a /-separated path and returns a tuple of the first part and the tail . | train | false |
30,859 | def safeeval(expr, globals=None, locals=None):
safe_globals = {'False': False, 'None': None, 'True': True, '__builtin__': None, '__builtins__': None, 'set': set}
if PY2:
safe_globals['xrange'] = xrange
else:
safe_globals['range'] = range
def open(*args, **kwargs):
raise NameError("name 'open' is not defined")
safe_globals['open'] = open
if globals:
safe_globals.update(globals)
return eval(expr, safe_globals, locals)
| [
"def",
"safeeval",
"(",
"expr",
",",
"globals",
"=",
"None",
",",
"locals",
"=",
"None",
")",
":",
"safe_globals",
"=",
"{",
"'False'",
":",
"False",
",",
"'None'",
":",
"None",
",",
"'True'",
":",
"True",
",",
"'__builtin__'",
":",
"None",
",",
"'__... | like eval . | train | false |
30,860 | def check_video(video, languages=None, age=None, undefined=False):
if (languages and (not (languages - video.subtitle_languages))):
logger.debug('All languages %r exist', languages)
return False
if (age and (video.age > age)):
logger.debug('Video is older than %r', age)
return False
if (undefined and (Language('und') in video.subtitle_languages)):
logger.debug('Undefined language found')
return False
return True
| [
"def",
"check_video",
"(",
"video",
",",
"languages",
"=",
"None",
",",
"age",
"=",
"None",
",",
"undefined",
"=",
"False",
")",
":",
"if",
"(",
"languages",
"and",
"(",
"not",
"(",
"languages",
"-",
"video",
".",
"subtitle_languages",
")",
")",
")",
... | perform some checks on the video . | train | true |
30,861 | def _check_iterable(arg, msg, **kwargs):
if isinstance(arg, compat.string_types):
raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
elif (not isinstance(arg, collections.Iterable)):
raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
elif (iter(arg) is iter(arg)):
raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
| [
"def",
"_check_iterable",
"(",
"arg",
",",
"msg",
",",
"**",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"arg",
",",
"compat",
".",
"string_types",
")",
":",
"raise",
"exceptions",
".",
"ValidationError",
"(",
"msg",
".",
"format",
"(",
"arg",
"=",
"a... | ensure we have an iterable which is not a string or an iterator . | train | false |
30,862 | def _create_prefix_notification(outgoing_msg, rpc_session):
assert outgoing_msg
path = outgoing_msg.path
assert path
vpn_nlri = path.nlri
assert (path.source is not None)
if (path.source != VRF_TABLE):
params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist, PREFIX: vpn_nlri.prefix, NEXT_HOP: path.nexthop, VPN_LABEL: path.label_list[0], VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family)}]
if (not path.is_withdraw):
rpc_msg = rpc_session.create_notification(NOTIFICATION_ADD_REMOTE_PREFIX, params)
else:
rpc_msg = rpc_session.create_notification(NOTIFICATION_DELETE_REMOTE_PREFIX, params)
else:
params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist, PREFIX: vpn_nlri.prefix, NEXT_HOP: path.nexthop, VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family), ORIGIN_RD: path.origin_rd}]
if (not path.is_withdraw):
rpc_msg = rpc_session.create_notification(NOTIFICATION_ADD_LOCAL_PREFIX, params)
else:
rpc_msg = rpc_session.create_notification(NOTIFICATION_DELETE_LOCAL_PREFIX, params)
return rpc_msg
| [
"def",
"_create_prefix_notification",
"(",
"outgoing_msg",
",",
"rpc_session",
")",
":",
"assert",
"outgoing_msg",
"path",
"=",
"outgoing_msg",
".",
"path",
"assert",
"path",
"vpn_nlri",
"=",
"path",
".",
"nlri",
"assert",
"(",
"path",
".",
"source",
"is",
"no... | constructs prefix notification with data from given outgoing message . | train | true |
30,866 | @pytest.fixture(autouse=True)
def fast_wait(monkeypatch):
old_sleep = gevent.sleep
old_joinall = gevent.joinall
old_killall = gevent.killall
def fast_wait(tm):
return old_sleep(0.1)
def fast_joinall(*args, **kwargs):
if ('timeout' in kwargs):
kwargs['timeout'] = 0.1
return old_joinall(*args, **kwargs)
def fast_killall(*args, **kwargs):
if ('timeout' in kwargs):
kwargs['timeout'] = 0.1
return old_killall(*args, **kwargs)
monkeypatch.setattr(gevent, 'sleep', fast_wait)
monkeypatch.setattr(gevent, 'joinall', fast_joinall)
monkeypatch.setattr(gevent, 'killall', fast_killall)
| [
"@",
"pytest",
".",
"fixture",
"(",
"autouse",
"=",
"True",
")",
"def",
"fast_wait",
"(",
"monkeypatch",
")",
":",
"old_sleep",
"=",
"gevent",
".",
"sleep",
"old_joinall",
"=",
"gevent",
".",
"joinall",
"old_killall",
"=",
"gevent",
".",
"killall",
"def",
... | stub out gevent calls that take timeouts to wait briefly . | train | false |
30,868 | def password_email(user):
from r2.lib.pages import PasswordReset
user_reset_ratelimit = SimpleRateLimit(name=('email_reset_count_%s' % user._id36), seconds=int(datetime.timedelta(hours=12).total_seconds()), limit=3)
if (not user_reset_ratelimit.record_and_check()):
return False
global_reset_ratelimit = SimpleRateLimit(name='email_reset_count_global', seconds=int(datetime.timedelta(hours=1).total_seconds()), limit=1000)
if (not global_reset_ratelimit.record_and_check()):
raise ValueError('password reset ratelimit exceeded')
token = PasswordResetToken._new(user)
base = (g.https_endpoint or g.origin)
passlink = ((base + '/resetpassword/') + token._id)
g.log.info(('Generated password reset link: ' + passlink))
_system_email(user.email, PasswordReset(user=user, passlink=passlink).render(style='email'), Email.Kind.RESET_PASSWORD, user=user)
return True
| [
"def",
"password_email",
"(",
"user",
")",
":",
"from",
"r2",
".",
"lib",
".",
"pages",
"import",
"PasswordReset",
"user_reset_ratelimit",
"=",
"SimpleRateLimit",
"(",
"name",
"=",
"(",
"'email_reset_count_%s'",
"%",
"user",
".",
"_id36",
")",
",",
"seconds",
... | for resetting a users password . | train | false |
30,869 | def crack(receipt):
return map((lambda x: jwt.decode(x.encode('ascii'), verify=False)), receipt.split('~'))
| [
"def",
"crack",
"(",
"receipt",
")",
":",
"return",
"map",
"(",
"(",
"lambda",
"x",
":",
"jwt",
".",
"decode",
"(",
"x",
".",
"encode",
"(",
"'ascii'",
")",
",",
"verify",
"=",
"False",
")",
")",
",",
"receipt",
".",
"split",
"(",
"'~'",
")",
"... | crack open the receipt . | train | false |
30,870 | @step(((STEP_PREFIX + '([A-Z][a-z0-9_ ]*) with ([a-z]+) "([^"]*)"') + ' has(?: an?)? ([A-Z][a-z0-9_ ]*) in the database:'))
def create_models_for_relation(step, rel_model_name, rel_key, rel_value, model):
lookup = {rel_key: rel_value}
rel_model = get_model(rel_model_name).objects.get(**lookup)
for hash_ in step.hashes:
hash_[('%s' % rel_model_name)] = rel_model
write_models_generic(step, model)
| [
"@",
"step",
"(",
"(",
"(",
"STEP_PREFIX",
"+",
"'([A-Z][a-z0-9_ ]*) with ([a-z]+) \"([^\"]*)\"'",
")",
"+",
"' has(?: an?)? ([A-Z][a-z0-9_ ]*) in the database:'",
")",
")",
"def",
"create_models_for_relation",
"(",
"step",
",",
"rel_model_name",
",",
"rel_key",
",",
"rel... | and project with name "ball project" has goals in the database: | description | | to have fun playing with balls of twine | . | train | false |
30,871 | def script(vm_):
return salt.utils.cloud.os_script(config.get_cloud_config_value('script', vm_, __opts__), vm_, __opts__, salt.utils.cloud.salt_config_to_yaml(salt.utils.cloud.minion_config(__opts__, vm_)))
| [
"def",
"script",
"(",
"vm_",
")",
":",
"return",
"salt",
".",
"utils",
".",
"cloud",
".",
"os_script",
"(",
"config",
".",
"get_cloud_config_value",
"(",
"'script'",
",",
"vm_",
",",
"__opts__",
")",
",",
"vm_",
",",
"__opts__",
",",
"salt",
".",
"util... | return the script deployment object . | train | true |
30,872 | def _compute_topic(topic, ctxt, host, instance):
if (not host):
if (not instance):
raise exception.NovaException(_('No compute host specified'))
host = instance['host']
if (not host):
raise exception.NovaException((_('Unable to find host for Instance %s') % instance['uuid']))
return rpc.queue_get_for(ctxt, topic, host)
| [
"def",
"_compute_topic",
"(",
"topic",
",",
"ctxt",
",",
"host",
",",
"instance",
")",
":",
"if",
"(",
"not",
"host",
")",
":",
"if",
"(",
"not",
"instance",
")",
":",
"raise",
"exception",
".",
"NovaException",
"(",
"_",
"(",
"'No compute host specified... | get the topic to use for a message . | train | false |
30,874 | def get_queue_name(queue_name_base, queue_name_suffix, add_random_uuid_to_suffix=True):
if (not queue_name_base):
raise ValueError('Queue name base cannot be empty.')
if (not queue_name_suffix):
return queue_name_base
queue_suffix = queue_name_suffix
if add_random_uuid_to_suffix:
u_hex = uuid.uuid4().hex
uuid_suffix = uuid.uuid4().hex[(len(u_hex) - 10):]
queue_suffix = ('%s-%s' % (queue_name_suffix, uuid_suffix))
queue_name = ('%s.%s' % (queue_name_base, queue_suffix))
return queue_name
| [
"def",
"get_queue_name",
"(",
"queue_name_base",
",",
"queue_name_suffix",
",",
"add_random_uuid_to_suffix",
"=",
"True",
")",
":",
"if",
"(",
"not",
"queue_name_base",
")",
":",
"raise",
"ValueError",
"(",
"'Queue name base cannot be empty.'",
")",
"if",
"(",
"not"... | get a queue name based on base name and suffix . | train | false |
30,877 | def update_course_in_cache(course_key):
return get_block_structure_manager(course_key).update_collected()
| [
"def",
"update_course_in_cache",
"(",
"course_key",
")",
":",
"return",
"get_block_structure_manager",
"(",
"course_key",
")",
".",
"update_collected",
"(",
")"
] | a higher order function implemented on top of the block_structure . | train | false |
30,878 | def batch_cmd_exec(caller):
ptr = caller.ndb.batch_stackptr
stack = caller.ndb.batch_stack
command = stack[ptr]
caller.msg(format_header(caller, command))
try:
caller.execute_cmd(command)
except Exception:
logger.log_trace()
return False
return True
| [
"def",
"batch_cmd_exec",
"(",
"caller",
")",
":",
"ptr",
"=",
"caller",
".",
"ndb",
".",
"batch_stackptr",
"stack",
"=",
"caller",
".",
"ndb",
".",
"batch_stack",
"command",
"=",
"stack",
"[",
"ptr",
"]",
"caller",
".",
"msg",
"(",
"format_header",
"(",
... | helper function for executing a single batch-command entry . | train | false |
30,880 | def release_local(local):
local.__release_local__()
| [
"def",
"release_local",
"(",
"local",
")",
":",
"local",
".",
"__release_local__",
"(",
")"
] | releases the contents of the local for the current context . | train | false |
30,881 | def dehydrate_ratings_body(body_class):
body = body_class()
if (body.label is None):
body.label = slugify_iarc_name(body)
body.name = unicode(body.name)
body.description = unicode(body.description)
return body
| [
"def",
"dehydrate_ratings_body",
"(",
"body_class",
")",
":",
"body",
"=",
"body_class",
"(",
")",
"if",
"(",
"body",
".",
"label",
"is",
"None",
")",
":",
"body",
".",
"label",
"=",
"slugify_iarc_name",
"(",
"body",
")",
"body",
".",
"name",
"=",
"uni... | returns a rating body with translated fields attached . | train | false |
30,883 | def get_reconciler_container_name(obj_timestamp):
(_junk, _junk, ts_meta) = decode_timestamps(obj_timestamp)
return str(((int(ts_meta) // MISPLACED_OBJECTS_CONTAINER_DIVISOR) * MISPLACED_OBJECTS_CONTAINER_DIVISOR))
| [
"def",
"get_reconciler_container_name",
"(",
"obj_timestamp",
")",
":",
"(",
"_junk",
",",
"_junk",
",",
"ts_meta",
")",
"=",
"decode_timestamps",
"(",
"obj_timestamp",
")",
"return",
"str",
"(",
"(",
"(",
"int",
"(",
"ts_meta",
")",
"//",
"MISPLACED_OBJECTS_C... | get the name of a container into which a misplaced object should be enqueued . | train | false |
30,884 | def replaceWith(replStr):
def _replFunc(*args):
return [replStr]
return _replFunc
| [
"def",
"replaceWith",
"(",
"replStr",
")",
":",
"def",
"_replFunc",
"(",
"*",
"args",
")",
":",
"return",
"[",
"replStr",
"]",
"return",
"_replFunc"
] | helper method for common parse actions that simply return a literal value . | train | false |
30,885 | def getSimplifiedLoop(loop, radius):
if (len(loop) < 2):
return loop
simplificationMultiplication = 256
simplificationRadius = (radius / float(simplificationMultiplication))
maximumIndex = (len(loop) * simplificationMultiplication)
pointIndex = 1
while (pointIndex < maximumIndex):
oldLoopLength = len(loop)
loop = getHalfSimplifiedLoop(loop, simplificationRadius, 0)
loop = getHalfSimplifiedLoop(loop, simplificationRadius, 1)
simplificationRadius += simplificationRadius
if (oldLoopLength == len(loop)):
if (simplificationRadius > radius):
return getAwayPoints(loop, radius)
else:
simplificationRadius *= 1.5
simplificationRadius = min(simplificationRadius, radius)
pointIndex += pointIndex
return getAwayPoints(loop, radius)
| [
"def",
"getSimplifiedLoop",
"(",
"loop",
",",
"radius",
")",
":",
"if",
"(",
"len",
"(",
"loop",
")",
"<",
"2",
")",
":",
"return",
"loop",
"simplificationMultiplication",
"=",
"256",
"simplificationRadius",
"=",
"(",
"radius",
"/",
"float",
"(",
"simplifi... | get loop with points inside the channel removed . | train | false |
30,886 | def no_cancel_b_large(b, c, n, DE):
q = Poly(0, DE.t)
while (not c.is_zero):
m = (c.degree(DE.t) - b.degree(DE.t))
if (not (0 <= m <= n)):
raise NonElementaryIntegralException
p = Poly(((c.as_poly(DE.t).LC() / b.as_poly(DE.t).LC()) * (DE.t ** m)), DE.t, expand=False)
q = (q + p)
n = (m - 1)
c = ((c - derivation(p, DE)) - (b * p))
return q
| [
"def",
"no_cancel_b_large",
"(",
"b",
",",
"c",
",",
"n",
",",
"DE",
")",
":",
"q",
"=",
"Poly",
"(",
"0",
",",
"DE",
".",
"t",
")",
"while",
"(",
"not",
"c",
".",
"is_zero",
")",
":",
"m",
"=",
"(",
"c",
".",
"degree",
"(",
"DE",
".",
"t... | poly risch differential equation - no cancellation: deg(b) large enough . | train | false |
30,888 | def delete_router_lport(cluster, lrouter_uuid, lport_uuid):
path = _build_uri_path(LROUTERPORT_RESOURCE, lport_uuid, lrouter_uuid)
try:
do_single_request(HTTP_DELETE, path, cluster=cluster)
except NvpApiClient.ResourceNotFound as e:
LOG.error(_('Logical router not found, Error: %s'), str(e))
raise
LOG.debug(_('Delete logical router port %(lport_uuid)s on logical router %(lrouter_uuid)s'), {'lport_uuid': lport_uuid, 'lrouter_uuid': lrouter_uuid})
| [
"def",
"delete_router_lport",
"(",
"cluster",
",",
"lrouter_uuid",
",",
"lport_uuid",
")",
":",
"path",
"=",
"_build_uri_path",
"(",
"LROUTERPORT_RESOURCE",
",",
"lport_uuid",
",",
"lrouter_uuid",
")",
"try",
":",
"do_single_request",
"(",
"HTTP_DELETE",
",",
"pat... | creates a logical port on the assigned logical router . | train | false |
30,889 | def assure_directory_exists(path, is_file=False):
if is_file:
path = osp.dirname(path)
if (not osp.isdir(path)):
os.makedirs(path)
return True
return False
| [
"def",
"assure_directory_exists",
"(",
"path",
",",
"is_file",
"=",
"False",
")",
":",
"if",
"is_file",
":",
"path",
"=",
"osp",
".",
"dirname",
"(",
"path",
")",
"if",
"(",
"not",
"osp",
".",
"isdir",
"(",
"path",
")",
")",
":",
"os",
".",
"makedi... | assure that the directory pointed to by path exists . | train | true |
30,890 | def get_unused_port_ipv6():
(port, s) = get_unused_port_and_socket_ipv6()
s.close()
return port
| [
"def",
"get_unused_port_ipv6",
"(",
")",
":",
"(",
"port",
",",
"s",
")",
"=",
"get_unused_port_and_socket_ipv6",
"(",
")",
"s",
".",
"close",
"(",
")",
"return",
"port"
] | returns an unused port on localhost on ipv6 . | train | false |
30,891 | def something(TokenClass):
def callback(lexer, match, context):
text = match.group()
if (not text):
return
(yield (match.start(), TokenClass, text))
context.pos = match.end()
return callback
| [
"def",
"something",
"(",
"TokenClass",
")",
":",
"def",
"callback",
"(",
"lexer",
",",
"match",
",",
"context",
")",
":",
"text",
"=",
"match",
".",
"group",
"(",
")",
"if",
"(",
"not",
"text",
")",
":",
"return",
"(",
"yield",
"(",
"match",
".",
... | do not produce empty tokens . | train | true |
30,894 | def csm_data(csm):
return csm_properties(csm)[0]
| [
"def",
"csm_data",
"(",
"csm",
")",
":",
"return",
"csm_properties",
"(",
"csm",
")",
"[",
"0",
"]"
] | return the data field of the sparse variable . | train | false |
30,896 | @testing.requires_testing_data
def test_simulate_sparse_stc_single_hemi():
fwd = read_forward_solution_meg(fname_fwd, force_fixed=True)
n_times = 10
tmin = 0
tstep = 0.001
times = ((np.arange(n_times, dtype=np.float) * tstep) + tmin)
labels_single_hemi = [read_label(op.join(data_path, 'MEG', 'sample', 'labels', ('%s.label' % label))) for label in label_names_single_hemi]
stc_1 = simulate_sparse_stc(fwd['src'], len(labels_single_hemi), times, labels=labels_single_hemi, random_state=0)
assert_true((stc_1.data.shape[0] == len(labels_single_hemi)))
assert_true((stc_1.data.shape[1] == n_times))
stc_2 = simulate_sparse_stc(fwd['src'], len(labels_single_hemi), times, labels=labels_single_hemi, random_state=0)
assert_array_equal(stc_1.lh_vertno, stc_2.lh_vertno)
assert_array_equal(stc_1.rh_vertno, stc_2.rh_vertno)
| [
"@",
"testing",
".",
"requires_testing_data",
"def",
"test_simulate_sparse_stc_single_hemi",
"(",
")",
":",
"fwd",
"=",
"read_forward_solution_meg",
"(",
"fname_fwd",
",",
"force_fixed",
"=",
"True",
")",
"n_times",
"=",
"10",
"tmin",
"=",
"0",
"tstep",
"=",
"0.... | test generation of sparse source estimate . | train | false |
30,897 | def reorder_item():
if (not (frappe.db.a_row_exists('Company') and frappe.db.a_row_exists('Fiscal Year'))):
return
if cint(frappe.db.get_value('Stock Settings', None, 'auto_indent')):
return _reorder_item()
| [
"def",
"reorder_item",
"(",
")",
":",
"if",
"(",
"not",
"(",
"frappe",
".",
"db",
".",
"a_row_exists",
"(",
"'Company'",
")",
"and",
"frappe",
".",
"db",
".",
"a_row_exists",
"(",
"'Fiscal Year'",
")",
")",
")",
":",
"return",
"if",
"cint",
"(",
"fra... | reorder item if stock reaches reorder level . | train | false |
30,899 | def _rstrip(line, JUNK='\n DCTB '):
i = len(line)
while ((i > 0) and (line[(i - 1)] in JUNK)):
i -= 1
return line[:i]
| [
"def",
"_rstrip",
"(",
"line",
",",
"JUNK",
"=",
"'\\n DCTB '",
")",
":",
"i",
"=",
"len",
"(",
"line",
")",
"while",
"(",
"(",
"i",
">",
"0",
")",
"and",
"(",
"line",
"[",
"(",
"i",
"-",
"1",
")",
"]",
"in",
"JUNK",
")",
")",
":",
"i",
... | return line stripped of trailing spaces . | train | false |
30,900 | def setup_platform(hass, config, add_devices, discovery_info=None):
import pyowm
longitude = config.get(CONF_LONGITUDE, round(hass.config.longitude, 5))
latitude = config.get(CONF_LATITUDE, round(hass.config.latitude, 5))
name = config.get(CONF_NAME)
try:
owm = pyowm.OWM(config.get(CONF_API_KEY))
except pyowm.exceptions.api_call_error.APICallError:
_LOGGER.error('Error while connecting to OpenWeatherMap')
return False
data = WeatherData(owm, latitude, longitude)
add_devices([OpenWeatherMapWeather(name, data, hass.config.units.temperature_unit)], True)
| [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_devices",
",",
"discovery_info",
"=",
"None",
")",
":",
"import",
"pyowm",
"longitude",
"=",
"config",
".",
"get",
"(",
"CONF_LONGITUDE",
",",
"round",
"(",
"hass",
".",
"config",
".",
"longitu... | set up the hydroquebec sensor . | train | false |
30,901 | def file_path_to_url(path):
return urljoin('file:', pathname2url(path))
| [
"def",
"file_path_to_url",
"(",
"path",
")",
":",
"return",
"urljoin",
"(",
"'file:'",
",",
"pathname2url",
"(",
"path",
")",
")"
] | converts an absolute native path to a file url . | train | false |
30,902 | def write_cache_time(f, t):
if isinstance(t, int):
t = (t, 0)
elif isinstance(t, float):
(secs, nsecs) = divmod(t, 1.0)
t = (int(secs), int((nsecs * 1000000000)))
elif (not isinstance(t, tuple)):
raise TypeError(t)
f.write(struct.pack('>LL', *t))
| [
"def",
"write_cache_time",
"(",
"f",
",",
"t",
")",
":",
"if",
"isinstance",
"(",
"t",
",",
"int",
")",
":",
"t",
"=",
"(",
"t",
",",
"0",
")",
"elif",
"isinstance",
"(",
"t",
",",
"float",
")",
":",
"(",
"secs",
",",
"nsecs",
")",
"=",
"divm... | write a cache time . | train | false |
30,903 | def _unsalt_cipher_token(token):
salt = token[:CSRF_SECRET_LENGTH]
token = token[CSRF_SECRET_LENGTH:]
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in salt))
secret = ''.join((chars[(x - y)] for (x, y) in pairs))
return secret
| [
"def",
"_unsalt_cipher_token",
"(",
"token",
")",
":",
"salt",
"=",
"token",
"[",
":",
"CSRF_SECRET_LENGTH",
"]",
"token",
"=",
"token",
"[",
"CSRF_SECRET_LENGTH",
":",
"]",
"chars",
"=",
"CSRF_ALLOWED_CHARS",
"pairs",
"=",
"zip",
"(",
"(",
"chars",
".",
"... | given a token . | train | false |
30,905 | def fillfixtures(function):
try:
request = function._request
except AttributeError:
fm = function.session._fixturemanager
fi = fm.getfixtureinfo(function.parent, function.obj, None)
function._fixtureinfo = fi
request = function._request = FixtureRequest(function)
request._fillfixtures()
newfuncargs = {}
for name in fi.argnames:
newfuncargs[name] = function.funcargs[name]
function.funcargs = newfuncargs
else:
request._fillfixtures()
| [
"def",
"fillfixtures",
"(",
"function",
")",
":",
"try",
":",
"request",
"=",
"function",
".",
"_request",
"except",
"AttributeError",
":",
"fm",
"=",
"function",
".",
"session",
".",
"_fixturemanager",
"fi",
"=",
"fm",
".",
"getfixtureinfo",
"(",
"function"... | fill missing funcargs for a test function . | train | false |
30,906 | def _get_cpv(cp, installed=True):
if installed:
return _get_portage().db[portage.root]['vartree'].dep_bestmatch(cp)
else:
return _porttree().dep_bestmatch(cp)
| [
"def",
"_get_cpv",
"(",
"cp",
",",
"installed",
"=",
"True",
")",
":",
"if",
"installed",
":",
"return",
"_get_portage",
"(",
")",
".",
"db",
"[",
"portage",
".",
"root",
"]",
"[",
"'vartree'",
"]",
".",
"dep_bestmatch",
"(",
"cp",
")",
"else",
":",
... | add version to category/package @cp - name of package in format category/name @installed - boolean value . | train | true |
30,907 | def check_name_format(req, name, target_type):
if (not name):
raise HTTPPreconditionFailed(request=req, body=('%s name cannot be empty' % target_type))
if isinstance(name, six.text_type):
name = name.encode('utf-8')
if ('/' in name):
raise HTTPPreconditionFailed(request=req, body=('%s name cannot contain slashes' % target_type))
return name
| [
"def",
"check_name_format",
"(",
"req",
",",
"name",
",",
"target_type",
")",
":",
"if",
"(",
"not",
"name",
")",
":",
"raise",
"HTTPPreconditionFailed",
"(",
"request",
"=",
"req",
",",
"body",
"=",
"(",
"'%s name cannot be empty'",
"%",
"target_type",
")",... | validate that the header contains valid account or container name . | train | false |
30,908 | def attach(zpool, device, new_device, force=False):
ret = {}
dlist = []
if (not exists(zpool)):
ret[zpool] = 'storage pool does not exist'
return ret
ret[zpool] = {}
if (not os.path.exists(device)):
ret[zpool][device] = 'not present on filesystem'
else:
mode = os.stat(device).st_mode
if ((not stat.S_ISBLK(mode)) and (not stat.S_ISREG(mode))):
ret[zpool][device] = 'not a block device, a file vdev or character special device'
if (not os.path.exists(new_device)):
ret[zpool][new_device] = 'not present on filesystem'
else:
mode = os.stat(new_device).st_mode
if ((not stat.S_ISBLK(mode)) and (not stat.S_ISREG(mode))):
ret[zpool][new_device] = 'not a block device, a file vdev or character special device'
if (len(ret[zpool]) > 0):
return ret
zpool_cmd = _check_zpool()
cmd = '{zpool_cmd} attach {force}{zpool} {device} {new_device}'.format(zpool_cmd=zpool_cmd, force=('-f ' if force else ''), zpool=zpool, device=device, new_device=new_device)
res = __salt__['cmd.run_all'](cmd, python_shell=False)
if (res['retcode'] != 0):
ret[zpool] = (res['stderr'] if ('stderr' in res) else res['stdout'])
else:
ret[zpool] = {}
ret[zpool][new_device] = 'attached'
return ret
| [
"def",
"attach",
"(",
"zpool",
",",
"device",
",",
"new_device",
",",
"force",
"=",
"False",
")",
":",
"ret",
"=",
"{",
"}",
"dlist",
"=",
"[",
"]",
"if",
"(",
"not",
"exists",
"(",
"zpool",
")",
")",
":",
"ret",
"[",
"zpool",
"]",
"=",
"'stora... | recursively attach hass to all template instances in list and dict . | train | false |
30,909 | def configure_bgp(net_connect, file_name=''):
try:
output = net_connect.send_config_from_file(config_file=file_name)
if (net_connect.device_type == 'cisco_xr_ssh'):
output += net_connect.commit()
return output
except IOError:
print 'Error reading file: {}'.format(file_name)
| [
"def",
"configure_bgp",
"(",
"net_connect",
",",
"file_name",
"=",
"''",
")",
":",
"try",
":",
"output",
"=",
"net_connect",
".",
"send_config_from_file",
"(",
"config_file",
"=",
"file_name",
")",
"if",
"(",
"net_connect",
".",
"device_type",
"==",
"'cisco_xr... | configure bgp on device . | train | false |
30,910 | def less_simple_linear_interpolation(x, y, xi, extrap=False):
warnings.warn('less_simple_linear_interpolation has been moved to matplotlib.mlab -- please import it from there', DeprecationWarning)
import matplotlib.mlab as mlab
return mlab.less_simple_linear_interpolation(x, y, xi, extrap=extrap)
| [
"def",
"less_simple_linear_interpolation",
"(",
"x",
",",
"y",
",",
"xi",
",",
"extrap",
"=",
"False",
")",
":",
"warnings",
".",
"warn",
"(",
"'less_simple_linear_interpolation has been moved to matplotlib.mlab -- please import it from there'",
",",
"DeprecationWarning",
"... | this function has been moved to matplotlib . | train | false |
30,911 | def set_cache(key, data, ttl=300):
_channel_repository_cache[key] = {'data': data, 'expires': (time.time() + ttl)}
| [
"def",
"set_cache",
"(",
"key",
",",
"data",
",",
"ttl",
"=",
"300",
")",
":",
"_channel_repository_cache",
"[",
"key",
"]",
"=",
"{",
"'data'",
":",
"data",
",",
"'expires'",
":",
"(",
"time",
".",
"time",
"(",
")",
"+",
"ttl",
")",
"}"
] | sets an in-memory cache value . | train | false |
30,912 | def mainthread(func):
@wraps(func)
def delayed_func(*args, **kwargs):
def callback_func(dt):
func(*args, **kwargs)
Clock.schedule_once(callback_func, 0)
return delayed_func
| [
"def",
"mainthread",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"delayed_func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"def",
"callback_func",
"(",
"dt",
")",
":",
"func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"... | decorator that will schedule the call of the function for the next available frame in the mainthread . | train | false |
30,914 | def choose_hostname(hostnames=None, fallback=''):
hostname = fallback
if (hostnames is None):
return hostname
ip_regex = '\\A\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\Z'
ips = [i for i in hostnames if ((i is not None) and isinstance(i, string_types) and re.match(ip_regex, i))]
hosts = [i for i in hostnames if ((i is not None) and (i != '') and (i not in ips))]
for host_list in (hosts, ips):
for host in host_list:
if hostname_valid(host):
return host
return hostname
| [
"def",
"choose_hostname",
"(",
"hostnames",
"=",
"None",
",",
"fallback",
"=",
"''",
")",
":",
"hostname",
"=",
"fallback",
"if",
"(",
"hostnames",
"is",
"None",
")",
":",
"return",
"hostname",
"ip_regex",
"=",
"'\\\\A\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\... | choose a hostname from the provided hostnames given a list of hostnames and a fallback value . | train | false |
30,915 | @register.render_tag
def set_model_permissions(context, token):
model = context[token.split_contents()[1]]
opts = model._meta
perm_name = ((opts.app_label + u'.%s_') + opts.object_name.lower())
request = context[u'request']
setattr(model, u'perms', {})
for perm_type in (u'add', u'change', u'delete'):
model.perms[perm_type] = request.user.has_perm((perm_name % perm_type))
return u''
| [
"@",
"register",
".",
"render_tag",
"def",
"set_model_permissions",
"(",
"context",
",",
"token",
")",
":",
"model",
"=",
"context",
"[",
"token",
".",
"split_contents",
"(",
")",
"[",
"1",
"]",
"]",
"opts",
"=",
"model",
".",
"_meta",
"perm_name",
"=",
... | assigns a permissions dict to the given model . | train | false |
30,916 | @instrumented_task(name='sentry.tasks.clear_expired_resolutions', time_limit=15, soft_time_limit=10)
def clear_expired_resolutions(release_id):
try:
release = Release.objects.get_from_cache(id=release_id)
except Release.DoesNotExist:
return
resolution_list = GroupResolution.objects.filter(release__projects=release.projects.all(), release__date_added__lt=release.date_added).exclude(release=release)
resolution_list.update(status=GroupResolutionStatus.RESOLVED)
for resolution in resolution_list:
activity = Activity.objects.filter(group=resolution.group_id, type=Activity.SET_RESOLVED_IN_RELEASE, ident=resolution.id).order_by('-datetime')[0]
activity.update(data={'version': release.version})
| [
"@",
"instrumented_task",
"(",
"name",
"=",
"'sentry.tasks.clear_expired_resolutions'",
",",
"time_limit",
"=",
"15",
",",
"soft_time_limit",
"=",
"10",
")",
"def",
"clear_expired_resolutions",
"(",
"release_id",
")",
":",
"try",
":",
"release",
"=",
"Release",
".... | this should be fired when release_id is created . | train | false |
30,917 | def change_dict_key(d, from_key, to_key):
try:
d[to_key] = d.pop(from_key)
except KeyError:
pass
| [
"def",
"change_dict_key",
"(",
"d",
",",
"from_key",
",",
"to_key",
")",
":",
"try",
":",
"d",
"[",
"to_key",
"]",
"=",
"d",
".",
"pop",
"(",
"from_key",
")",
"except",
"KeyError",
":",
"pass"
] | changes a dictionarys key from from_key to to_key . | train | false |
30,918 | def test_zipimport_hook(testdir, tmpdir):
zipapp = pytest.importorskip('zipapp')
testdir.tmpdir.join('app').ensure(dir=1)
testdir.makepyfile(**{'app/foo.py': "\n import pytest\n def main():\n pytest.main(['--pyarg', 'foo'])\n "})
target = tmpdir.join('foo.zip')
zipapp.create_archive(str(testdir.tmpdir.join('app')), str(target), main='foo:main')
result = testdir.runpython(target)
assert (result.ret == 0)
result.stderr.fnmatch_lines(['*not found*foo*'])
assert ('INTERNALERROR>' not in result.stdout.str())
| [
"def",
"test_zipimport_hook",
"(",
"testdir",
",",
"tmpdir",
")",
":",
"zipapp",
"=",
"pytest",
".",
"importorskip",
"(",
"'zipapp'",
")",
"testdir",
".",
"tmpdir",
".",
"join",
"(",
"'app'",
")",
".",
"ensure",
"(",
"dir",
"=",
"1",
")",
"testdir",
".... | test package loader is being used correctly . | train | false |
30,919 | def clear_override_for_ccx(ccx, block, name):
try:
CcxFieldOverride.objects.get(ccx=ccx, location=block.location, field=name).delete()
clear_ccx_field_info_from_ccx_map(ccx, block, name)
except CcxFieldOverride.DoesNotExist:
pass
| [
"def",
"clear_override_for_ccx",
"(",
"ccx",
",",
"block",
",",
"name",
")",
":",
"try",
":",
"CcxFieldOverride",
".",
"objects",
".",
"get",
"(",
"ccx",
"=",
"ccx",
",",
"location",
"=",
"block",
".",
"location",
",",
"field",
"=",
"name",
")",
".",
... | clears a previously set field override for the ccx . | train | false |
30,920 | def get_predictions(model, valid_set, time_steps, beam_search=True, num_beams=5):
shape = (valid_set.nbatches, model.be.bsz, time_steps)
if beam_search:
ypred = model.get_outputs_beam(valid_set, num_beams=num_beams)
prediction = ypred.reshape(shape).transpose(1, 0, 2)
else:
ypred = model.get_outputs(valid_set)
prediction = ypred.argmax(2).reshape(shape).transpose(1, 0, 2)
groundtruth = valid_set.X[:, :valid_set.nbatches, ::(-1)]
prediction = prediction[:, :, ::(-1)].flatten()
groundtruth = groundtruth[:, :, ::(-1)].flatten()
return (prediction, groundtruth)
| [
"def",
"get_predictions",
"(",
"model",
",",
"valid_set",
",",
"time_steps",
",",
"beam_search",
"=",
"True",
",",
"num_beams",
"=",
"5",
")",
":",
"shape",
"=",
"(",
"valid_set",
".",
"nbatches",
",",
"model",
".",
"be",
".",
"bsz",
",",
"time_steps",
... | get model outputs for displaying . | train | false |
30,921 | def query_by_primary_key(session, model, pk_value, primary_key=None):
pk_name = (primary_key or primary_key_for(model))
query = session_query(session, model)
return query.filter((getattr(model, pk_name) == pk_value))
| [
"def",
"query_by_primary_key",
"(",
"session",
",",
"model",
",",
"pk_value",
",",
"primary_key",
"=",
"None",
")",
":",
"pk_name",
"=",
"(",
"primary_key",
"or",
"primary_key_for",
"(",
"model",
")",
")",
"query",
"=",
"session_query",
"(",
"session",
",",
... | returns a sqlalchemy query object containing the result of querying model for instances whose primary key has the value pk_value . | train | false |
30,922 | def _CheckForDependency(parent, name):
if _allowCapitalizedNames:
dependents = _dependencyMap.get(parent)
if (not dependents):
uncapitalizedParent = UncapitalizeVmodlName(parent)
dependents = _dependencyMap.get(uncapitalizedParent)
if dependents:
if ((name in dependents) or (Uncapitalize(name) in dependents)):
return True
else:
dependents = _dependencyMap.get(parent)
if dependents:
if (name in dependents):
return True
return False
| [
"def",
"_CheckForDependency",
"(",
"parent",
",",
"name",
")",
":",
"if",
"_allowCapitalizedNames",
":",
"dependents",
"=",
"_dependencyMap",
".",
"get",
"(",
"parent",
")",
"if",
"(",
"not",
"dependents",
")",
":",
"uncapitalizedParent",
"=",
"UncapitalizeVmodl... | note: must be holding the _lazylock . | train | true |
30,923 | @task(aliases=['mongoshell'])
def mongoclient(ctx):
db = settings.DB_NAME
port = settings.DB_PORT
ctx.run('mongo {db} --port {port}'.format(db=db, port=port), pty=True)
| [
"@",
"task",
"(",
"aliases",
"=",
"[",
"'mongoshell'",
"]",
")",
"def",
"mongoclient",
"(",
"ctx",
")",
":",
"db",
"=",
"settings",
".",
"DB_NAME",
"port",
"=",
"settings",
".",
"DB_PORT",
"ctx",
".",
"run",
"(",
"'mongo {db} --port {port}'",
".",
"forma... | run the mongo shell for the osf database . | train | false |
30,924 | def test_test_function():
from ... import numpy as anp
assert (GE1P10(module=anp) is True)
if GE1P10(module=np):
assert (broadcast_arrays is np.broadcast_arrays)
assert (broadcast_to is np.broadcast_to)
else:
assert (broadcast_arrays is not np.broadcast_arrays)
assert (not hasattr(np, u'broadcast_to'))
| [
"def",
"test_test_function",
"(",
")",
":",
"from",
"...",
"import",
"numpy",
"as",
"anp",
"assert",
"(",
"GE1P10",
"(",
"module",
"=",
"anp",
")",
"is",
"True",
")",
"if",
"GE1P10",
"(",
"module",
"=",
"np",
")",
":",
"assert",
"(",
"broadcast_arrays"... | test the test function the possibly patched version of broadcast_arrays should always be ok the numpy version may be . | train | false |
30,926 | def _log_and_ignore_exceptions(f):
def wrapped(self, *args, **dargs):
try:
return f(self, *args, **dargs)
except Exception as e:
print ('LogfileMonitor.%s failed with exception %s' % (f.__name__, e))
print 'Exception ignored:'
traceback.print_exc(file=sys.stdout)
wrapped.__name__ = f.__name__
wrapped.__doc__ = f.__doc__
wrapped.__dict__.update(f.__dict__)
return wrapped
| [
"def",
"_log_and_ignore_exceptions",
"(",
"f",
")",
":",
"def",
"wrapped",
"(",
"self",
",",
"*",
"args",
",",
"**",
"dargs",
")",
":",
"try",
":",
"return",
"f",
"(",
"self",
",",
"*",
"args",
",",
"**",
"dargs",
")",
"except",
"Exception",
"as",
... | decorator: automatically log exception during a method call . | train | false |
30,927 | def vk_api(backend, method, data):
data['v'] = backend.setting('API_VERSION', '3.0')
if ('access_token' not in data):
(key, secret) = backend.get_key_and_secret()
if ('api_id' not in data):
data['api_id'] = key
data['method'] = method
data['format'] = 'json'
url = 'http://api.vk.com/api.php'
param_list = sorted(list((((item + '=') + data[item]) for item in data)))
data['sig'] = md5((''.join(param_list) + secret).encode('utf-8')).hexdigest()
else:
url = ('https://api.vk.com/method/' + method)
try:
return backend.get_json(url, params=data)
except (TypeError, KeyError, IOError, ValueError, IndexError):
return None
| [
"def",
"vk_api",
"(",
"backend",
",",
"method",
",",
"data",
")",
":",
"data",
"[",
"'v'",
"]",
"=",
"backend",
".",
"setting",
"(",
"'API_VERSION'",
",",
"'3.0'",
")",
"if",
"(",
"'access_token'",
"not",
"in",
"data",
")",
":",
"(",
"key",
",",
"s... | calls vk . | train | false |
30,929 | def get_signed_params(params):
if (not isinstance(params, basestring)):
params = urllib.urlencode(params)
signature = hmac.new(params)
signature.update(get_secret())
return ((params + u'&_signature=') + signature.hexdigest())
| [
"def",
"get_signed_params",
"(",
"params",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"params",
",",
"basestring",
")",
")",
":",
"params",
"=",
"urllib",
".",
"urlencode",
"(",
"params",
")",
"signature",
"=",
"hmac",
".",
"new",
"(",
"params",
")... | sign a url by appending &_signature=xxxxx to given params . | train | false |
30,932 | def equateSphericalDotAzimuth(point, returnValue):
azimuthComplex = (euclidean.getWiddershinsUnitPolar(math.radians(returnValue)) * abs(point.dropAxis()))
point.x = azimuthComplex.real
point.y = azimuthComplex.imag
| [
"def",
"equateSphericalDotAzimuth",
"(",
"point",
",",
"returnValue",
")",
":",
"azimuthComplex",
"=",
"(",
"euclidean",
".",
"getWiddershinsUnitPolar",
"(",
"math",
".",
"radians",
"(",
"returnValue",
")",
")",
"*",
"abs",
"(",
"point",
".",
"dropAxis",
"(",
... | get equation for spherical azimuth . | train | false |
30,933 | @environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True, wrapstring=None):
if (not wrapstring):
wrapstring = environment.newline_sequence
import textwrap
return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False, replace_whitespace=False, break_long_words=break_long_words))
| [
"@",
"environmentfilter",
"def",
"do_wordwrap",
"(",
"environment",
",",
"s",
",",
"width",
"=",
"79",
",",
"break_long_words",
"=",
"True",
",",
"wrapstring",
"=",
"None",
")",
":",
"if",
"(",
"not",
"wrapstring",
")",
":",
"wrapstring",
"=",
"environment... | return a copy of the string passed to the filter wrapped after 79 characters . | train | true |
30,934 | def get_spyderplugins_mods(io=False):
user_plugin_path = osp.join(get_conf_path(), USER_PLUGIN_DIR)
if (not osp.isdir(user_plugin_path)):
os.makedirs(user_plugin_path)
(modlist, modnames) = ([], [])
for plugin_path in ([user_plugin_path] + sys.path):
_get_spyderplugins(plugin_path, io, modnames, modlist)
return modlist
| [
"def",
"get_spyderplugins_mods",
"(",
"io",
"=",
"False",
")",
":",
"user_plugin_path",
"=",
"osp",
".",
"join",
"(",
"get_conf_path",
"(",
")",
",",
"USER_PLUGIN_DIR",
")",
"if",
"(",
"not",
"osp",
".",
"isdir",
"(",
"user_plugin_path",
")",
")",
":",
"... | import modules from plugins package and return the list . | train | true |
30,935 | def split_and_strip_non_empty_lines(text):
return [line.strip() for line in text.splitlines() if line.strip()]
| [
"def",
"split_and_strip_non_empty_lines",
"(",
"text",
")",
":",
"return",
"[",
"line",
".",
"strip",
"(",
")",
"for",
"line",
"in",
"text",
".",
"splitlines",
"(",
")",
"if",
"line",
".",
"strip",
"(",
")",
"]"
] | return lines split by newline . | train | false |
30,937 | def path_from_string(string):
if (not string):
return []
path = PATH_STRING_SEPARATOR.split(string)
path = [_path_part_unescape(v) for v in path]
return path
| [
"def",
"path_from_string",
"(",
"string",
")",
":",
"if",
"(",
"not",
"string",
")",
":",
"return",
"[",
"]",
"path",
"=",
"PATH_STRING_SEPARATOR",
".",
"split",
"(",
"string",
")",
"path",
"=",
"[",
"_path_part_unescape",
"(",
"v",
")",
"for",
"v",
"i... | returns a dimension point path from string . | train | false |
30,938 | def dump_mrjob_conf(conf, f):
if yaml:
_dump_yaml_with_clear_tags(conf, f, default_flow_style=False)
else:
json.dump(conf, f, indent=2)
f.flush()
| [
"def",
"dump_mrjob_conf",
"(",
"conf",
",",
"f",
")",
":",
"if",
"yaml",
":",
"_dump_yaml_with_clear_tags",
"(",
"conf",
",",
"f",
",",
"default_flow_style",
"=",
"False",
")",
"else",
":",
"json",
".",
"dump",
"(",
"conf",
",",
"f",
",",
"indent",
"="... | write out configuration options to a file . | train | false |
30,940 | def get_migrate_repo_path():
path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'migrate_repo')
assert os.path.exists(path)
return path
| [
"def",
"get_migrate_repo_path",
"(",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
",",
"'migrate_repo'",
")",
"assert",
"os",
".",
... | get the path for the migrate repository . | train | false |
30,941 | def decode_password(pw, name):
decPW = ''
if (pw and pw.startswith(__PW_PREFIX)):
for n in range(len(__PW_PREFIX), len(pw), 2):
try:
ch = chr(int((pw[n] + pw[(n + 1)]), 16))
except ValueError:
logging.error(T('Incorrectly encoded password %s'), name)
return ''
decPW += ch
return decPW
else:
return pw
| [
"def",
"decode_password",
"(",
"pw",
",",
"name",
")",
":",
"decPW",
"=",
"''",
"if",
"(",
"pw",
"and",
"pw",
".",
"startswith",
"(",
"__PW_PREFIX",
")",
")",
":",
"for",
"n",
"in",
"range",
"(",
"len",
"(",
"__PW_PREFIX",
")",
",",
"len",
"(",
"... | decode hexadecimal encoded password but only decode when prefixed . | train | false |
30,942 | def freemem():
return utils.run('sync && echo 3 > /proc/sys/vm/drop_caches')
| [
"def",
"freemem",
"(",
")",
":",
"return",
"utils",
".",
"run",
"(",
"'sync && echo 3 > /proc/sys/vm/drop_caches'",
")"
] | return an int representing the amount of memory that has not been given to virtual machines on this node cli example: . | train | false |
30,943 | def uncommented_lines(filename, use_sudo=False):
func = (run_as_root if use_sudo else run)
res = func(('cat %s' % quote(filename)), quiet=True)
if res.succeeded:
return [line for line in res.splitlines() if (line and (not line.startswith('#')))]
else:
return []
| [
"def",
"uncommented_lines",
"(",
"filename",
",",
"use_sudo",
"=",
"False",
")",
":",
"func",
"=",
"(",
"run_as_root",
"if",
"use_sudo",
"else",
"run",
")",
"res",
"=",
"func",
"(",
"(",
"'cat %s'",
"%",
"quote",
"(",
"filename",
")",
")",
",",
"quiet"... | get the lines of a remote file . | train | true |
30,944 | def version_sort(sortable, *fields, **kwargs):
def _version_sort_key(item):
result = SemVer(semver_compat(item))
if fields:
values = [result]
for field in fields:
values.append(item[field])
result = tuple(values)
return result
try:
return sorted(sortable, key=_version_sort_key, **kwargs)
except ValueError as e:
console_write(u'\n Error sorting versions - %s\n ', e)
return []
| [
"def",
"version_sort",
"(",
"sortable",
",",
"*",
"fields",
",",
"**",
"kwargs",
")",
":",
"def",
"_version_sort_key",
"(",
"item",
")",
":",
"result",
"=",
"SemVer",
"(",
"semver_compat",
"(",
"item",
")",
")",
"if",
"fields",
":",
"values",
"=",
"[",... | sorts a list that is a list of versions . | train | false |
30,945 | def getFileOrGcodeDirectory(fileName, wasCancelled, words=[]):
if isEmptyOrCancelled(fileName, wasCancelled):
return []
if isDirectorySetting():
dotIndex = fileName.rfind('.')
if (dotIndex < 0):
print 'The file name should have a suffix, like myfile.xml.'
print 'Since the file name does not have a suffix, nothing will be done'
suffix = fileName[(dotIndex + 1):]
return archive.getFilesWithFileTypeWithoutWords(suffix, words, fileName)
return [fileName]
| [
"def",
"getFileOrGcodeDirectory",
"(",
"fileName",
",",
"wasCancelled",
",",
"words",
"=",
"[",
"]",
")",
":",
"if",
"isEmptyOrCancelled",
"(",
"fileName",
",",
"wasCancelled",
")",
":",
"return",
"[",
"]",
"if",
"isDirectorySetting",
"(",
")",
":",
"dotInde... | get the gcode files in the directory the file is in if directory setting is true . | train | false |
30,946 | def _process_old_opts(configs):
if isinstance(configs, tuple):
configs = [configs]
return {label: options for (label, options) in configs}
| [
"def",
"_process_old_opts",
"(",
"configs",
")",
":",
"if",
"isinstance",
"(",
"configs",
",",
"tuple",
")",
":",
"configs",
"=",
"[",
"configs",
"]",
"return",
"{",
"label",
":",
"options",
"for",
"(",
"label",
",",
"options",
")",
"in",
"configs",
"}... | convert old-style 2-tuple configs to dicts . | train | false |
30,947 | def _pnio_rtc_guess_payload_class(_pkt, _underlayer=None, *args, **kargs):
config = pnio_get_config(_underlayer)
if isinstance(config, list):
cur_index = (- len(_pkt))
for (index, cls, params) in config:
if (cur_index == index):
return cls(_pkt, config=params, *args, **kargs)
return PNIORealTimeIOxS(_pkt, *args, **kargs)
else:
return PNIORealTimeRawData(_pkt, config={'length': len(_pkt)}, *args, **kargs)
| [
"def",
"_pnio_rtc_guess_payload_class",
"(",
"_pkt",
",",
"_underlayer",
"=",
"None",
",",
"*",
"args",
",",
"**",
"kargs",
")",
":",
"config",
"=",
"pnio_get_config",
"(",
"_underlayer",
")",
"if",
"isinstance",
"(",
"config",
",",
"list",
")",
":",
"cur_... | a dispatcher for the packet list field which manage the configuration to fin dthe appropriate class . | train | false |
30,948 | @pytest.fixture(scope='module', params=['cpu'])
def backend_cpu64(request):
be = get_backend(request, datatype=np.float64)
def cleanup():
be = request.getfuncargvalue('backend_cpu64')
del be
request.addfinalizer(cleanup)
return be
| [
"@",
"pytest",
".",
"fixture",
"(",
"scope",
"=",
"'module'",
",",
"params",
"=",
"[",
"'cpu'",
"]",
")",
"def",
"backend_cpu64",
"(",
"request",
")",
":",
"be",
"=",
"get_backend",
"(",
"request",
",",
"datatype",
"=",
"np",
".",
"float64",
")",
"de... | fixture that returns a cpu backend using 64 bit dtype . | train | false |
30,950 | def has_purchased(f):
@functools.wraps(f)
def wrapper(request, addon, *args, **kw):
if (addon.is_premium() and (not addon.has_purchased(request.user))):
log.info(('Not purchased: %d' % addon.pk))
raise PermissionDenied
return f(request, addon, *args, **kw)
return wrapper
| [
"def",
"has_purchased",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"request",
",",
"addon",
",",
"*",
"args",
",",
"**",
"kw",
")",
":",
"if",
"(",
"addon",
".",
"is_premium",
"(",
")",
"and",
"(",
... | if the addon is premium . | train | false |
30,951 | def intersection(u, v):
w = []
for e in u:
if (e in v):
w.append(e)
return w
| [
"def",
"intersection",
"(",
"u",
",",
"v",
")",
":",
"w",
"=",
"[",
"]",
"for",
"e",
"in",
"u",
":",
"if",
"(",
"e",
"in",
"v",
")",
":",
"w",
".",
"append",
"(",
"e",
")",
"return",
"w"
] | return the intersection of _u_ and _v_ . | train | false |
30,956 | def is_whitelisted_host(host):
whitelist = config.get('content', 'host-blocking-whitelist')
if (whitelist is None):
return False
for pattern in whitelist:
if fnmatch.fnmatch(host, pattern.lower()):
return True
return False
| [
"def",
"is_whitelisted_host",
"(",
"host",
")",
":",
"whitelist",
"=",
"config",
".",
"get",
"(",
"'content'",
",",
"'host-blocking-whitelist'",
")",
"if",
"(",
"whitelist",
"is",
"None",
")",
":",
"return",
"False",
"for",
"pattern",
"in",
"whitelist",
":",... | check if the given host is on the adblock whitelist . | train | false |
30,957 | def write_fft(fft_features, fn):
(base_fn, ext) = os.path.splitext(fn)
data_fn = (base_fn + '.fft')
np.save(data_fn, fft_features)
print ('Written ' % data_fn)
| [
"def",
"write_fft",
"(",
"fft_features",
",",
"fn",
")",
":",
"(",
"base_fn",
",",
"ext",
")",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fn",
")",
"data_fn",
"=",
"(",
"base_fn",
"+",
"'.fft'",
")",
"np",
".",
"save",
"(",
"data_fn",
",",
"f... | write the fft features to separate files to speed up processing . | train | false |
30,958 | def guess_locale_from_lang_posix(lang):
if is_valid_locale(str(lang)):
locale_n = str(lang)
else:
locale_n = str((locale.normalize(lang).split(u'.')[0] + u'.UTF-8'))
if (not is_valid_locale(locale_n)):
locale_n = str(locale.normalize(lang).split(u'.')[0])
if (not is_valid_locale(locale_n)):
locale_n = None
return locale_n
| [
"def",
"guess_locale_from_lang_posix",
"(",
"lang",
")",
":",
"if",
"is_valid_locale",
"(",
"str",
"(",
"lang",
")",
")",
":",
"locale_n",
"=",
"str",
"(",
"lang",
")",
"else",
":",
"locale_n",
"=",
"str",
"(",
"(",
"locale",
".",
"normalize",
"(",
"la... | guess a locale . | train | false |
30,959 | @require_admin_context
def network_associate(context, project_id, network_id=None, force=False):
    """Associate a project with a network.

    When ``force`` is False, a network already associated with the
    project is reused if one exists.  Otherwise (or when no such network
    exists) an unassociated network is claimed — the one with id
    ``network_id``, or any unassociated network when ``network_id`` is
    None.

    :raises db.NoMoreNetworks: if no matching network row is found.
    """
    session = get_session()
    with session.begin():
        def network_query(project_filter, id=None):
            # Fetch one non-deleted network row, locked FOR UPDATE so
            # concurrent associations cannot claim the same network.
            filter_kwargs = {'project_id': project_filter}
            if (id is not None):
                filter_kwargs['id'] = id
            return model_query(context, models.Network, session=session, read_deleted='no').filter_by(**filter_kwargs).with_lockmode('update').first()
        if (not force):
            # Prefer a network the project already owns.
            network_ref = network_query(project_id)
        if (force or (not network_ref)):
            # Claim an unassociated network (project_id is NULL).
            network_ref = network_query(None, network_id)
            if (not network_ref):
                raise db.NoMoreNetworks()
        network_ref['project_id'] = project_id
        session.add(network_ref)
    return network_ref
| [
"@",
"require_admin_context",
"def",
"network_associate",
"(",
"context",
",",
"project_id",
",",
"network_id",
"=",
"None",
",",
"force",
"=",
"False",
")",
":",
"session",
"=",
"get_session",
"(",
")",
"with",
"session",
".",
"begin",
"(",
")",
":",
"def... | associate a project with a network . | train | false |
# id: 30,960
def treePing64():
    """Run an all-pairs ping test on 64-host tree networks (depth 2, fanout 8).

    Each datapath implementation is exercised in turn and the per-switch
    packet-loss percentages are reported at the end.
    """
    switches = {'reference user': UserSwitch, 'Open vSwitch kernel': OVSKernelSwitch}
    results = {}
    for label, switch_cls in switches.items():
        info('*** Testing', label, 'datapath\n')
        net = TreeNet(depth=2, fanout=8, switch=switch_cls)
        results[label] = net.run(net.pingAll)
    info('\n*** Tree network ping results:\n')
    for label in switches:
        info(('%s: %d%% packet loss\n' % (label, results[label])))
    info('\n')
| [
"def",
"treePing64",
"(",
")",
":",
"results",
"=",
"{",
"}",
"switches",
"=",
"{",
"'reference user'",
":",
"UserSwitch",
",",
"'Open vSwitch kernel'",
":",
"OVSKernelSwitch",
"}",
"for",
"name",
"in",
"switches",
":",
"info",
"(",
"'*** Testing'",
",",
"na... | run ping test on 64-node tree networks . | train | false |
# id: 30,961
def pillar_pcre(tgt, delimiter=DEFAULT_TARGET_DELIM):
    """Return True if the minion's pillar data matches the PCRE target.

    :param tgt: pillar PCRE target expression.
    :param delimiter: delimiter used for nested pillar key paths.
    :return: the matcher's result, or False if matching raised.
    """
    pillar_matcher = salt.minion.Matcher({'pillar': __pillar__}, __salt__)
    try:
        return pillar_matcher.pillar_pcre_match(tgt, delimiter=delimiter)
    except Exception as exc:
        # Log and report a non-match rather than propagating matcher errors.
        log.exception(exc)
        return False
| [
"def",
"pillar_pcre",
"(",
"tgt",
",",
"delimiter",
"=",
"DEFAULT_TARGET_DELIM",
")",
":",
"matcher",
"=",
"salt",
".",
"minion",
".",
"Matcher",
"(",
"{",
"'pillar'",
":",
"__pillar__",
"}",
",",
"__salt__",
")",
"try",
":",
"return",
"matcher",
".",
"p... | return true if the minion matches the given pillar_pcre target . | train | false |
# id: 30,962
def translate_peers_into_health(peer_info_dicts):
    """Estimate swarm health from a list of libtorrent peer_info dicts.

    :param peer_info_dicts: peer_info dictionaries, each carrying
        'upload_only', 'uinterested' and 'completed' entries.
    :return: a (num_seeders, num_leechers) tuple.
    """
    total = len(peer_info_dicts)
    upload_only = sum(1 for p in peer_info_dicts if p['upload_only'])
    interested_in_us = sum(1 for p in peer_info_dicts if p['uinterested'])
    finished = sum(1 for p in peer_info_dicts if p['completed'] == 1)
    # Unfinished peers that nevertheless advertise upload_only.
    unfinished_uploaders = sum(1 for p in peer_info_dicts
                               if p['completed'] != 1 and p['upload_only'])
    num_seeders = max(upload_only, finished)
    num_leechers = max(interested_in_us,
                       min(unfinished_uploaders, total - finished))
    return (num_seeders, num_leechers)
| [
"def",
"translate_peers_into_health",
"(",
"peer_info_dicts",
")",
":",
"upload_only",
"=",
"0",
"finished",
"=",
"0",
"unfinished_able_dl",
"=",
"0",
"interest_in_us",
"=",
"0",
"for",
"p_info",
"in",
"peer_info_dicts",
":",
"upload_only_b",
"=",
"False",
"if",
... | peer_info_dicts is a peer_info dictionary from libtorrentdownloadimpl . | train | false |
# id: 30,963
def sync_pillar(saltenv='base'):
    """Sync 'pillar' extension modules for the given fileserver environment.

    :param saltenv: fileserver environment to sync from (default 'base').
    :return: first element of the salt.utils.extmods.sync result.
    """
    sync_result = salt.utils.extmods.sync(__opts__, 'pillar', saltenv=saltenv)
    return sync_result[0]
| [
"def",
"sync_pillar",
"(",
"saltenv",
"=",
"'base'",
")",
":",
"return",
"salt",
".",
"utils",
".",
"extmods",
".",
"sync",
"(",
"__opts__",
",",
"'pillar'",
",",
"saltenv",
"=",
"saltenv",
")",
"[",
"0",
"]"
] | sync pillar modules from salt://_pillar to the master saltenv : base the fileserver environment from which to sync . | train | false |
30,964 | def pci_device_get_by_id(context, id):
    """Get a PCI device by id.

    Thin dispatch wrapper that forwards to the configured DB backend
    (``IMPL``).
    """
    return IMPL.pci_device_get_by_id(context, id)
| [
"def",
"pci_device_get_by_id",
"(",
"context",
",",
"id",
")",
":",
"return",
"IMPL",
".",
"pci_device_get_by_id",
"(",
"context",
",",
"id",
")"
] | get pci device by id . | train | false |
# id: 30,965
def word_wrap(text, tabwidth, limit, break_on_hyphens=False):
    """Wrap each line of *text* to at most *limit* columns.

    Lines beginning with a recognised commit trailer (Signed-off-by:,
    Reviewed-by:, "c.f." and friends) are passed through unwrapped so
    the tags stay on one line; all other lines are filled by
    TextWrapper.
    """
    special_tag_rgx = re.compile(u"^(((Acked-by|Ack'd-by|Based-on-patch-by|Cheered-on-by|Co-authored-by|Comments-by|Confirmed-by|Contributions-by|Debugged-by|Discovered-by|Explained-by|Backtraced-by|Helped-by|Liked-by|Link|Improved-by|Inspired-by|Initial-patch-by|Noticed-by|Original-patch-by|Originally-by|Mentored-by|Patch-by|Proposed-by|References|Related-to|Reported-by|Requested-by|Reviewed-by|See-also|Signed-off-by|Signed-Off-by|Spotted-by|Suggested-by|Tested-by|Tested-on-([a-zA-Z-_]+)-by|With-suggestions-by):)|([Cc]\\.\\s*[Ff]\\.\\s+))")
    wrapper = TextWrapper(width=limit, tabwidth=tabwidth, break_on_hyphens=break_on_hyphens, drop_whitespace=True)
    wrapped = [line if special_tag_rgx.match(line) else wrapper.fill(line)
               for line in text.split(u'\n')]
    return u'\n'.join(wrapped)
| [
"def",
"word_wrap",
"(",
"text",
",",
"tabwidth",
",",
"limit",
",",
"break_on_hyphens",
"=",
"False",
")",
":",
"lines",
"=",
"[",
"]",
"special_tag_rgx",
"=",
"re",
".",
"compile",
"(",
"u\"^(((Acked-by|Ack'd-by|Based-on-patch-by|Cheered-on-by|Co-authored-by|Comment... | wrap long lines to the specified limit . | train | false |
30,966 | def mask_not_alphanumeric(data):
    """Mask *data* using the ``NON_FANCY_PRINTABLE`` character class.

    Convenience wrapper around ``mask_not_printable``; see that function
    for the masking semantics.
    """
    return mask_not_printable(data, NON_FANCY_PRINTABLE)
| [
"def",
"mask_not_alphanumeric",
"(",
"data",
")",
":",
"return",
"mask_not_printable",
"(",
"data",
",",
"NON_FANCY_PRINTABLE",
")"
] | same as above . | train | false |
30,967 | def get_url(uri=None, full_address=False):
    """Look up the site's host and return an absolute URL.

    :param uri: optional path or absolute URL.  Absolute http(s) URLs
        are returned unchanged.
    :param full_address: when True and no ``uri`` is given, append the
        current request's REQUEST_URI header to the host.
    :return: absolute URL string.
    """
    # An explicitly configured host name wins over everything else.
    host_name = (frappe.local.conf.host_name or frappe.local.conf.hostname)
    if (uri and (uri.startswith(u'http://') or uri.startswith(u'https://'))):
        return uri
    if (not host_name):
        if (hasattr(frappe.local, u'request') and frappe.local.request and frappe.local.request.host):
            # Live request available: take host from it and the scheme
            # from the X-Forwarded-Proto proxy header.
            protocol = (u'https://' if (u'https' == frappe.get_request_header(u'X-Forwarded-Proto', u'')) else u'http://')
            host_name = (protocol + frappe.local.request.host)
        elif frappe.local.site:
            # No live request: infer the scheme from the site's SSL or
            # wildcard-certificate configuration.
            protocol = u'http://'
            if frappe.local.conf.ssl_certificate:
                protocol = u'https://'
            elif frappe.local.conf.wildcard:
                domain = frappe.local.conf.wildcard.get(u'domain')
                if (domain and frappe.local.site.endswith(domain) and frappe.local.conf.wildcard.get(u'ssl_certificate')):
                    protocol = u'https://'
            host_name = (protocol + frappe.local.site)
        else:
            # Last resorts: Website Settings subdomain, then localhost.
            host_name = frappe.db.get_value(u'Website Settings', u'Website Settings', u'subdomain')
            if (not host_name):
                host_name = u'http://localhost'
    # Ensure the host carries a scheme before joining.
    if (host_name and (not (host_name.startswith(u'http://') or host_name.startswith(u'https://')))):
        host_name = (u'http://' + host_name)
    if ((not uri) and full_address):
        uri = frappe.get_request_header(u'REQUEST_URI', u'')
    url = (urllib.basejoin(host_name, uri) if uri else host_name)
    return url
| [
"def",
"get_url",
"(",
"uri",
"=",
"None",
",",
"full_address",
"=",
"False",
")",
":",
"host_name",
"=",
"(",
"frappe",
".",
"local",
".",
"conf",
".",
"host_name",
"or",
"frappe",
".",
"local",
".",
"conf",
".",
"hostname",
")",
"if",
"(",
"uri",
... | lookup and return page url . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.