| id_within_dataset (int64, 1–55.5k) | snippet (stringlengths 19–14.2k) | tokens (listlengths 6–1.63k) | nl (stringlengths 6–352) | split_within_dataset (stringclasses: 1 value) | is_duplicated (bool: 2 classes) |
|---|---|---|---|---|---|
2,424
|
def turn_off_internet(verbose=False):
global INTERNET_OFF
global _orig_opener
if INTERNET_OFF:
return
INTERNET_OFF = True
__tracebackhide__ = True
if verbose:
print(u'Internet access disabled')
_orig_opener = urllib.request.build_opener()
no_proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(no_proxy_handler)
urllib.request.install_opener(opener)
socket.create_connection = check_internet_off(socket_create_connection)
socket.socket.bind = check_internet_off(socket_bind)
socket.socket.connect = check_internet_off(socket_connect)
return socket
|
[
"def",
"turn_off_internet",
"(",
"verbose",
"=",
"False",
")",
":",
"global",
"INTERNET_OFF",
"global",
"_orig_opener",
"if",
"INTERNET_OFF",
":",
"return",
"INTERNET_OFF",
"=",
"True",
"__tracebackhide__",
"=",
"True",
"if",
"verbose",
":",
"print",
"(",
"u'Internet access disabled'",
")",
"_orig_opener",
"=",
"urllib",
".",
"request",
".",
"build_opener",
"(",
")",
"no_proxy_handler",
"=",
"urllib",
".",
"request",
".",
"ProxyHandler",
"(",
"{",
"}",
")",
"opener",
"=",
"urllib",
".",
"request",
".",
"build_opener",
"(",
"no_proxy_handler",
")",
"urllib",
".",
"request",
".",
"install_opener",
"(",
"opener",
")",
"socket",
".",
"create_connection",
"=",
"check_internet_off",
"(",
"socket_create_connection",
")",
"socket",
".",
"socket",
".",
"bind",
"=",
"check_internet_off",
"(",
"socket_bind",
")",
"socket",
".",
"socket",
".",
"connect",
"=",
"check_internet_off",
"(",
"socket_connect",
")",
"return",
"socket"
] |
disable internet access via python by preventing connections from being created using the socket module .
|
train
| false
|
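The snippet above leans on helpers that are not part of the row (`check_internet_off`, `socket_create_connection`, `socket_bind`, `socket_connect`). A minimal sketch of what such a wrapper could look like, assuming it only needs to raise while `INTERNET_OFF` is set:

```python
import functools

INTERNET_OFF = False

def check_internet_off(original_func):
    # hypothetical wrapper: raise instead of touching the network while
    # INTERNET_OFF is set, otherwise defer to the real socket call
    @functools.wraps(original_func)
    def wrapper(*args, **kwargs):
        if INTERNET_OFF:
            raise IOError('an attempt was made to connect to the internet')
        return original_func(*args, **kwargs)
    return wrapper
```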
2,425
|
def all(x, axis=None, keepdims=False):
axis = _normalize_axis(axis, ndim(x))
x = tf.cast(x, tf.bool)
x = tf.reduce_all(x, reduction_indices=axis, keep_dims=keepdims)
return tf.cast(x, tf.uint8)
|
[
"def",
"all",
"(",
"x",
",",
"axis",
"=",
"None",
",",
"keepdims",
"=",
"False",
")",
":",
"axis",
"=",
"_normalize_axis",
"(",
"axis",
",",
"ndim",
"(",
"x",
")",
")",
"x",
"=",
"tf",
".",
"cast",
"(",
"x",
",",
"tf",
".",
"bool",
")",
"x",
"=",
"tf",
".",
"reduce_all",
"(",
"x",
",",
"reduction_indices",
"=",
"axis",
",",
"keep_dims",
"=",
"keepdims",
")",
"return",
"tf",
".",
"cast",
"(",
"x",
",",
"tf",
".",
"uint8",
")"
] |
bitwise reduction (logical and) of a tensor along the given axis .
|
train
| false
|
2,426
|
def _require_language(code, fullname, plurals=2, plural_equation='(n != 1)'):
from pootle_language.models import Language
criteria = {'code': code, 'fullname': fullname, 'nplurals': plurals, 'pluralequation': plural_equation}
(language, created) = Language.objects.get_or_create(**criteria)
if created:
language.save()
return language
|
[
"def",
"_require_language",
"(",
"code",
",",
"fullname",
",",
"plurals",
"=",
"2",
",",
"plural_equation",
"=",
"'(n != 1)'",
")",
":",
"from",
"pootle_language",
".",
"models",
"import",
"Language",
"criteria",
"=",
"{",
"'code'",
":",
"code",
",",
"'fullname'",
":",
"fullname",
",",
"'nplurals'",
":",
"plurals",
",",
"'pluralequation'",
":",
"plural_equation",
"}",
"(",
"language",
",",
"created",
")",
"=",
"Language",
".",
"objects",
".",
"get_or_create",
"(",
"**",
"criteria",
")",
"if",
"created",
":",
"language",
".",
"save",
"(",
")",
"return",
"language"
] |
helper to get/create a new language .
|
train
| false
|
2,428
|
def _pre_index_check(handler, host=None, core_name=None):
if ((_get_none_or_value(host) is None) and (not _is_master())):
err = ['solr.pre_indexing_check can only be called by "master" minions']
return _get_return_dict(False, err)
if ((_get_none_or_value(core_name) is None) and _check_for_cores()):
errors = ['solr.full_import is not safe to multiple handlers at once']
return _get_return_dict(False, errors=errors)
resp = import_status(handler, host, core_name)
if resp['success']:
status = resp['data']['status']
if (status == 'busy'):
warn = ['An indexing process is already running.']
return _get_return_dict(True, warnings=warn)
if (status != 'idle'):
errors = ['Unknown status: "{0}"'.format(status)]
return _get_return_dict(False, data=resp['data'], errors=errors)
else:
errors = ['Status check failed. Response details: {0}'.format(resp)]
return _get_return_dict(False, data=resp['data'], errors=errors)
return resp
|
[
"def",
"_pre_index_check",
"(",
"handler",
",",
"host",
"=",
"None",
",",
"core_name",
"=",
"None",
")",
":",
"if",
"(",
"(",
"_get_none_or_value",
"(",
"host",
")",
"is",
"None",
")",
"and",
"(",
"not",
"_is_master",
"(",
")",
")",
")",
":",
"err",
"=",
"[",
"'solr.pre_indexing_check can only be called by \"master\" minions'",
"]",
"return",
"_get_return_dict",
"(",
"False",
",",
"err",
")",
"if",
"(",
"(",
"_get_none_or_value",
"(",
"core_name",
")",
"is",
"None",
")",
"and",
"_check_for_cores",
"(",
")",
")",
":",
"errors",
"=",
"[",
"'solr.full_import is not safe to multiple handlers at once'",
"]",
"return",
"_get_return_dict",
"(",
"False",
",",
"errors",
"=",
"errors",
")",
"resp",
"=",
"import_status",
"(",
"handler",
",",
"host",
",",
"core_name",
")",
"if",
"resp",
"[",
"'success'",
"]",
":",
"status",
"=",
"resp",
"[",
"'data'",
"]",
"[",
"'status'",
"]",
"if",
"(",
"status",
"==",
"'busy'",
")",
":",
"warn",
"=",
"[",
"'An indexing process is already running.'",
"]",
"return",
"_get_return_dict",
"(",
"True",
",",
"warnings",
"=",
"warn",
")",
"if",
"(",
"status",
"!=",
"'idle'",
")",
":",
"errors",
"=",
"[",
"'Unknown status: \"{0}\"'",
".",
"format",
"(",
"status",
")",
"]",
"return",
"_get_return_dict",
"(",
"False",
",",
"data",
"=",
"resp",
"[",
"'data'",
"]",
",",
"errors",
"=",
"errors",
")",
"else",
":",
"errors",
"=",
"[",
"'Status check failed. Response details: {0}'",
".",
"format",
"(",
"resp",
")",
"]",
"return",
"_get_return_dict",
"(",
"False",
",",
"data",
"=",
"resp",
"[",
"'data'",
"]",
",",
"errors",
"=",
"errors",
")",
"return",
"resp"
] |
private method - master call does a pre-check to make sure that all the options are set and that we can talk to solr before trying to send a command to solr .
|
train
| true
|
2,429
|
def _TestUpdateDevice(tester, user_cookie, request_dict):
validator = tester.validator
(user_id, device_id) = tester.GetIdsFromCookie(user_cookie)
request_dict = deepcopy(request_dict)
device_dict = request_dict['device_dict']
actual_dict = tester.SendRequest('update_device', user_cookie, request_dict)
device_dict['user_id'] = user_id
if ('push_token' in device_dict):
device_dict['alert_user_id'] = user_id
device_dict.pop('device_uuid', None)
device_dict.pop('test_udid', None)
device = validator.ValidateUpdateDBObject(Device, last_access=util._TEST_TIME, **device_dict)
if ('push_token' in device_dict):
predicate = (lambda d: ((d.push_token == device_dict['push_token']) and (d.device_id != device_dict['device_id'])))
for other_device in validator.QueryModelObjects(Device, predicate=predicate):
validator.ValidateUpdateDBObject(Device, user_id=other_device.user_id, device_id=other_device.device_id, push_token=None, alert_user_id=None)
tester._CompareResponseDicts('update_device', user_id, request_dict, {}, actual_dict)
return actual_dict
|
[
"def",
"_TestUpdateDevice",
"(",
"tester",
",",
"user_cookie",
",",
"request_dict",
")",
":",
"validator",
"=",
"tester",
".",
"validator",
"(",
"user_id",
",",
"device_id",
")",
"=",
"tester",
".",
"GetIdsFromCookie",
"(",
"user_cookie",
")",
"request_dict",
"=",
"deepcopy",
"(",
"request_dict",
")",
"device_dict",
"=",
"request_dict",
"[",
"'device_dict'",
"]",
"actual_dict",
"=",
"tester",
".",
"SendRequest",
"(",
"'update_device'",
",",
"user_cookie",
",",
"request_dict",
")",
"device_dict",
"[",
"'user_id'",
"]",
"=",
"user_id",
"if",
"(",
"'push_token'",
"in",
"device_dict",
")",
":",
"device_dict",
"[",
"'alert_user_id'",
"]",
"=",
"user_id",
"device_dict",
".",
"pop",
"(",
"'device_uuid'",
",",
"None",
")",
"device_dict",
".",
"pop",
"(",
"'test_udid'",
",",
"None",
")",
"device",
"=",
"validator",
".",
"ValidateUpdateDBObject",
"(",
"Device",
",",
"last_access",
"=",
"util",
".",
"_TEST_TIME",
",",
"**",
"device_dict",
")",
"if",
"(",
"'push_token'",
"in",
"device_dict",
")",
":",
"predicate",
"=",
"(",
"lambda",
"d",
":",
"(",
"(",
"d",
".",
"push_token",
"==",
"device_dict",
"[",
"'push_token'",
"]",
")",
"and",
"(",
"d",
".",
"device_id",
"!=",
"device_dict",
"[",
"'device_id'",
"]",
")",
")",
")",
"for",
"other_device",
"in",
"validator",
".",
"QueryModelObjects",
"(",
"Device",
",",
"predicate",
"=",
"predicate",
")",
":",
"validator",
".",
"ValidateUpdateDBObject",
"(",
"Device",
",",
"user_id",
"=",
"other_device",
".",
"user_id",
",",
"device_id",
"=",
"other_device",
".",
"device_id",
",",
"push_token",
"=",
"None",
",",
"alert_user_id",
"=",
"None",
")",
"tester",
".",
"_CompareResponseDicts",
"(",
"'update_device'",
",",
"user_id",
",",
"request_dict",
",",
"{",
"}",
",",
"actual_dict",
")",
"return",
"actual_dict"
] |
called by the servicetester in order to test update_device service api call .
|
train
| false
|
2,430
|
def get_rows(rows, header):
header = [x.text_content().strip() for x in header]
keyed_rows = []
for r in rows:
dict_row = {}
for (k, v) in zip(header, r.xpath('td')):
dict_row.update({k: v})
keyed_rows.append(dict_row)
return keyed_rows
|
[
"def",
"get_rows",
"(",
"rows",
",",
"header",
")",
":",
"header",
"=",
"[",
"x",
".",
"text_content",
"(",
")",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"header",
"]",
"keyed_rows",
"=",
"[",
"]",
"for",
"r",
"in",
"rows",
":",
"dict_row",
"=",
"{",
"}",
"for",
"(",
"k",
",",
"v",
")",
"in",
"zip",
"(",
"header",
",",
"r",
".",
"xpath",
"(",
"'td'",
")",
")",
":",
"dict_row",
".",
"update",
"(",
"{",
"k",
":",
"v",
"}",
")",
"keyed_rows",
".",
"append",
"(",
"dict_row",
")",
"return",
"keyed_rows"
] |
takes the rows and header and returns a dict for each row with { key : <td> } .
|
train
| false
|
2,431
|
def create_streamline(x, y, u, v, density=1, angle=(math.pi / 9), arrow_scale=0.09, **kwargs):
utils.validate_equal_length(x, y)
utils.validate_equal_length(u, v)
validate_streamline(x, y)
utils.validate_positive_scalars(density=density, arrow_scale=arrow_scale)
(streamline_x, streamline_y) = _Streamline(x, y, u, v, density, angle, arrow_scale).sum_streamlines()
(arrow_x, arrow_y) = _Streamline(x, y, u, v, density, angle, arrow_scale).get_streamline_arrows()
streamline = graph_objs.Scatter(x=(streamline_x + arrow_x), y=(streamline_y + arrow_y), mode='lines', **kwargs)
data = [streamline]
layout = graph_objs.Layout(hovermode='closest')
return graph_objs.Figure(data=data, layout=layout)
|
[
"def",
"create_streamline",
"(",
"x",
",",
"y",
",",
"u",
",",
"v",
",",
"density",
"=",
"1",
",",
"angle",
"=",
"(",
"math",
".",
"pi",
"/",
"9",
")",
",",
"arrow_scale",
"=",
"0.09",
",",
"**",
"kwargs",
")",
":",
"utils",
".",
"validate_equal_length",
"(",
"x",
",",
"y",
")",
"utils",
".",
"validate_equal_length",
"(",
"u",
",",
"v",
")",
"validate_streamline",
"(",
"x",
",",
"y",
")",
"utils",
".",
"validate_positive_scalars",
"(",
"density",
"=",
"density",
",",
"arrow_scale",
"=",
"arrow_scale",
")",
"(",
"streamline_x",
",",
"streamline_y",
")",
"=",
"_Streamline",
"(",
"x",
",",
"y",
",",
"u",
",",
"v",
",",
"density",
",",
"angle",
",",
"arrow_scale",
")",
".",
"sum_streamlines",
"(",
")",
"(",
"arrow_x",
",",
"arrow_y",
")",
"=",
"_Streamline",
"(",
"x",
",",
"y",
",",
"u",
",",
"v",
",",
"density",
",",
"angle",
",",
"arrow_scale",
")",
".",
"get_streamline_arrows",
"(",
")",
"streamline",
"=",
"graph_objs",
".",
"Scatter",
"(",
"x",
"=",
"(",
"streamline_x",
"+",
"arrow_x",
")",
",",
"y",
"=",
"(",
"streamline_y",
"+",
"arrow_y",
")",
",",
"mode",
"=",
"'lines'",
",",
"**",
"kwargs",
")",
"data",
"=",
"[",
"streamline",
"]",
"layout",
"=",
"graph_objs",
".",
"Layout",
"(",
"hovermode",
"=",
"'closest'",
")",
"return",
"graph_objs",
".",
"Figure",
"(",
"data",
"=",
"data",
",",
"layout",
"=",
"layout",
")"
] |
returns data for a streamline plot .
|
train
| false
|
2,432
|
def test_start_no_text(hist):
hist.start('')
assert (list(hist._tmphist) == HISTORY)
|
[
"def",
"test_start_no_text",
"(",
"hist",
")",
":",
"hist",
".",
"start",
"(",
"''",
")",
"assert",
"(",
"list",
"(",
"hist",
".",
"_tmphist",
")",
"==",
"HISTORY",
")"
] |
test start with no given text .
|
train
| false
|
2,433
|
def change_playlist(new_playlist):
global current_music, current_playlist, next_change_delay
if (music and (new_playlist is not current_playlist)):
current_playlist = new_playlist
if music_enabled:
music.fadeout((fadeout_time * 1000))
next_change_delay = max(0, (change_delay - fadeout_time))
jog_music()
else:
current_music = None
|
[
"def",
"change_playlist",
"(",
"new_playlist",
")",
":",
"global",
"current_music",
",",
"current_playlist",
",",
"next_change_delay",
"if",
"(",
"music",
"and",
"(",
"new_playlist",
"is",
"not",
"current_playlist",
")",
")",
":",
"current_playlist",
"=",
"new_playlist",
"if",
"music_enabled",
":",
"music",
".",
"fadeout",
"(",
"(",
"fadeout_time",
"*",
"1000",
")",
")",
"next_change_delay",
"=",
"max",
"(",
"0",
",",
"(",
"change_delay",
"-",
"fadeout_time",
")",
")",
"jog_music",
"(",
")",
"else",
":",
"current_music",
"=",
"None"
] |
fade out any currently playing music and start playing from the given playlist .
|
train
| false
|
2,435
|
def safe_parse_date(date_hdr):
try:
if (';' in date_hdr):
date_hdr = date_hdr.split(';')[(-1)].strip()
msg_ts = long(rfc822.mktime_tz(rfc822.parsedate_tz(date_hdr)))
if ((msg_ts > (time.time() + (24 * 3600))) or (msg_ts < 1)):
return None
else:
return msg_ts
except (ValueError, TypeError, OverflowError):
return None
|
[
"def",
"safe_parse_date",
"(",
"date_hdr",
")",
":",
"try",
":",
"if",
"(",
"';'",
"in",
"date_hdr",
")",
":",
"date_hdr",
"=",
"date_hdr",
".",
"split",
"(",
"';'",
")",
"[",
"(",
"-",
"1",
")",
"]",
".",
"strip",
"(",
")",
"msg_ts",
"=",
"long",
"(",
"rfc822",
".",
"mktime_tz",
"(",
"rfc822",
".",
"parsedate_tz",
"(",
"date_hdr",
")",
")",
")",
"if",
"(",
"(",
"msg_ts",
">",
"(",
"time",
".",
"time",
"(",
")",
"+",
"(",
"24",
"*",
"3600",
")",
")",
")",
"or",
"(",
"msg_ts",
"<",
"1",
")",
")",
":",
"return",
"None",
"else",
":",
"return",
"msg_ts",
"except",
"(",
"ValueError",
",",
"TypeError",
",",
"OverflowError",
")",
":",
"return",
"None"
] |
parse a date: or received: header into a unix timestamp .
|
train
| false
|
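`rfc822` and `long` above are Python 2 only; a rough Python 3 equivalent of the same parse, assuming `email.utils` is an acceptable stand-in, might look like:

```python
import time
from email.utils import mktime_tz, parsedate_tz

def safe_parse_date_py3(date_hdr):
    # same idea as the snippet above: tolerate "...; <date>" suffixes in
    # Received: headers and reject implausible timestamps
    try:
        if ';' in date_hdr:
            date_hdr = date_hdr.split(';')[-1].strip()
        msg_ts = int(mktime_tz(parsedate_tz(date_hdr)))
        if msg_ts > time.time() + 24 * 3600 or msg_ts < 1:
            return None
        return msg_ts
    except (ValueError, TypeError, OverflowError):
        return None
```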
2,436
|
def versions_from_file(filename):
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod('unable to read _version.py')
mo = re.search("version_json = '''\\n(.*)''' # END VERSION_JSON", contents, (re.M | re.S))
if (not mo):
raise NotThisMethod('no version_json in _version.py')
return json.loads(mo.group(1))
|
[
"def",
"versions_from_file",
"(",
"filename",
")",
":",
"try",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"contents",
"=",
"f",
".",
"read",
"(",
")",
"except",
"EnvironmentError",
":",
"raise",
"NotThisMethod",
"(",
"'unable to read _version.py'",
")",
"mo",
"=",
"re",
".",
"search",
"(",
"\"version_json = '''\\\\n(.*)''' # END VERSION_JSON\"",
",",
"contents",
",",
"(",
"re",
".",
"M",
"|",
"re",
".",
"S",
")",
")",
"if",
"(",
"not",
"mo",
")",
":",
"raise",
"NotThisMethod",
"(",
"'no version_json in _version.py'",
")",
"return",
"json",
".",
"loads",
"(",
"mo",
".",
"group",
"(",
"1",
")",
")"
] |
try to determine the version from _version.py .
|
train
| true
|
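The regex above expects a versioneer-style `_version.py`; a minimal file that would satisfy it, with a hypothetical version payload, is:

```python
# _version.py (sketch)
version_json = '''
{"version": "1.2.3", "full-revisionid": null, "dirty": false, "error": null}
''' # END VERSION_JSON
```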
2,437
|
def assign_floating_ip(kwargs=None, call=None):
if (call != 'function'):
log.error('The assign_floating_ip function must be called with -f or --function.')
return False
if (not kwargs):
kwargs = {}
if ('floating_ip' and ('droplet_id' not in kwargs)):
log.error('A floating IP and droplet_id is required.')
return False
result = query(method='floating_ips', command=(kwargs['floating_ip'] + '/actions'), args={'droplet_id': kwargs['droplet_id'], 'type': 'assign'}, http_method='post')
return result
|
[
"def",
"assign_floating_ip",
"(",
"kwargs",
"=",
"None",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'function'",
")",
":",
"log",
".",
"error",
"(",
"'The assign_floating_ip function must be called with -f or --function.'",
")",
"return",
"False",
"if",
"(",
"not",
"kwargs",
")",
":",
"kwargs",
"=",
"{",
"}",
"if",
"(",
"'floating_ip'",
"and",
"(",
"'droplet_id'",
"not",
"in",
"kwargs",
")",
")",
":",
"log",
".",
"error",
"(",
"'A floating IP and droplet_id is required.'",
")",
"return",
"False",
"result",
"=",
"query",
"(",
"method",
"=",
"'floating_ips'",
",",
"command",
"=",
"(",
"kwargs",
"[",
"'floating_ip'",
"]",
"+",
"'/actions'",
")",
",",
"args",
"=",
"{",
"'droplet_id'",
":",
"kwargs",
"[",
"'droplet_id'",
"]",
",",
"'type'",
":",
"'assign'",
"}",
",",
"http_method",
"=",
"'post'",
")",
"return",
"result"
] |
assign a floating ip .
|
train
| true
|
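Note the guard `('floating_ip' and ('droplet_id' not in kwargs))` in the snippet: the string literal is always truthy, so only `droplet_id` is effectively checked. A check that matches the error message would be:

```python
if 'floating_ip' not in kwargs or 'droplet_id' not in kwargs:
    log.error('A floating IP and droplet_id is required.')
    return False
```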
2,439
|
def _ExtractImportantEnvironment(output_of_set):
envvars_to_save = ('goma_.*', 'include', 'lib', 'libpath', 'path', 'pathext', 'systemroot', 'temp', 'tmp')
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match((envvar + '='), line.lower()):
(var, setting) = line.split('=', 1)
if (envvar == 'path'):
setting = ((os.path.dirname(sys.executable) + os.pathsep) + setting)
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if (required not in env):
raise Exception(('Environment variable "%s" required to be set to valid path' % required))
return env
|
[
"def",
"_ExtractImportantEnvironment",
"(",
"output_of_set",
")",
":",
"envvars_to_save",
"=",
"(",
"'goma_.*'",
",",
"'include'",
",",
"'lib'",
",",
"'libpath'",
",",
"'path'",
",",
"'pathext'",
",",
"'systemroot'",
",",
"'temp'",
",",
"'tmp'",
")",
"env",
"=",
"{",
"}",
"for",
"line",
"in",
"output_of_set",
".",
"splitlines",
"(",
")",
":",
"for",
"envvar",
"in",
"envvars_to_save",
":",
"if",
"re",
".",
"match",
"(",
"(",
"envvar",
"+",
"'='",
")",
",",
"line",
".",
"lower",
"(",
")",
")",
":",
"(",
"var",
",",
"setting",
")",
"=",
"line",
".",
"split",
"(",
"'='",
",",
"1",
")",
"if",
"(",
"envvar",
"==",
"'path'",
")",
":",
"setting",
"=",
"(",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"sys",
".",
"executable",
")",
"+",
"os",
".",
"pathsep",
")",
"+",
"setting",
")",
"env",
"[",
"var",
".",
"upper",
"(",
")",
"]",
"=",
"setting",
"break",
"for",
"required",
"in",
"(",
"'SYSTEMROOT'",
",",
"'TEMP'",
",",
"'TMP'",
")",
":",
"if",
"(",
"required",
"not",
"in",
"env",
")",
":",
"raise",
"Exception",
"(",
"(",
"'Environment variable \"%s\" required to be set to valid path'",
"%",
"required",
")",
")",
"return",
"env"
] |
extracts environment variables required for the toolchain to run from a textual dump output by the cmd.exe "set" command .
|
train
| false
|
2,442
|
def reload_(name):
term(name)
|
[
"def",
"reload_",
"(",
"name",
")",
":",
"term",
"(",
"name",
")"
] |
reload the named service .
|
train
| false
|
2,443
|
def write_AlignIO_dna():
assert (1 == AlignIO.convert('Clustalw/opuntia.aln', 'clustal', 'Phylip/opuntia.phy', 'phylip'))
|
[
"def",
"write_AlignIO_dna",
"(",
")",
":",
"assert",
"(",
"1",
"==",
"AlignIO",
".",
"convert",
"(",
"'Clustalw/opuntia.aln'",
",",
"'clustal'",
",",
"'Phylip/opuntia.phy'",
",",
"'phylip'",
")",
")"
] |
convert the opuntia alignment from clustal to phylip format .
|
train
| false
|
2,445
|
@frappe.whitelist()
def get_course_schedule_events(start, end, filters=None):
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions(u'Course Schedule', filters)
data = frappe.db.sql(u"select name, course,\n DCTB DCTB DCTB timestamp(schedule_date, from_time) as from_datetime,\n DCTB DCTB DCTB timestamp(schedule_date, to_time) as to_datetime,\n DCTB DCTB DCTB room, student_group, 0 as 'allDay'\n DCTB DCTB from `tabCourse Schedule`\n DCTB DCTB where ( schedule_date between %(start)s and %(end)s )\n DCTB DCTB {conditions}".format(conditions=conditions), {u'start': start, u'end': end}, as_dict=True, update={u'allDay': 0})
return data
|
[
"@",
"frappe",
".",
"whitelist",
"(",
")",
"def",
"get_course_schedule_events",
"(",
"start",
",",
"end",
",",
"filters",
"=",
"None",
")",
":",
"from",
"frappe",
".",
"desk",
".",
"calendar",
"import",
"get_event_conditions",
"conditions",
"=",
"get_event_conditions",
"(",
"u'Course Schedule'",
",",
"filters",
")",
"data",
"=",
"frappe",
".",
"db",
".",
"sql",
"(",
"u\"select name, course,\\n DCTB DCTB DCTB timestamp(schedule_date, from_time) as from_datetime,\\n DCTB DCTB DCTB timestamp(schedule_date, to_time) as to_datetime,\\n DCTB DCTB DCTB room, student_group, 0 as 'allDay'\\n DCTB DCTB from `tabCourse Schedule`\\n DCTB DCTB where ( schedule_date between %(start)s and %(end)s )\\n DCTB DCTB {conditions}\"",
".",
"format",
"(",
"conditions",
"=",
"conditions",
")",
",",
"{",
"u'start'",
":",
"start",
",",
"u'end'",
":",
"end",
"}",
",",
"as_dict",
"=",
"True",
",",
"update",
"=",
"{",
"u'allDay'",
":",
"0",
"}",
")",
"return",
"data"
] |
returns events for course schedule calendar view rendering .
|
train
| false
|
2,446
|
def remove_all(path, pattern='*', keep_folder=False, recursive=False):
if os.path.exists(path):
files = globber_full(path, pattern)
if ((pattern == '*') and (not sabnzbd.WIN32)):
files.extend(globber_full(path, '.*'))
for f in files:
if os.path.isfile(f):
try:
os.remove(f)
except:
logging.info('Cannot remove file %s', f)
elif recursive:
remove_all(f, pattern, False, True)
if (not keep_folder):
try:
os.rmdir(path)
except:
logging.info('Cannot remove folder %s', path)
|
[
"def",
"remove_all",
"(",
"path",
",",
"pattern",
"=",
"'*'",
",",
"keep_folder",
"=",
"False",
",",
"recursive",
"=",
"False",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"files",
"=",
"globber_full",
"(",
"path",
",",
"pattern",
")",
"if",
"(",
"(",
"pattern",
"==",
"'*'",
")",
"and",
"(",
"not",
"sabnzbd",
".",
"WIN32",
")",
")",
":",
"files",
".",
"extend",
"(",
"globber_full",
"(",
"path",
",",
"'.*'",
")",
")",
"for",
"f",
"in",
"files",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"f",
")",
":",
"try",
":",
"os",
".",
"remove",
"(",
"f",
")",
"except",
":",
"logging",
".",
"info",
"(",
"'Cannot remove file %s'",
",",
"f",
")",
"elif",
"recursive",
":",
"remove_all",
"(",
"f",
",",
"pattern",
",",
"False",
",",
"True",
")",
"if",
"(",
"not",
"keep_folder",
")",
":",
"try",
":",
"os",
".",
"rmdir",
"(",
"path",
")",
"except",
":",
"logging",
".",
"info",
"(",
"'Cannot remove folder %s'",
",",
"path",
")"
] |
remove all files matching the pattern in a folder , optionally recursing into sub-folders and optionally keeping the folder itself .
|
train
| false
|
2,448
|
def shared_task(*args, **kwargs):
def create_shared_task(**options):
def __inner(fun):
name = options.get(u'name')
_state.connect_on_app_finalize((lambda app: app._task_from_fun(fun, **options)))
for app in _state._get_active_apps():
if app.finalized:
with app._finalize_mutex:
app._task_from_fun(fun, **options)
def task_by_cons():
app = _state.get_current_app()
return app.tasks[(name or app.gen_task_name(fun.__name__, fun.__module__))]
return Proxy(task_by_cons)
return __inner
if ((len(args) == 1) and callable(args[0])):
return create_shared_task(**kwargs)(args[0])
return create_shared_task(*args, **kwargs)
|
[
"def",
"shared_task",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"def",
"create_shared_task",
"(",
"**",
"options",
")",
":",
"def",
"__inner",
"(",
"fun",
")",
":",
"name",
"=",
"options",
".",
"get",
"(",
"u'name'",
")",
"_state",
".",
"connect_on_app_finalize",
"(",
"(",
"lambda",
"app",
":",
"app",
".",
"_task_from_fun",
"(",
"fun",
",",
"**",
"options",
")",
")",
")",
"for",
"app",
"in",
"_state",
".",
"_get_active_apps",
"(",
")",
":",
"if",
"app",
".",
"finalized",
":",
"with",
"app",
".",
"_finalize_mutex",
":",
"app",
".",
"_task_from_fun",
"(",
"fun",
",",
"**",
"options",
")",
"def",
"task_by_cons",
"(",
")",
":",
"app",
"=",
"_state",
".",
"get_current_app",
"(",
")",
"return",
"app",
".",
"tasks",
"[",
"(",
"name",
"or",
"app",
".",
"gen_task_name",
"(",
"fun",
".",
"__name__",
",",
"fun",
".",
"__module__",
")",
")",
"]",
"return",
"Proxy",
"(",
"task_by_cons",
")",
"return",
"__inner",
"if",
"(",
"(",
"len",
"(",
"args",
")",
"==",
"1",
")",
"and",
"callable",
"(",
"args",
"[",
"0",
"]",
")",
")",
":",
"return",
"create_shared_task",
"(",
"**",
"kwargs",
")",
"(",
"args",
"[",
"0",
"]",
")",
"return",
"create_shared_task",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] |
create shared task .
|
train
| false
|
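The final `len(args) == 1 and callable(args[0])` branch is what lets the decorator work both bare and parametrized:

```python
from celery import shared_task

@shared_task                    # bare form: args == (fun,)
def add(x, y):
    return x + y

@shared_task(name='tasks.mul')  # parametrized form: kwargs become task options
def mul(x, y):
    return x * y
```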
2,451
|
def show_ring(devname):
try:
ring = ethtool.get_ringparam(devname)
except IOError:
log.error('Ring parameters not supported on {0}'.format(devname))
return 'Not supported'
ret = {}
for (key, value) in ring.items():
ret[ethtool_ring_remap[key]] = ring[key]
return ret
|
[
"def",
"show_ring",
"(",
"devname",
")",
":",
"try",
":",
"ring",
"=",
"ethtool",
".",
"get_ringparam",
"(",
"devname",
")",
"except",
"IOError",
":",
"log",
".",
"error",
"(",
"'Ring parameters not supported on {0}'",
".",
"format",
"(",
"devname",
")",
")",
"return",
"'Not supported'",
"ret",
"=",
"{",
"}",
"for",
"(",
"key",
",",
"value",
")",
"in",
"ring",
".",
"items",
"(",
")",
":",
"ret",
"[",
"ethtool_ring_remap",
"[",
"key",
"]",
"]",
"=",
"ring",
"[",
"key",
"]",
"return",
"ret"
] |
queries the specified network device for rx/tx ring parameter information .
|
train
| true
|
2,452
|
def get_doctests(text_file_dir):
paths = [os.path.normpath(os.path.join(text_file_dir, path)) for path in TEXT_DOCTEST_PATHS]
if (sys.version_info >= (3,)):
paths = []
suites = []
for path in paths:
suite = doctest.DocFileSuite(path, module_relative=False)
suites.append(suite)
modules = get_module_names()
for module in modules:
suite = doctest.DocTestSuite(module)
suites.append(suite)
return suites
|
[
"def",
"get_doctests",
"(",
"text_file_dir",
")",
":",
"paths",
"=",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"text_file_dir",
",",
"path",
")",
")",
"for",
"path",
"in",
"TEXT_DOCTEST_PATHS",
"]",
"if",
"(",
"sys",
".",
"version_info",
">=",
"(",
"3",
",",
")",
")",
":",
"paths",
"=",
"[",
"]",
"suites",
"=",
"[",
"]",
"for",
"path",
"in",
"paths",
":",
"suite",
"=",
"doctest",
".",
"DocFileSuite",
"(",
"path",
",",
"module_relative",
"=",
"False",
")",
"suites",
".",
"append",
"(",
"suite",
")",
"modules",
"=",
"get_module_names",
"(",
")",
"for",
"module",
"in",
"modules",
":",
"suite",
"=",
"doctest",
".",
"DocTestSuite",
"(",
"module",
")",
"suites",
".",
"append",
"(",
"suite",
")",
"return",
"suites"
] |
return a list of testsuite instances for all doctests in the project .
|
train
| false
|
2,453
|
def _get_datacenter_id(datacenters, identity):
for datacenter in datacenters['items']:
if (identity in (datacenter['properties']['name'], datacenter['id'])):
return datacenter['id']
return None
|
[
"def",
"_get_datacenter_id",
"(",
"datacenters",
",",
"identity",
")",
":",
"for",
"datacenter",
"in",
"datacenters",
"[",
"'items'",
"]",
":",
"if",
"(",
"identity",
"in",
"(",
"datacenter",
"[",
"'properties'",
"]",
"[",
"'name'",
"]",
",",
"datacenter",
"[",
"'id'",
"]",
")",
")",
":",
"return",
"datacenter",
"[",
"'id'",
"]",
"return",
"None"
] |
fetch and return the datacenter uuid matched by name or id , if found .
|
train
| false
|
2,454
|
def head(url, **kwargs):
kwargs.setdefault('headers', dict(useragent_header))
config = kwargs.pop('config', None)
if config:
kwargs.setdefault('verify', _get_tls_cacert(url, config))
with warnings.catch_warnings():
if (not kwargs.get('verify')):
warnings.filterwarnings('ignore', category=InsecureRequestWarning)
return requests.get(url, **kwargs)
|
[
"def",
"head",
"(",
"url",
",",
"**",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"'headers'",
",",
"dict",
"(",
"useragent_header",
")",
")",
"config",
"=",
"kwargs",
".",
"pop",
"(",
"'config'",
",",
"None",
")",
"if",
"config",
":",
"kwargs",
".",
"setdefault",
"(",
"'verify'",
",",
"_get_tls_cacert",
"(",
"url",
",",
"config",
")",
")",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"if",
"(",
"not",
"kwargs",
".",
"get",
"(",
"'verify'",
")",
")",
":",
"warnings",
".",
"filterwarnings",
"(",
"'ignore'",
",",
"category",
"=",
"InsecureRequestWarning",
")",
"return",
"requests",
".",
"get",
"(",
"url",
",",
"**",
"kwargs",
")"
] |
sends a head request .
|
train
| false
|
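Note that the body ends with `requests.get`, not `requests.head`, despite the function name and description; a body matching both would end with:

```python
return requests.head(url, **kwargs)
```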
2,455
|
def new(rsa_key):
return PKCS115_SigScheme(rsa_key)
|
[
"def",
"new",
"(",
"rsa_key",
")",
":",
"return",
"PKCS115_SigScheme",
"(",
"rsa_key",
")"
] |
return a fresh instance of the pkcs#1 v1.5 signature scheme object .
|
train
| false
|
2,456
|
def expand_to_packages(names, env=None):
if (env is None):
env = os.environ
ros_paths = rospkg.get_ros_paths(env)
rospack = rospkg.RosPack(ros_paths)
rosstack = rospkg.RosStack(ros_paths)
return rospkg.expand_to_packages(names, rospack, rosstack)
|
[
"def",
"expand_to_packages",
"(",
"names",
",",
"env",
"=",
"None",
")",
":",
"if",
"(",
"env",
"is",
"None",
")",
":",
"env",
"=",
"os",
".",
"environ",
"ros_paths",
"=",
"rospkg",
".",
"get_ros_paths",
"(",
"env",
")",
"rospack",
"=",
"rospkg",
".",
"RosPack",
"(",
"ros_paths",
")",
"rosstack",
"=",
"rospkg",
".",
"RosStack",
"(",
"ros_paths",
")",
"return",
"rospkg",
".",
"expand_to_packages",
"(",
"names",
",",
"rospack",
",",
"rosstack",
")"
] |
expand names into a list of packages .
|
train
| false
|
2,457
|
def uniconvertl(l, e):
r = []
try:
for s in l:
r.append(uniconvert(s, e))
except UnicodeError:
raise UnicodeError(('bad filename: ' + os.path.join(l)))
return r
|
[
"def",
"uniconvertl",
"(",
"l",
",",
"e",
")",
":",
"r",
"=",
"[",
"]",
"try",
":",
"for",
"s",
"in",
"l",
":",
"r",
".",
"append",
"(",
"uniconvert",
"(",
"s",
",",
"e",
")",
")",
"except",
"UnicodeError",
":",
"raise",
"UnicodeError",
"(",
"(",
"'bad filename: '",
"+",
"os",
".",
"path",
".",
"join",
"(",
"l",
")",
")",
")",
"return",
"r"
] |
convert a pathlist to a list of strings encoded in encoding "e" using uniconvert .
|
train
| false
|
2,458
|
def user_factory(**kw):
global user_factory_counter
email = kw.pop('email', ('factoryuser%d' % user_factory_counter))
if ('@' not in email):
email = ('%s@mozilla.com' % email)
user = UserProfile.objects.create(email=email, **kw)
if ('email' not in kw):
user_factory_counter = (user.id + 1)
return user
|
[
"def",
"user_factory",
"(",
"**",
"kw",
")",
":",
"global",
"user_factory_counter",
"email",
"=",
"kw",
".",
"pop",
"(",
"'email'",
",",
"(",
"'factoryuser%d'",
"%",
"user_factory_counter",
")",
")",
"if",
"(",
"'@'",
"not",
"in",
"email",
")",
":",
"email",
"=",
"(",
"'%s@mozilla.com'",
"%",
"email",
")",
"user",
"=",
"UserProfile",
".",
"objects",
".",
"create",
"(",
"email",
"=",
"email",
",",
"**",
"kw",
")",
"if",
"(",
"'email'",
"not",
"in",
"kw",
")",
":",
"user_factory_counter",
"=",
"(",
"user",
".",
"id",
"+",
"1",
")",
"return",
"user"
] |
create a user profile , generating a unique factory email if one is not provided .
|
train
| false
|
2,459
|
def _test_shape_factory(source, shapes):
fname = os.path.basename(source)
def test_shape(self):
trees = PhyloXMLIO.parse(source)
for (tree, shape_expect) in zip(trees, shapes):
self.assertEqual(len(tree.clade), len(shape_expect))
for (clade, sub_expect) in zip(tree.clade, shape_expect):
self.assertEqual(len(clade), sub_expect[0])
for (subclade, len_expect) in zip(clade, sub_expect[1]):
self.assertEqual(len(subclade), len_expect)
test_shape.__doc__ = ('Check the branching structure of %s.' % fname)
return test_shape
|
[
"def",
"_test_shape_factory",
"(",
"source",
",",
"shapes",
")",
":",
"fname",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"source",
")",
"def",
"test_shape",
"(",
"self",
")",
":",
"trees",
"=",
"PhyloXMLIO",
".",
"parse",
"(",
"source",
")",
"for",
"(",
"tree",
",",
"shape_expect",
")",
"in",
"zip",
"(",
"trees",
",",
"shapes",
")",
":",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"tree",
".",
"clade",
")",
",",
"len",
"(",
"shape_expect",
")",
")",
"for",
"(",
"clade",
",",
"sub_expect",
")",
"in",
"zip",
"(",
"tree",
".",
"clade",
",",
"shape_expect",
")",
":",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"clade",
")",
",",
"sub_expect",
"[",
"0",
"]",
")",
"for",
"(",
"subclade",
",",
"len_expect",
")",
"in",
"zip",
"(",
"clade",
",",
"sub_expect",
"[",
"1",
"]",
")",
":",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"subclade",
")",
",",
"len_expect",
")",
"test_shape",
".",
"__doc__",
"=",
"(",
"'Check the branching structure of %s.'",
"%",
"fname",
")",
"return",
"test_shape"
] |
generate a test method for checking tree shapes .
|
train
| false
|
2,460
|
def get_python_console(namespace=None):
if (namespace is None):
import inspect
frame = inspect.currentframe()
caller = frame.f_back
if (not caller):
logging.error("can't find caller who start this console.")
caller = frame
namespace = dict(caller.f_globals)
namespace.update(caller.f_locals)
try:
from IPython.terminal.interactiveshell import TerminalInteractiveShell
shell = TerminalInteractiveShell(user_ns=namespace)
except ImportError:
try:
import readline
import rlcompleter
readline.set_completer(rlcompleter.Completer(namespace).complete)
readline.parse_and_bind('tab: complete')
except ImportError:
pass
import code
shell = code.InteractiveConsole(namespace)
shell._quit = False
def exit():
shell._quit = True
def readfunc(prompt=''):
if shell._quit:
raise EOFError
return six.moves.input(prompt)
shell.ask_exit = exit
shell.raw_input = readfunc
return shell
|
[
"def",
"get_python_console",
"(",
"namespace",
"=",
"None",
")",
":",
"if",
"(",
"namespace",
"is",
"None",
")",
":",
"import",
"inspect",
"frame",
"=",
"inspect",
".",
"currentframe",
"(",
")",
"caller",
"=",
"frame",
".",
"f_back",
"if",
"(",
"not",
"caller",
")",
":",
"logging",
".",
"error",
"(",
"\"can't find caller who start this console.\"",
")",
"caller",
"=",
"frame",
"namespace",
"=",
"dict",
"(",
"caller",
".",
"f_globals",
")",
"namespace",
".",
"update",
"(",
"caller",
".",
"f_locals",
")",
"try",
":",
"from",
"IPython",
".",
"terminal",
".",
"interactiveshell",
"import",
"TerminalInteractiveShell",
"shell",
"=",
"TerminalInteractiveShell",
"(",
"user_ns",
"=",
"namespace",
")",
"except",
"ImportError",
":",
"try",
":",
"import",
"readline",
"import",
"rlcompleter",
"readline",
".",
"set_completer",
"(",
"rlcompleter",
".",
"Completer",
"(",
"namespace",
")",
".",
"complete",
")",
"readline",
".",
"parse_and_bind",
"(",
"'tab: complete'",
")",
"except",
"ImportError",
":",
"pass",
"import",
"code",
"shell",
"=",
"code",
".",
"InteractiveConsole",
"(",
"namespace",
")",
"shell",
".",
"_quit",
"=",
"False",
"def",
"exit",
"(",
")",
":",
"shell",
".",
"_quit",
"=",
"True",
"def",
"readfunc",
"(",
"prompt",
"=",
"''",
")",
":",
"if",
"shell",
".",
"_quit",
":",
"raise",
"EOFError",
"return",
"six",
".",
"moves",
".",
"input",
"(",
"prompt",
")",
"shell",
".",
"ask_exit",
"=",
"exit",
"shell",
".",
"raw_input",
"=",
"readfunc",
"return",
"shell"
] |
return an interactive python console instance with the caller's stack .
|
train
| true
|
2,462
|
def get_common_sass_directories():
applicable_directories = list()
applicable_directories.append({'sass_source_dir': path('common/static/sass'), 'css_destination_dir': path('common/static/css'), 'lookup_paths': COMMON_LOOKUP_PATHS})
return applicable_directories
|
[
"def",
"get_common_sass_directories",
"(",
")",
":",
"applicable_directories",
"=",
"list",
"(",
")",
"applicable_directories",
".",
"append",
"(",
"{",
"'sass_source_dir'",
":",
"path",
"(",
"'common/static/sass'",
")",
",",
"'css_destination_dir'",
":",
"path",
"(",
"'common/static/css'",
")",
",",
"'lookup_paths'",
":",
"COMMON_LOOKUP_PATHS",
"}",
")",
"return",
"applicable_directories"
] |
determine the set of common sass directories to be compiled for all the systems and themes .
|
train
| false
|
2,463
|
def text_to_word_sequence(text, filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~ DCTB \n', lower=True, split=' '):
if lower:
text = text.lower()
text = text.translate(maketrans(filters, (split * len(filters))))
seq = text.split(split)
return [i for i in seq if i]
|
[
"def",
"text_to_word_sequence",
"(",
"text",
",",
"filters",
"=",
"'!\"#$%&()*+,-./:;<=>?@[\\\\]^_`{|}~ DCTB \\n'",
",",
"lower",
"=",
"True",
",",
"split",
"=",
"' '",
")",
":",
"if",
"lower",
":",
"text",
"=",
"text",
".",
"lower",
"(",
")",
"text",
"=",
"text",
".",
"translate",
"(",
"maketrans",
"(",
"filters",
",",
"(",
"split",
"*",
"len",
"(",
"filters",
")",
")",
")",
")",
"seq",
"=",
"text",
".",
"split",
"(",
"split",
")",
"return",
"[",
"i",
"for",
"i",
"in",
"seq",
"if",
"i",
"]"
] |
converts a text to a sequence of word indices .
|
train
| false
|
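A usage sketch, assuming `maketrans` is bound to `str.maketrans` (the Keras source aliases it per Python version):

```python
print(text_to_word_sequence('Hello, World! How are you?'))
# -> ['hello', 'world', 'how', 'are', 'you']
```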
2,465
|
def pvremove(devices, override=True):
if isinstance(devices, six.string_types):
devices = devices.split(',')
cmd = ['pvremove', '-y']
for device in devices:
if pvdisplay(device):
cmd.append(device)
elif (not override):
raise CommandExecutionError('{0} is not a physical volume'.format(device))
if (not cmd[2:]):
return True
out = __salt__['cmd.run_all'](cmd, python_shell=False)
if out.get('retcode'):
raise CommandExecutionError(out.get('stderr'))
for device in devices:
if pvdisplay(device):
raise CommandExecutionError('Device "{0}" was not affected.'.format(device))
return True
|
[
"def",
"pvremove",
"(",
"devices",
",",
"override",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"devices",
",",
"six",
".",
"string_types",
")",
":",
"devices",
"=",
"devices",
".",
"split",
"(",
"','",
")",
"cmd",
"=",
"[",
"'pvremove'",
",",
"'-y'",
"]",
"for",
"device",
"in",
"devices",
":",
"if",
"pvdisplay",
"(",
"device",
")",
":",
"cmd",
".",
"append",
"(",
"device",
")",
"elif",
"(",
"not",
"override",
")",
":",
"raise",
"CommandExecutionError",
"(",
"'{0} is not a physical volume'",
".",
"format",
"(",
"device",
")",
")",
"if",
"(",
"not",
"cmd",
"[",
"2",
":",
"]",
")",
":",
"return",
"True",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"if",
"out",
".",
"get",
"(",
"'retcode'",
")",
":",
"raise",
"CommandExecutionError",
"(",
"out",
".",
"get",
"(",
"'stderr'",
")",
")",
"for",
"device",
"in",
"devices",
":",
"if",
"pvdisplay",
"(",
"device",
")",
":",
"raise",
"CommandExecutionError",
"(",
"'Device \"{0}\" was not affected.'",
".",
"format",
"(",
"device",
")",
")",
"return",
"True"
] |
remove a physical device being used as an lvm physical volume ; with override set , devices that are not physical volumes are skipped instead of raising an error .
|
train
| true
|
2,466
|
def test_topic_delete(topic):
assert (topic.user.post_count == 1)
assert (topic.post_count == 1)
assert (topic.forum.topic_count == 1)
assert (topic.forum.post_count == 1)
topic.delete(users=[topic.user])
forum = Forum.query.filter_by(id=topic.forum_id).first()
user = User.query.filter_by(id=topic.user_id).first()
topic = Topic.query.filter_by(id=topic.id).first()
assert (topic is None)
assert (user.post_count == 0)
assert (forum.topic_count == 0)
assert (forum.post_count == 0)
assert (forum.last_post_id is None)
|
[
"def",
"test_topic_delete",
"(",
"topic",
")",
":",
"assert",
"(",
"topic",
".",
"user",
".",
"post_count",
"==",
"1",
")",
"assert",
"(",
"topic",
".",
"post_count",
"==",
"1",
")",
"assert",
"(",
"topic",
".",
"forum",
".",
"topic_count",
"==",
"1",
")",
"assert",
"(",
"topic",
".",
"forum",
".",
"post_count",
"==",
"1",
")",
"topic",
".",
"delete",
"(",
"users",
"=",
"[",
"topic",
".",
"user",
"]",
")",
"forum",
"=",
"Forum",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"topic",
".",
"forum_id",
")",
".",
"first",
"(",
")",
"user",
"=",
"User",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"topic",
".",
"user_id",
")",
".",
"first",
"(",
")",
"topic",
"=",
"Topic",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"topic",
".",
"id",
")",
".",
"first",
"(",
")",
"assert",
"(",
"topic",
"is",
"None",
")",
"assert",
"(",
"user",
".",
"post_count",
"==",
"0",
")",
"assert",
"(",
"forum",
".",
"topic_count",
"==",
"0",
")",
"assert",
"(",
"forum",
".",
"post_count",
"==",
"0",
")",
"assert",
"(",
"forum",
".",
"last_post_id",
"is",
"None",
")"
] |
test the delete topic method .
|
train
| false
|
2,468
|
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
@utils.arg('attachment_id', metavar='<attachment>', help=_('Attachment ID of the volume.'))
@utils.arg('new_volume', metavar='<volume>', help=_('ID of the volume to attach.'))
def do_volume_update(cs, args):
cs.volumes.update_server_volume(_find_server(cs, args.server).id, args.attachment_id, args.new_volume)
|
[
"@",
"utils",
".",
"arg",
"(",
"'server'",
",",
"metavar",
"=",
"'<server>'",
",",
"help",
"=",
"_",
"(",
"'Name or ID of server.'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'attachment_id'",
",",
"metavar",
"=",
"'<attachment>'",
",",
"help",
"=",
"_",
"(",
"'Attachment ID of the volume.'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'new_volume'",
",",
"metavar",
"=",
"'<volume>'",
",",
"help",
"=",
"_",
"(",
"'ID of the volume to attach.'",
")",
")",
"def",
"do_volume_update",
"(",
"cs",
",",
"args",
")",
":",
"cs",
".",
"volumes",
".",
"update_server_volume",
"(",
"_find_server",
"(",
"cs",
",",
"args",
".",
"server",
")",
".",
"id",
",",
"args",
".",
"attachment_id",
",",
"args",
".",
"new_volume",
")"
] |
update volume attachment .
|
train
| false
|
2,469
|
def write_version_py(filename=None):
doc = '"""\nThis is a VERSION file and should NOT be manually altered\n"""'
doc += ("\nversion = '%s'" % VERSION)
if (not filename):
filename = os.path.join(os.path.dirname(__file__), 'quantecon', 'version.py')
fl = open(filename, 'w')
try:
fl.write(doc)
finally:
fl.close()
|
[
"def",
"write_version_py",
"(",
"filename",
"=",
"None",
")",
":",
"doc",
"=",
"'\"\"\"\\nThis is a VERSION file and should NOT be manually altered\\n\"\"\"'",
"doc",
"+=",
"(",
"\"\\nversion = '%s'\"",
"%",
"VERSION",
")",
"if",
"(",
"not",
"filename",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'quantecon'",
",",
"'version.py'",
")",
"fl",
"=",
"open",
"(",
"filename",
",",
"'w'",
")",
"try",
":",
"fl",
".",
"write",
"(",
"doc",
")",
"finally",
":",
"fl",
".",
"close",
"(",
")"
] |
this constructs a version file for the project .
|
train
| true
|
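Given a hypothetical `VERSION = '0.3.8'`, the `quantecon/version.py` it writes contains exactly:

```python
"""
This is a VERSION file and should NOT be manually altered
"""
version = '0.3.8'
```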
2,471
|
@cronjobs.register
def reload_question_traffic_stats():
if settings.STAGE:
return
QuestionVisits.reload_from_analytics(verbose=settings.DEBUG)
|
[
"@",
"cronjobs",
".",
"register",
"def",
"reload_question_traffic_stats",
"(",
")",
":",
"if",
"settings",
".",
"STAGE",
":",
"return",
"QuestionVisits",
".",
"reload_from_analytics",
"(",
"verbose",
"=",
"settings",
".",
"DEBUG",
")"
] |
reload question views from the analytics .
|
train
| false
|
2,472
|
@pytest.mark.network
def test_no_extras_uninstall(script):
result = script.pip('install', 'Paste[openid]==1.7.5.1', expect_stderr=True)
assert (join(script.site_packages, 'paste') in result.files_created), sorted(result.files_created.keys())
assert (join(script.site_packages, 'openid') in result.files_created), sorted(result.files_created.keys())
result2 = script.pip('uninstall', 'Paste', '-y')
initools_folder = (script.site_packages / 'openid')
assert (initools_folder not in result2.files_deleted), result.files_deleted
|
[
"@",
"pytest",
".",
"mark",
".",
"network",
"def",
"test_no_extras_uninstall",
"(",
"script",
")",
":",
"result",
"=",
"script",
".",
"pip",
"(",
"'install'",
",",
"'Paste[openid]==1.7.5.1'",
",",
"expect_stderr",
"=",
"True",
")",
"assert",
"(",
"join",
"(",
"script",
".",
"site_packages",
",",
"'paste'",
")",
"in",
"result",
".",
"files_created",
")",
",",
"sorted",
"(",
"result",
".",
"files_created",
".",
"keys",
"(",
")",
")",
"assert",
"(",
"join",
"(",
"script",
".",
"site_packages",
",",
"'openid'",
")",
"in",
"result",
".",
"files_created",
")",
",",
"sorted",
"(",
"result",
".",
"files_created",
".",
"keys",
"(",
")",
")",
"result2",
"=",
"script",
".",
"pip",
"(",
"'uninstall'",
",",
"'Paste'",
",",
"'-y'",
")",
"initools_folder",
"=",
"(",
"script",
".",
"site_packages",
"/",
"'openid'",
")",
"assert",
"(",
"initools_folder",
"not",
"in",
"result2",
".",
"files_deleted",
")",
",",
"result",
".",
"files_deleted"
] |
no extras dependency gets uninstalled when the root package is uninstalled .
|
train
| false
|
2,473
|
@pytest.fixture
def bookmark_manager_stub(stubs):
stub = stubs.BookmarkManagerStub()
objreg.register('bookmark-manager', stub)
(yield stub)
objreg.delete('bookmark-manager')
|
[
"@",
"pytest",
".",
"fixture",
"def",
"bookmark_manager_stub",
"(",
"stubs",
")",
":",
"stub",
"=",
"stubs",
".",
"BookmarkManagerStub",
"(",
")",
"objreg",
".",
"register",
"(",
"'bookmark-manager'",
",",
"stub",
")",
"(",
"yield",
"stub",
")",
"objreg",
".",
"delete",
"(",
"'bookmark-manager'",
")"
] |
fixture which provides a fake bookmark manager object .
|
train
| false
|
2,474
|
def port_create_gre(br, port, id, remote):
if (not (0 <= id < (2 ** 32))):
return False
elif (not __salt__['dig.check_ip'](remote)):
return False
elif (not bridge_exists(br)):
return False
elif (port in port_list(br)):
cmd = 'ovs-vsctl set interface {0} type=gre options:remote_ip={1} options:key={2}'.format(port, remote, id)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode'])
else:
cmd = 'ovs-vsctl add-port {0} {1} -- set interface {1} type=gre options:remote_ip={2} options:key={3}'.format(br, port, remote, id)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode'])
|
[
"def",
"port_create_gre",
"(",
"br",
",",
"port",
",",
"id",
",",
"remote",
")",
":",
"if",
"(",
"not",
"(",
"0",
"<=",
"id",
"<",
"(",
"2",
"**",
"32",
")",
")",
")",
":",
"return",
"False",
"elif",
"(",
"not",
"__salt__",
"[",
"'dig.check_ip'",
"]",
"(",
"remote",
")",
")",
":",
"return",
"False",
"elif",
"(",
"not",
"bridge_exists",
"(",
"br",
")",
")",
":",
"return",
"False",
"elif",
"(",
"port",
"in",
"port_list",
"(",
"br",
")",
")",
":",
"cmd",
"=",
"'ovs-vsctl set interface {0} type=gre options:remote_ip={1} options:key={2}'",
".",
"format",
"(",
"port",
",",
"remote",
",",
"id",
")",
"result",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"return",
"_retcode_to_bool",
"(",
"result",
"[",
"'retcode'",
"]",
")",
"else",
":",
"cmd",
"=",
"'ovs-vsctl add-port {0} {1} -- set interface {1} type=gre options:remote_ip={2} options:key={3}'",
".",
"format",
"(",
"br",
",",
"port",
",",
"remote",
",",
"id",
")",
"result",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"return",
"_retcode_to_bool",
"(",
"result",
"[",
"'retcode'",
"]",
")"
] |
generic routing encapsulation - creates gre tunnel between endpoints .
|
train
| true
|
2,477
|
def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
xy_iter = node_attribute_xy(G, attribute, nodes)
return mixing_dict(xy_iter, normalized=normalized)
|
[
"def",
"attribute_mixing_dict",
"(",
"G",
",",
"attribute",
",",
"nodes",
"=",
"None",
",",
"normalized",
"=",
"False",
")",
":",
"xy_iter",
"=",
"node_attribute_xy",
"(",
"G",
",",
"attribute",
",",
"nodes",
")",
"return",
"mixing_dict",
"(",
"xy_iter",
",",
"normalized",
"=",
"normalized",
")"
] |
return dictionary representation of mixing matrix for attribute .
|
train
| false
|
2,478
|
def safe_min(X):
if issparse(X):
if (len(X.data) == 0):
return 0
m = X.data.min()
return (m if (X.getnnz() == X.size) else min(m, 0))
else:
return X.min()
|
[
"def",
"safe_min",
"(",
"X",
")",
":",
"if",
"issparse",
"(",
"X",
")",
":",
"if",
"(",
"len",
"(",
"X",
".",
"data",
")",
"==",
"0",
")",
":",
"return",
"0",
"m",
"=",
"X",
".",
"data",
".",
"min",
"(",
")",
"return",
"(",
"m",
"if",
"(",
"X",
".",
"getnnz",
"(",
")",
"==",
"X",
".",
"size",
")",
"else",
"min",
"(",
"m",
",",
"0",
")",
")",
"else",
":",
"return",
"X",
".",
"min",
"(",
")"
] |
returns the minimum value of a dense or a csr/csc matrix .
|
train
| false
|
2,479
|
def in6_ismsladdr(str):
return in6_isincluded(str, 'ff05::', 16)
|
[
"def",
"in6_ismsladdr",
"(",
"str",
")",
":",
"return",
"in6_isincluded",
"(",
"str",
",",
"'ff05::'",
",",
"16",
")"
] |
returns true if address belongs to site-local multicast address space .
|
train
| false
|
2,480
|
def _escape_unicode(value, mapping=None):
return value.translate(_escape_table)
|
[
"def",
"_escape_unicode",
"(",
"value",
",",
"mapping",
"=",
"None",
")",
":",
"return",
"value",
".",
"translate",
"(",
"_escape_table",
")"
] |
escapes *value* without adding quote .
|
train
| false
|
2,481
|
def python_script_exists(package=None, module=None):
assert (module is not None)
try:
if (package is None):
path = imp.find_module(module)[1]
else:
path = (osp.join(imp.find_module(package)[1], module) + '.py')
except ImportError:
return
if (not osp.isfile(path)):
path += 'w'
if osp.isfile(path):
return path
|
[
"def",
"python_script_exists",
"(",
"package",
"=",
"None",
",",
"module",
"=",
"None",
")",
":",
"assert",
"(",
"module",
"is",
"not",
"None",
")",
"try",
":",
"if",
"(",
"package",
"is",
"None",
")",
":",
"path",
"=",
"imp",
".",
"find_module",
"(",
"module",
")",
"[",
"1",
"]",
"else",
":",
"path",
"=",
"(",
"osp",
".",
"join",
"(",
"imp",
".",
"find_module",
"(",
"package",
")",
"[",
"1",
"]",
",",
"module",
")",
"+",
"'.py'",
")",
"except",
"ImportError",
":",
"return",
"if",
"(",
"not",
"osp",
".",
"isfile",
"(",
"path",
")",
")",
":",
"path",
"+=",
"'w'",
"if",
"osp",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"path"
] |
return the absolute path if the python script exists ; package=none means the module is looked up directly on sys.path .
|
train
| true
|
2,483
|
def style():
field = s3db.gis_style.layer_id
field.readable = field.writable = True
field.label = T('Layer')
represent = field.represent = s3base.S3Represent(lookup='gis_layer_entity')
field.requires = IS_ONE_OF(db, 'gis_layer_entity.layer_id', represent)
return s3_rest_controller()
|
[
"def",
"style",
"(",
")",
":",
"field",
"=",
"s3db",
".",
"gis_style",
".",
"layer_id",
"field",
".",
"readable",
"=",
"field",
".",
"writable",
"=",
"True",
"field",
".",
"label",
"=",
"T",
"(",
"'Layer'",
")",
"represent",
"=",
"field",
".",
"represent",
"=",
"s3base",
".",
"S3Represent",
"(",
"lookup",
"=",
"'gis_layer_entity'",
")",
"field",
".",
"requires",
"=",
"IS_ONE_OF",
"(",
"db",
",",
"'gis_layer_entity.layer_id'",
",",
"represent",
")",
"return",
"s3_rest_controller",
"(",
")"
] |
restful crud controller .
|
train
| false
|
2,484
|
def tag(accessing_obj, accessed_obj, *args, **kwargs):
if hasattr(accessing_obj, 'obj'):
accessing_obj = accessing_obj.obj
tagkey = (args[0] if args else None)
category = (args[1] if (len(args) > 1) else None)
return accessing_obj.tags.get(tagkey, category=category)
|
[
"def",
"tag",
"(",
"accessing_obj",
",",
"accessed_obj",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"hasattr",
"(",
"accessing_obj",
",",
"'obj'",
")",
":",
"accessing_obj",
"=",
"accessing_obj",
".",
"obj",
"tagkey",
"=",
"(",
"args",
"[",
"0",
"]",
"if",
"args",
"else",
"None",
")",
"category",
"=",
"(",
"args",
"[",
"1",
"]",
"if",
"(",
"len",
"(",
"args",
")",
">",
"1",
")",
"else",
"None",
")",
"return",
"accessing_obj",
".",
"tags",
".",
"get",
"(",
"tagkey",
",",
"category",
"=",
"category",
")"
] |
lock check that passes if the accessing object has the given tag ( and optional category ) .
|
train
| false
|
2,485
|
def open_firewall_for_docker_api(distribution):
if is_centos_or_rhel(distribution):
upload = put(path='/usr/lib/firewalld/services/docker.xml', content=dedent(' <?xml version="1.0" encoding="utf-8"?>\n <service>\n <short>Docker API Port</short>\n <description>The Docker API, over TLS.</description>\n <port protocol="tcp" port="2376"/>\n </service>\n '))
open_firewall = open_firewalld
elif is_ubuntu(distribution):
upload = put(path='/etc/ufw/applications.d/docker', content=dedent('\n [docker]\n title=Docker API\n description=Docker API.\n ports=2376/tcp\n '))
open_firewall = open_ufw
else:
raise DistributionNotSupported(distribution=distribution)
return sequence([upload, open_firewall('docker')])
|
[
"def",
"open_firewall_for_docker_api",
"(",
"distribution",
")",
":",
"if",
"is_centos_or_rhel",
"(",
"distribution",
")",
":",
"upload",
"=",
"put",
"(",
"path",
"=",
"'/usr/lib/firewalld/services/docker.xml'",
",",
"content",
"=",
"dedent",
"(",
"' <?xml version=\"1.0\" encoding=\"utf-8\"?>\\n <service>\\n <short>Docker API Port</short>\\n <description>The Docker API, over TLS.</description>\\n <port protocol=\"tcp\" port=\"2376\"/>\\n </service>\\n '",
")",
")",
"open_firewall",
"=",
"open_firewalld",
"elif",
"is_ubuntu",
"(",
"distribution",
")",
":",
"upload",
"=",
"put",
"(",
"path",
"=",
"'/etc/ufw/applications.d/docker'",
",",
"content",
"=",
"dedent",
"(",
"'\\n [docker]\\n title=Docker API\\n description=Docker API.\\n ports=2376/tcp\\n '",
")",
")",
"open_firewall",
"=",
"open_ufw",
"else",
":",
"raise",
"DistributionNotSupported",
"(",
"distribution",
"=",
"distribution",
")",
"return",
"sequence",
"(",
"[",
"upload",
",",
"open_firewall",
"(",
"'docker'",
")",
"]",
")"
] |
open the firewall for remote access to docker api .
|
train
| false
|
2,486
|
def _ds(elem):
_indent(elem)
return ElementTree.tostring(elem)
|
[
"def",
"_ds",
"(",
"elem",
")",
":",
"_indent",
"(",
"elem",
")",
"return",
"ElementTree",
".",
"tostring",
"(",
"elem",
")"
] |
elementtree debug function .
|
train
| false
|
2,487
|
def interpret_data(data, colnames=None, rownames=None):
if isinstance(data, np.ndarray):
if _is_structured_ndarray(data):
if (colnames is None):
colnames = data.dtype.names
values = struct_to_ndarray(data)
else:
values = data
if (colnames is None):
colnames = [('Y_%d' % i) for i in range(values.shape[1])]
elif is_data_frame(data):
data = data.dropna()
values = data.values
colnames = data.columns
rownames = data.index
else:
raise Exception('cannot handle other input types at the moment')
if (not isinstance(colnames, list)):
colnames = list(colnames)
if (len(colnames) != values.shape[1]):
raise ValueError('length of colnames does not match number of columns in data')
if ((rownames is not None) and (len(rownames) != len(values))):
raise ValueError('length of rownames does not match number of rows in data')
return (values, colnames, rownames)
|
[
"def",
"interpret_data",
"(",
"data",
",",
"colnames",
"=",
"None",
",",
"rownames",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"np",
".",
"ndarray",
")",
":",
"if",
"_is_structured_ndarray",
"(",
"data",
")",
":",
"if",
"(",
"colnames",
"is",
"None",
")",
":",
"colnames",
"=",
"data",
".",
"dtype",
".",
"names",
"values",
"=",
"struct_to_ndarray",
"(",
"data",
")",
"else",
":",
"values",
"=",
"data",
"if",
"(",
"colnames",
"is",
"None",
")",
":",
"colnames",
"=",
"[",
"(",
"'Y_%d'",
"%",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"values",
".",
"shape",
"[",
"1",
"]",
")",
"]",
"elif",
"is_data_frame",
"(",
"data",
")",
":",
"data",
"=",
"data",
".",
"dropna",
"(",
")",
"values",
"=",
"data",
".",
"values",
"colnames",
"=",
"data",
".",
"columns",
"rownames",
"=",
"data",
".",
"index",
"else",
":",
"raise",
"Exception",
"(",
"'cannot handle other input types at the moment'",
")",
"if",
"(",
"not",
"isinstance",
"(",
"colnames",
",",
"list",
")",
")",
":",
"colnames",
"=",
"list",
"(",
"colnames",
")",
"if",
"(",
"len",
"(",
"colnames",
")",
"!=",
"values",
".",
"shape",
"[",
"1",
"]",
")",
":",
"raise",
"ValueError",
"(",
"'length of colnames does not match number of columns in data'",
")",
"if",
"(",
"(",
"rownames",
"is",
"not",
"None",
")",
"and",
"(",
"len",
"(",
"rownames",
")",
"!=",
"len",
"(",
"values",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"'length of rownames does not match number of rows in data'",
")",
"return",
"(",
"values",
",",
"colnames",
",",
"rownames",
")"
] |
convert a passed data structure ( structured ndarray , plain ndarray or dataframe ) to the ( values , colnames , rownames ) form required by estimation classes .
|
train
| false
|
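A minimal usage sketch for the plain-ndarray branch, where default column names are generated:

```python
import numpy as np

values, colnames, rownames = interpret_data(np.zeros((4, 2)))
assert colnames == ['Y_0', 'Y_1']  # generated defaults
assert rownames is None            # only set for DataFrame input
```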
2,488
|
def test_install_from_local_directory(script, data):
to_install = data.packages.join('FSPkg')
result = script.pip('install', to_install, expect_error=False)
fspkg_folder = (script.site_packages / 'fspkg')
egg_info_folder = ((script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info') % pyversion)
assert (fspkg_folder in result.files_created), str(result.stdout)
assert (egg_info_folder in result.files_created), str(result)
|
[
"def",
"test_install_from_local_directory",
"(",
"script",
",",
"data",
")",
":",
"to_install",
"=",
"data",
".",
"packages",
".",
"join",
"(",
"'FSPkg'",
")",
"result",
"=",
"script",
".",
"pip",
"(",
"'install'",
",",
"to_install",
",",
"expect_error",
"=",
"False",
")",
"fspkg_folder",
"=",
"(",
"script",
".",
"site_packages",
"/",
"'fspkg'",
")",
"egg_info_folder",
"=",
"(",
"(",
"script",
".",
"site_packages",
"/",
"'FSPkg-0.1.dev0-py%s.egg-info'",
")",
"%",
"pyversion",
")",
"assert",
"(",
"fspkg_folder",
"in",
"result",
".",
"files_created",
")",
",",
"str",
"(",
"result",
".",
"stdout",
")",
"assert",
"(",
"egg_info_folder",
"in",
"result",
".",
"files_created",
")",
",",
"str",
"(",
"result",
")"
] |
test installing from a local directory .
|
train
| false
|
2,489
|
def group_volume_type_mapping_create(context, group_id, volume_type_id):
return IMPL.group_volume_type_mapping_create(context, group_id, volume_type_id)
|
[
"def",
"group_volume_type_mapping_create",
"(",
"context",
",",
"group_id",
",",
"volume_type_id",
")",
":",
"return",
"IMPL",
".",
"group_volume_type_mapping_create",
"(",
"context",
",",
"group_id",
",",
"volume_type_id",
")"
] |
add group volume_type mapping entry .
|
train
| false
|
2,490
|
def _make_seqfeature(name, from_res, to_res, description, ft_id):
loc = SeqFeature.FeatureLocation(_make_position(from_res, (-1)), _make_position(to_res, 0))
if (not ft_id):
ft_id = '<unknown id>'
return SeqFeature.SeqFeature(loc, type=name, id=ft_id, qualifiers={'description': description})
|
[
"def",
"_make_seqfeature",
"(",
"name",
",",
"from_res",
",",
"to_res",
",",
"description",
",",
"ft_id",
")",
":",
"loc",
"=",
"SeqFeature",
".",
"FeatureLocation",
"(",
"_make_position",
"(",
"from_res",
",",
"(",
"-",
"1",
")",
")",
",",
"_make_position",
"(",
"to_res",
",",
"0",
")",
")",
"if",
"(",
"not",
"ft_id",
")",
":",
"ft_id",
"=",
"'<unknown id>'",
"return",
"SeqFeature",
".",
"SeqFeature",
"(",
"loc",
",",
"type",
"=",
"name",
",",
"id",
"=",
"ft_id",
",",
"qualifiers",
"=",
"{",
"'description'",
":",
"description",
"}",
")"
] |
construct seqfeature from feature data from parser .
|
train
| false
|
2,492
|
def expand_cell_ranges(range_string):
cells = []
for rs in range_string.split():
cells.extend(rows_from_range(rs))
return list(chain.from_iterable(cells))
|
[
"def",
"expand_cell_ranges",
"(",
"range_string",
")",
":",
"cells",
"=",
"[",
"]",
"for",
"rs",
"in",
"range_string",
".",
"split",
"(",
")",
":",
"cells",
".",
"extend",
"(",
"rows_from_range",
"(",
"rs",
")",
")",
"return",
"list",
"(",
"chain",
".",
"from_iterable",
"(",
"cells",
")",
")"
] |
expand cell ranges to a sequence of addresses .
|
train
| false
|
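
A minimal usage sketch for the `expand_cell_ranges` record above. It assumes openpyxl provides the `rows_from_range` helper used by the snippet; the sample range is illustrative.

```python
# Sketch only: assumes openpyxl's rows_from_range as used in the record above.
from itertools import chain
from openpyxl.utils import rows_from_range

def expand_cell_ranges(range_string):
    cells = []
    for rs in range_string.split():
        cells.extend(rows_from_range(rs))  # one tuple of coordinates per row
    return list(chain.from_iterable(cells))

print(expand_cell_ranges("A1:B2"))  # ['A1', 'B1', 'A2', 'B2']
```
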
2,494
|
def tokey(*args):
salt = u'||'.join([force_text(arg) for arg in args])
hash_ = hashlib.md5(encode(salt))
return hash_.hexdigest()
|
[
"def",
"tokey",
"(",
"*",
"args",
")",
":",
"salt",
"=",
"u'||'",
".",
"join",
"(",
"[",
"force_text",
"(",
"arg",
")",
"for",
"arg",
"in",
"args",
"]",
")",
"hash_",
"=",
"hashlib",
".",
"md5",
"(",
"encode",
"(",
"salt",
")",
")",
"return",
"hash_",
".",
"hexdigest",
"(",
")"
] |
computes a unique key from arguments given .
|
train
| true
|
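
A standalone sketch of the `tokey` record above, with `str` and `.encode` standing in for the project's `force_text`/`encode` helpers (an assumption):

```python
import hashlib

def tokey(*args):
    # join the string forms of all arguments, then hash the result
    salt = u'||'.join(str(arg) for arg in args)
    return hashlib.md5(salt.encode('utf-8')).hexdigest()

print(tokey('thumbnail', 'avatar.png', 100))  # deterministic 32-char hex key
```
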
2,495
|
def Hists(hists, **options):
for hist in hists:
Hist(hist, **options)
|
[
"def",
"Hists",
"(",
"hists",
",",
"**",
"options",
")",
":",
"for",
"hist",
"in",
"hists",
":",
"Hist",
"(",
"hist",
",",
"**",
"options",
")"
] |
plots two histograms as interleaved bar plots .
|
train
| false
|
2,496
|
def wrap_valid_longitude(lon):
return (((lon + 180.0) % 360.0) - 180.0)
|
[
"def",
"wrap_valid_longitude",
"(",
"lon",
")",
":",
"return",
"(",
"(",
"(",
"lon",
"+",
"180.0",
")",
"%",
"360.0",
")",
"-",
"180.0",
")"
] |
wrap a longitude value around to always have a value in the range [-180, 180) .
|
train
| false
|
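
The `wrap_valid_longitude` record above maps any longitude into [-180, 180); a quick check with illustrative values:

```python
def wrap_valid_longitude(lon):
    # modulo arithmetic maps lon onto the half-open interval [-180.0, 180.0)
    return ((lon + 180.0) % 360.0) - 180.0

for lon in (0.0, 190.0, -200.0, 540.0):
    print(lon, '->', wrap_valid_longitude(lon))
# 0.0 -> 0.0, 190.0 -> -170.0, -200.0 -> 160.0, 540.0 -> -180.0
```
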
2,497
|
def _errorReplaceChars(value):
retVal = value
if value:
retVal = retVal.replace(kb.chars.space, ' ').replace(kb.chars.dollar, '$').replace(kb.chars.at, '@').replace(kb.chars.hash_, '#')
return retVal
|
[
"def",
"_errorReplaceChars",
"(",
"value",
")",
":",
"retVal",
"=",
"value",
"if",
"value",
":",
"retVal",
"=",
"retVal",
".",
"replace",
"(",
"kb",
".",
"chars",
".",
"space",
",",
"' '",
")",
".",
"replace",
"(",
"kb",
".",
"chars",
".",
"dollar",
",",
"'$'",
")",
".",
"replace",
"(",
"kb",
".",
"chars",
".",
"at",
",",
"'@'",
")",
".",
"replace",
"(",
"kb",
".",
"chars",
".",
"hash_",
",",
"'#'",
")",
"return",
"retVal"
] |
restores safely replaced characters .
|
train
| false
|
2,499
|
def CreateSitemapFromFile(configpath, suppress_notify):
num_errors = output.num_errors
sitemap = Sitemap(suppress_notify)
try:
output.Log(('Reading configuration file: %s' % configpath), 0)
xml.sax.parse(configpath, sitemap)
except IOError:
output.Error(('Cannot read configuration file: %s' % configpath))
except xml.sax._exceptions.SAXParseException as e:
output.Error(('XML error in the config file (line %d, column %d): %s' % (e._linenum, e._colnum, e.getMessage())))
except xml.sax._exceptions.SAXReaderNotAvailable:
output.Error('Some installs of Python 2.2 did not include complete support for XML.\n Please try upgrading your version of Python and re-running the script.')
if (num_errors == output.num_errors):
return sitemap
return None
|
[
"def",
"CreateSitemapFromFile",
"(",
"configpath",
",",
"suppress_notify",
")",
":",
"num_errors",
"=",
"output",
".",
"num_errors",
"sitemap",
"=",
"Sitemap",
"(",
"suppress_notify",
")",
"try",
":",
"output",
".",
"Log",
"(",
"(",
"'Reading configuration file: %s'",
"%",
"configpath",
")",
",",
"0",
")",
"xml",
".",
"sax",
".",
"parse",
"(",
"configpath",
",",
"sitemap",
")",
"except",
"IOError",
":",
"output",
".",
"Error",
"(",
"(",
"'Cannot read configuration file: %s'",
"%",
"configpath",
")",
")",
"except",
"xml",
".",
"sax",
".",
"_exceptions",
".",
"SAXParseException",
"as",
"e",
":",
"output",
".",
"Error",
"(",
"(",
"'XML error in the config file (line %d, column %d): %s'",
"%",
"(",
"e",
".",
"_linenum",
",",
"e",
".",
"_colnum",
",",
"e",
".",
"getMessage",
"(",
")",
")",
")",
")",
"except",
"xml",
".",
"sax",
".",
"_exceptions",
".",
"SAXReaderNotAvailable",
":",
"output",
".",
"Error",
"(",
"'Some installs of Python 2.2 did not include complete support for XML.\\n Please try upgrading your version of Python and re-running the script.'",
")",
"if",
"(",
"num_errors",
"==",
"output",
".",
"num_errors",
")",
":",
"return",
"sitemap",
"return",
"None"
] |
sets up a new sitemap object from the specified configuration file .
|
train
| false
|
2,501
|
def _makeFrame(buf, opcode, fin, mask=None):
bufferLength = len(buf)
if (mask is not None):
lengthMask = 128
else:
lengthMask = 0
if (bufferLength > 65535):
length = ('%s%s' % (chr((lengthMask | 127)), pack('>Q', bufferLength)))
elif (bufferLength > 125):
length = ('%s%s' % (chr((lengthMask | 126)), pack('>H', bufferLength)))
else:
length = chr((lengthMask | bufferLength))
if fin:
header = 128
else:
header = 1
header = chr((header | opcode.value))
if (mask is not None):
buf = ('%s%s' % (mask, _mask(buf, mask)))
frame = ('%s%s%s' % (header, length, buf))
return frame
|
[
"def",
"_makeFrame",
"(",
"buf",
",",
"opcode",
",",
"fin",
",",
"mask",
"=",
"None",
")",
":",
"bufferLength",
"=",
"len",
"(",
"buf",
")",
"if",
"(",
"mask",
"is",
"not",
"None",
")",
":",
"lengthMask",
"=",
"128",
"else",
":",
"lengthMask",
"=",
"0",
"if",
"(",
"bufferLength",
">",
"65535",
")",
":",
"length",
"=",
"(",
"'%s%s'",
"%",
"(",
"chr",
"(",
"(",
"lengthMask",
"|",
"127",
")",
")",
",",
"pack",
"(",
"'>Q'",
",",
"bufferLength",
")",
")",
")",
"elif",
"(",
"bufferLength",
">",
"125",
")",
":",
"length",
"=",
"(",
"'%s%s'",
"%",
"(",
"chr",
"(",
"(",
"lengthMask",
"|",
"126",
")",
")",
",",
"pack",
"(",
"'>H'",
",",
"bufferLength",
")",
")",
")",
"else",
":",
"length",
"=",
"chr",
"(",
"(",
"lengthMask",
"|",
"bufferLength",
")",
")",
"if",
"fin",
":",
"header",
"=",
"128",
"else",
":",
"header",
"=",
"1",
"header",
"=",
"chr",
"(",
"(",
"header",
"|",
"opcode",
".",
"value",
")",
")",
"if",
"(",
"mask",
"is",
"not",
"None",
")",
":",
"buf",
"=",
"(",
"'%s%s'",
"%",
"(",
"mask",
",",
"_mask",
"(",
"buf",
",",
"mask",
")",
")",
")",
"frame",
"=",
"(",
"'%s%s%s'",
"%",
"(",
"header",
",",
"length",
",",
"buf",
")",
")",
"return",
"frame"
] |
make a frame .
|
train
| false
|
2,502
|
def STDERR(v):
context.log_console = sys.stderr
|
[
"def",
"STDERR",
"(",
"v",
")",
":",
"context",
".",
"log_console",
"=",
"sys",
".",
"stderr"
] |
sends logging to stderr by default .
|
train
| false
|
2,506
|
def create_home_dir_structure():
for directory in (HOME_NINJA_PATH, EXTENSIONS_PATH, PLUGINS, EDITOR_SKINS, LANGS, NINJA_THEME_DOWNLOAD, NINJA_KNOWLEDGE_PATH):
if (not os.path.isdir(directory)):
os.mkdir(directory)
|
[
"def",
"create_home_dir_structure",
"(",
")",
":",
"for",
"directory",
"in",
"(",
"HOME_NINJA_PATH",
",",
"EXTENSIONS_PATH",
",",
"PLUGINS",
",",
"EDITOR_SKINS",
",",
"LANGS",
",",
"NINJA_THEME_DOWNLOAD",
",",
"NINJA_KNOWLEDGE_PATH",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"directory",
")",
")",
":",
"os",
".",
"mkdir",
"(",
"directory",
")"
] |
create the necessary directory structure for ninja-ide .
|
train
| false
|
2,508
|
def buildLibraries():
print ''
print 'Building required libraries'
print ''
universal = os.path.join(WORKDIR, 'libraries')
os.mkdir(universal)
os.makedirs(os.path.join(universal, 'usr', 'local', 'lib'))
os.makedirs(os.path.join(universal, 'usr', 'local', 'include'))
for recipe in LIBRARY_RECIPES:
buildRecipe(recipe, universal, ARCHLIST)
|
[
"def",
"buildLibraries",
"(",
")",
":",
"print",
"''",
"print",
"'Building required libraries'",
"print",
"''",
"universal",
"=",
"os",
".",
"path",
".",
"join",
"(",
"WORKDIR",
",",
"'libraries'",
")",
"os",
".",
"mkdir",
"(",
"universal",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"join",
"(",
"universal",
",",
"'usr'",
",",
"'local'",
",",
"'lib'",
")",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"join",
"(",
"universal",
",",
"'usr'",
",",
"'local'",
",",
"'include'",
")",
")",
"for",
"recipe",
"in",
"LIBRARY_RECIPES",
":",
"buildRecipe",
"(",
"recipe",
",",
"universal",
",",
"ARCHLIST",
")"
] |
build our dependencies into $workdir/libraries/usr/local .
|
train
| false
|
2,509
|
def test_mixed_inheritance_mro():
class A:
pass
class B(A, ):
pass
class C(A, ):
pass
class D(B, C, ):
pass
class E(D, object, ):
pass
AreEqual(E.__mro__, (E, D, B, A, C, object))
class F(B, C, object, ):
pass
AreEqual(F.__mro__, (F, B, C, A, object))
class G(B, object, C, ):
pass
AreEqual(G.__mro__, (G, B, object, C, A))
class H(E, ):
pass
AreEqual(H.__mro__, (H, E, D, B, A, C, object))
try:
class H(A, B, E, ):
pass
AssertUnreachable()
except TypeError:
pass
class H(E, B, A, ):
pass
AreEqual(H.__mro__, (H, E, D, B, A, C, object))
|
[
"def",
"test_mixed_inheritance_mro",
"(",
")",
":",
"class",
"A",
":",
"pass",
"class",
"B",
"(",
"A",
",",
")",
":",
"pass",
"class",
"C",
"(",
"A",
",",
")",
":",
"pass",
"class",
"D",
"(",
"B",
",",
"C",
",",
")",
":",
"pass",
"class",
"E",
"(",
"D",
",",
"object",
",",
")",
":",
"pass",
"AreEqual",
"(",
"E",
".",
"__mro__",
",",
"(",
"E",
",",
"D",
",",
"B",
",",
"A",
",",
"C",
",",
"object",
")",
")",
"class",
"F",
"(",
"B",
",",
"C",
",",
"object",
",",
")",
":",
"pass",
"AreEqual",
"(",
"F",
".",
"__mro__",
",",
"(",
"F",
",",
"B",
",",
"C",
",",
"A",
",",
"object",
")",
")",
"class",
"G",
"(",
"B",
",",
"object",
",",
"C",
",",
")",
":",
"pass",
"AreEqual",
"(",
"G",
".",
"__mro__",
",",
"(",
"G",
",",
"B",
",",
"object",
",",
"C",
",",
"A",
")",
")",
"class",
"H",
"(",
"E",
",",
")",
":",
"pass",
"AreEqual",
"(",
"H",
".",
"__mro__",
",",
"(",
"H",
",",
"E",
",",
"D",
",",
"B",
",",
"A",
",",
"C",
",",
"object",
")",
")",
"try",
":",
"class",
"H",
"(",
"A",
",",
"B",
",",
"E",
",",
")",
":",
"pass",
"AssertUnreachable",
"(",
")",
"except",
"TypeError",
":",
"pass",
"class",
"H",
"(",
"E",
",",
"B",
",",
"A",
",",
")",
":",
"pass",
"AreEqual",
"(",
"H",
".",
"__mro__",
",",
"(",
"H",
",",
"E",
",",
"D",
",",
"B",
",",
"A",
",",
"C",
",",
"object",
")",
")"
] |
mixed inheritance from old-style & new-style classes .
|
train
| false
|
2,511
|
@step('{word:w} step passes')
def step_passes(context, word):
pass
|
[
"@",
"step",
"(",
"'{word:w} step passes'",
")",
"def",
"step_passes",
"(",
"context",
",",
"word",
")",
":",
"pass"
] |
step that always passes .
|
train
| false
|
2,512
|
@content_type('application/x-www-form-urlencoded')
def urlencoded(body, charset='ascii', **kwargs):
return parse_query_string(text(body, charset=charset), False)
|
[
"@",
"content_type",
"(",
"'application/x-www-form-urlencoded'",
")",
"def",
"urlencoded",
"(",
"body",
",",
"charset",
"=",
"'ascii'",
",",
"**",
"kwargs",
")",
":",
"return",
"parse_query_string",
"(",
"text",
"(",
"body",
",",
"charset",
"=",
"charset",
")",
",",
"False",
")"
] |
converts query strings into native python objects .
|
train
| true
|
2,513
|
def get_timezones():
with open('/usr/share/zoneinfo/zone.tab', 'r') as f:
lines = f.readlines()
lines = [line for line in lines if (line and (not line.startswith('#')) and ('/' in line))]
tmp = []
timezones = {'UTC': ['UTC']}
for line in lines:
columns = line.split('\t')
try:
tz_raw = columns[2].replace('\n', '')
except:
continue
tmp.append(tz_raw)
tmp.sort()
for tz_raw in tmp:
(tz_region, tz_country) = (tz_raw[:tz_raw.index('/')], tz_raw[(tz_raw.index('/') + 1):])
t = timezones.get(tz_region, [])
t.append(tz_country)
timezones[tz_region] = t
return timezones
|
[
"def",
"get_timezones",
"(",
")",
":",
"with",
"open",
"(",
"'/usr/share/zoneinfo/zone.tab'",
",",
"'r'",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"lines",
"=",
"[",
"line",
"for",
"line",
"in",
"lines",
"if",
"(",
"line",
"and",
"(",
"not",
"line",
".",
"startswith",
"(",
"'#'",
")",
")",
"and",
"(",
"'/'",
"in",
"line",
")",
")",
"]",
"tmp",
"=",
"[",
"]",
"timezones",
"=",
"{",
"'UTC'",
":",
"[",
"'UTC'",
"]",
"}",
"for",
"line",
"in",
"lines",
":",
"columns",
"=",
"line",
".",
"split",
"(",
"' DCTB '",
")",
"try",
":",
"tz_raw",
"=",
"columns",
"[",
"2",
"]",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
"except",
":",
"continue",
"tmp",
".",
"append",
"(",
"tz_raw",
")",
"tmp",
".",
"sort",
"(",
")",
"for",
"tz_raw",
"in",
"tmp",
":",
"(",
"tz_region",
",",
"tz_country",
")",
"=",
"(",
"tz_raw",
"[",
":",
"tz_raw",
".",
"index",
"(",
"'/'",
")",
"]",
",",
"tz_raw",
"[",
"(",
"tz_raw",
".",
"index",
"(",
"'/'",
")",
"+",
"1",
")",
":",
"]",
")",
"t",
"=",
"timezones",
".",
"get",
"(",
"tz_region",
",",
"[",
"]",
")",
"t",
".",
"append",
"(",
"tz_country",
")",
"timezones",
"[",
"tz_region",
"]",
"=",
"t",
"return",
"timezones"
] |
returns a dictionary of regions .
|
train
| false
|
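
A self-contained sketch of the parsing logic in the `get_timezones` record above, fed two sample zone.tab lines (contents illustrative) instead of reading /usr/share/zoneinfo/zone.tab:

```python
lines = ["AD\t+4230+00131\tEurope/Andorra\n",
         "AU\t-3133+15905\tAustralia/Lord_Howe\n"]
timezones = {'UTC': ['UTC']}
for line in lines:
    tz_raw = line.split('\t')[2].replace('\n', '')  # third column is the zone name
    region, country = tz_raw.split('/', 1)
    timezones.setdefault(region, []).append(country)
print(timezones)
# {'UTC': ['UTC'], 'Europe': ['Andorra'], 'Australia': ['Lord_Howe']}
```
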
2,514
|
@printing_func
def isolate_resources(resources):
return Classpath(creator=u'isolate_resources')
|
[
"@",
"printing_func",
"def",
"isolate_resources",
"(",
"resources",
")",
":",
"return",
"Classpath",
"(",
"creator",
"=",
"u'isolate_resources'",
")"
] |
copies resources into a private directory .
|
train
| false
|
2,515
|
def asdict(sobject):
return dict(items(sobject))
|
[
"def",
"asdict",
"(",
"sobject",
")",
":",
"return",
"dict",
"(",
"items",
"(",
"sobject",
")",
")"
] |
convert a sudsobject into a dictionary .
|
train
| false
|
2,516
|
def get_diff(minionfile, masterfile, saltenv='base'):
minionfile = os.path.expanduser(minionfile)
ret = ''
if (not os.path.exists(minionfile)):
ret = 'File {0} does not exist on the minion'.format(minionfile)
return ret
sfn = __salt__['cp.cache_file'](masterfile, saltenv)
if sfn:
with salt.utils.fopen(sfn, 'r') as src:
slines = src.readlines()
with salt.utils.fopen(minionfile, 'r') as name_:
nlines = name_.readlines()
if (''.join(nlines) != ''.join(slines)):
bdiff = _binary_replace(minionfile, sfn)
if bdiff:
ret += bdiff
else:
ret += ''.join(difflib.unified_diff(nlines, slines, minionfile, masterfile))
else:
ret = 'Failed to copy file from master'
return ret
|
[
"def",
"get_diff",
"(",
"minionfile",
",",
"masterfile",
",",
"saltenv",
"=",
"'base'",
")",
":",
"minionfile",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"minionfile",
")",
"ret",
"=",
"''",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"minionfile",
")",
")",
":",
"ret",
"=",
"'File {0} does not exist on the minion'",
".",
"format",
"(",
"minionfile",
")",
"return",
"ret",
"sfn",
"=",
"__salt__",
"[",
"'cp.cache_file'",
"]",
"(",
"masterfile",
",",
"saltenv",
")",
"if",
"sfn",
":",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"sfn",
",",
"'r'",
")",
"as",
"src",
":",
"slines",
"=",
"src",
".",
"readlines",
"(",
")",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"minionfile",
",",
"'r'",
")",
"as",
"name_",
":",
"nlines",
"=",
"name_",
".",
"readlines",
"(",
")",
"if",
"(",
"''",
".",
"join",
"(",
"nlines",
")",
"!=",
"''",
".",
"join",
"(",
"slines",
")",
")",
":",
"bdiff",
"=",
"_binary_replace",
"(",
"minionfile",
",",
"sfn",
")",
"if",
"bdiff",
":",
"ret",
"+=",
"bdiff",
"else",
":",
"ret",
"+=",
"''",
".",
"join",
"(",
"difflib",
".",
"unified_diff",
"(",
"nlines",
",",
"slines",
",",
"minionfile",
",",
"masterfile",
")",
")",
"else",
":",
"ret",
"=",
"'Failed to copy file from master'",
"return",
"ret"
] |
return the difference between 2 versions .
|
train
| false
|
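
Setting aside the salt-specific caching in the `get_diff` record above, the diff step itself is plain `difflib`; a minimal sketch with stand-in line lists:

```python
import difflib

nlines = ["a\n", "b\n"]   # stand-in for the minion file's lines
slines = ["a\n", "c\n"]   # stand-in for the cached master file's lines
print(''.join(difflib.unified_diff(nlines, slines, 'minionfile', 'masterfile')))
```
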
2,517
|
def _lenient_lowercase(lst):
lowered = []
for value in lst:
try:
lowered.append(value.lower())
except AttributeError:
lowered.append(value)
return lowered
|
[
"def",
"_lenient_lowercase",
"(",
"lst",
")",
":",
"lowered",
"=",
"[",
"]",
"for",
"value",
"in",
"lst",
":",
"try",
":",
"lowered",
".",
"append",
"(",
"value",
".",
"lower",
"(",
")",
")",
"except",
"AttributeError",
":",
"lowered",
".",
"append",
"(",
"value",
")",
"return",
"lowered"
] |
lowercase elements of a list .
|
train
| false
|
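
A quick usage sketch of the `_lenient_lowercase` record above; non-string items pass through unchanged:

```python
def _lenient_lowercase(lst):
    lowered = []
    for value in lst:
        try:
            lowered.append(value.lower())   # works for strings
        except AttributeError:
            lowered.append(value)           # ints, None, etc. pass through
    return lowered

print(_lenient_lowercase(['FOO', 3, None, 'Bar']))  # ['foo', 3, None, 'bar']
```
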
2,518
|
def cancel_subscription(customer_id, subscription_id):
try:
customer = stripe.Customer.retrieve(customer_id)
if hasattr(customer, 'subscriptions'):
subscription = customer.subscriptions.retrieve(subscription_id)
return subscription.delete()
except stripe.error.StripeError:
pass
|
[
"def",
"cancel_subscription",
"(",
"customer_id",
",",
"subscription_id",
")",
":",
"try",
":",
"customer",
"=",
"stripe",
".",
"Customer",
".",
"retrieve",
"(",
"customer_id",
")",
"if",
"hasattr",
"(",
"customer",
",",
"'subscriptions'",
")",
":",
"subscription",
"=",
"customer",
".",
"subscriptions",
".",
"retrieve",
"(",
"subscription_id",
")",
"return",
"subscription",
".",
"delete",
"(",
")",
"except",
"stripe",
".",
"error",
".",
"StripeError",
":",
"pass"
] |
cancel stripe subscription .
|
train
| false
|
2,519
|
def get_unread_messages():
return frappe.db.sql(u"\t\tSELECT count(*)\n\t\tFROM `tabCommunication`\n\t\tWHERE communication_type in ('Chat', 'Notification')\n\t\tAND reference_doctype = 'User'\n\t\tAND reference_name = %s\n\t\tand modified >= DATE_SUB(NOW(),INTERVAL 1 YEAR)\n\t\tAND seen=0\n\t\t", (frappe.session.user,))[0][0]
|
[
"def",
"get_unread_messages",
"(",
")",
":",
"return",
"frappe",
".",
"db",
".",
"sql",
"(",
"u\"\\t\\tSELECT count(*)\\n\\t\\tFROM `tabCommunication`\\n\\t\\tWHERE communication_type in ('Chat', 'Notification')\\n\\t\\tAND reference_doctype = 'User'\\n\\t\\tAND reference_name = %s\\n\\t\\tand modified >= DATE_SUB(NOW(),INTERVAL 1 YEAR)\\n\\t\\tAND seen=0\\n\\t\\t\"",
",",
"(",
"frappe",
".",
"session",
".",
"user",
",",
")",
")",
"[",
"0",
"]",
"[",
"0",
"]"
] |
returns the count of unread chat and notification messages for the current user .
|
train
| false
|
2,520
|
def get_model_field(model, field_name):
fields = get_field_parts(model, field_name)
return (fields[(-1)] if fields else None)
|
[
"def",
"get_model_field",
"(",
"model",
",",
"field_name",
")",
":",
"fields",
"=",
"get_field_parts",
"(",
"model",
",",
"field_name",
")",
"return",
"(",
"fields",
"[",
"(",
"-",
"1",
")",
"]",
"if",
"fields",
"else",
"None",
")"
] |
get a model field .
|
train
| false
|
2,521
|
def constant_time_compare(val1, val2):
if (len(val1) != len(val2)):
return False
result = 0
if (six.PY3 and isinstance(val1, bytes) and isinstance(val2, bytes)):
for (x, y) in zip(val1, val2):
result |= (x ^ y)
else:
for (x, y) in zip(val1, val2):
result |= (ord(x) ^ ord(y))
return (result == 0)
|
[
"def",
"constant_time_compare",
"(",
"val1",
",",
"val2",
")",
":",
"if",
"(",
"len",
"(",
"val1",
")",
"!=",
"len",
"(",
"val2",
")",
")",
":",
"return",
"False",
"result",
"=",
"0",
"if",
"(",
"six",
".",
"PY3",
"and",
"isinstance",
"(",
"val1",
",",
"bytes",
")",
"and",
"isinstance",
"(",
"val2",
",",
"bytes",
")",
")",
":",
"for",
"(",
"x",
",",
"y",
")",
"in",
"zip",
"(",
"val1",
",",
"val2",
")",
":",
"result",
"|=",
"(",
"x",
"^",
"y",
")",
"else",
":",
"for",
"(",
"x",
",",
"y",
")",
"in",
"zip",
"(",
"val1",
",",
"val2",
")",
":",
"result",
"|=",
"(",
"ord",
"(",
"x",
")",
"^",
"ord",
"(",
"y",
")",
")",
"return",
"(",
"result",
"==",
"0",
")"
] |
returns true if the two strings are equal .
|
train
| true
|
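
A Python 3-only sketch of the `constant_time_compare` record above; the stdlib equivalent is `hmac.compare_digest`:

```python
def constant_time_compare(val1: bytes, val2: bytes) -> bool:
    if len(val1) != len(val2):
        return False
    result = 0
    for x, y in zip(val1, val2):
        result |= x ^ y          # accumulate differences without branching
    return result == 0

print(constant_time_compare(b'secret', b'secret'))  # True
print(constant_time_compare(b'secret', b'secreT'))  # False
```
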
2,522
|
def get_temperature_from_humidity():
return _sensehat.get_temperature_from_humidity()
|
[
"def",
"get_temperature_from_humidity",
"(",
")",
":",
"return",
"_sensehat",
".",
"get_temperature_from_humidity",
"(",
")"
] |
gets the temperature in degrees celsius from the humidity sensor .
|
train
| false
|
2,523
|
def _proj_equal(a, b, check_active=True):
equal = (((a['active'] == b['active']) or (not check_active)) and (a['kind'] == b['kind']) and (a['desc'] == b['desc']) and (a['data']['col_names'] == b['data']['col_names']) and (a['data']['row_names'] == b['data']['row_names']) and (a['data']['ncol'] == b['data']['ncol']) and (a['data']['nrow'] == b['data']['nrow']) and np.all((a['data']['data'] == b['data']['data'])))
return equal
|
[
"def",
"_proj_equal",
"(",
"a",
",",
"b",
",",
"check_active",
"=",
"True",
")",
":",
"equal",
"=",
"(",
"(",
"(",
"a",
"[",
"'active'",
"]",
"==",
"b",
"[",
"'active'",
"]",
")",
"or",
"(",
"not",
"check_active",
")",
")",
"and",
"(",
"a",
"[",
"'kind'",
"]",
"==",
"b",
"[",
"'kind'",
"]",
")",
"and",
"(",
"a",
"[",
"'desc'",
"]",
"==",
"b",
"[",
"'desc'",
"]",
")",
"and",
"(",
"a",
"[",
"'data'",
"]",
"[",
"'col_names'",
"]",
"==",
"b",
"[",
"'data'",
"]",
"[",
"'col_names'",
"]",
")",
"and",
"(",
"a",
"[",
"'data'",
"]",
"[",
"'row_names'",
"]",
"==",
"b",
"[",
"'data'",
"]",
"[",
"'row_names'",
"]",
")",
"and",
"(",
"a",
"[",
"'data'",
"]",
"[",
"'ncol'",
"]",
"==",
"b",
"[",
"'data'",
"]",
"[",
"'ncol'",
"]",
")",
"and",
"(",
"a",
"[",
"'data'",
"]",
"[",
"'nrow'",
"]",
"==",
"b",
"[",
"'data'",
"]",
"[",
"'nrow'",
"]",
")",
"and",
"np",
".",
"all",
"(",
"(",
"a",
"[",
"'data'",
"]",
"[",
"'data'",
"]",
"==",
"b",
"[",
"'data'",
"]",
"[",
"'data'",
"]",
")",
")",
")",
"return",
"equal"
] |
test if two projectors are equal .
|
train
| false
|
2,526
|
def _revoked_to_list(revs):
list_ = []
for rev in revs:
for (rev_name, props) in six.iteritems(rev):
dict_ = {}
for prop in props:
for (propname, val) in six.iteritems(prop):
if isinstance(val, datetime.datetime):
val = val.strftime('%Y-%m-%d %H:%M:%S')
dict_[propname] = val
list_.append(dict_)
return list_
|
[
"def",
"_revoked_to_list",
"(",
"revs",
")",
":",
"list_",
"=",
"[",
"]",
"for",
"rev",
"in",
"revs",
":",
"for",
"(",
"rev_name",
",",
"props",
")",
"in",
"six",
".",
"iteritems",
"(",
"rev",
")",
":",
"dict_",
"=",
"{",
"}",
"for",
"prop",
"in",
"props",
":",
"for",
"(",
"propname",
",",
"val",
")",
"in",
"six",
".",
"iteritems",
"(",
"prop",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"datetime",
".",
"datetime",
")",
":",
"val",
"=",
"val",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
")",
"dict_",
"[",
"propname",
"]",
"=",
"val",
"list_",
".",
"append",
"(",
"dict_",
")",
"return",
"list_"
] |
turn the mess of ordereddicts and lists into a list of dicts for use in the crl module .
|
train
| true
|
2,527
|
def check_status():
return (salt.utils.http.query('http://github.com', status=True)['status'] == 200)
|
[
"def",
"check_status",
"(",
")",
":",
"return",
"(",
"salt",
".",
"utils",
".",
"http",
".",
"query",
"(",
"'http://github.com'",
",",
"status",
"=",
"True",
")",
"[",
"'status'",
"]",
"==",
"200",
")"
] |
checks that a query to http://github.com returns an http 200 status .
|
train
| false
|
2,528
|
def compiled_theano_function(fn):
@functools.wraps(fn)
def wrapped(self):
try:
func = self._compiled_functions[fn.__name__]
except (AttributeError, KeyError):
if (not hasattr(self, '_compiled_functions')):
self._compiled_functions = {}
self._compiled_functions[fn.__name__] = func = fn(self)
return func
return property(wrapped)
|
[
"def",
"compiled_theano_function",
"(",
"fn",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fn",
")",
"def",
"wrapped",
"(",
"self",
")",
":",
"try",
":",
"func",
"=",
"self",
".",
"_compiled_functions",
"[",
"fn",
".",
"__name__",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"self",
",",
"'_compiled_functions'",
")",
")",
":",
"self",
".",
"_compiled_functions",
"=",
"{",
"}",
"self",
".",
"_compiled_functions",
"[",
"fn",
".",
"__name__",
"]",
"=",
"func",
"=",
"fn",
"(",
"self",
")",
"return",
"func",
"return",
"property",
"(",
"wrapped",
")"
] |
method decorator that enables lazy on-demand compilation of theano functions .
|
train
| false
|
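
The `compiled_theano_function` record above is a per-instance lazy property; a framework-free sketch of the same pattern (class and names hypothetical):

```python
import functools

def lazy_compiled(fn):
    @functools.wraps(fn)
    def wrapped(self):
        cache = self.__dict__.setdefault('_compiled_functions', {})
        if fn.__name__ not in cache:
            cache[fn.__name__] = fn(self)   # compile once, on first access
        return cache[fn.__name__]
    return property(wrapped)

class Model:
    @lazy_compiled
    def predict(self):
        print('compiling...')               # runs only on the first access
        return lambda x: x * 2

m = Model()
print(m.predict(3), m.predict(4))           # compiling... then 6 8
```
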
2,529
|
def fit_constrained_wrap(model, constraints, start_params=None, **fit_kwds):
self = model
from patsy import DesignInfo
lc = DesignInfo(self.exog_names).linear_constraint(constraints)
(R, q) = (lc.coefs, lc.constants)
(params, cov, res_constr) = fit_constrained(self, R, q, start_params=start_params, fit_kwds=fit_kwds)
res = self.fit(start_params=params, maxiter=0, warn_convergence=False)
res._results.params = params
res._results.normalized_cov_params = cov
k_constr = len(q)
res._results.df_resid += k_constr
res._results.df_model -= k_constr
res._results.constraints = lc
res._results.k_constr = k_constr
res._results.results_constrained = res_constr
return res
|
[
"def",
"fit_constrained_wrap",
"(",
"model",
",",
"constraints",
",",
"start_params",
"=",
"None",
",",
"**",
"fit_kwds",
")",
":",
"self",
"=",
"model",
"from",
"patsy",
"import",
"DesignInfo",
"lc",
"=",
"DesignInfo",
"(",
"self",
".",
"exog_names",
")",
".",
"linear_constraint",
"(",
"constraints",
")",
"(",
"R",
",",
"q",
")",
"=",
"(",
"lc",
".",
"coefs",
",",
"lc",
".",
"constants",
")",
"(",
"params",
",",
"cov",
",",
"res_constr",
")",
"=",
"fit_constrained",
"(",
"self",
",",
"R",
",",
"q",
",",
"start_params",
"=",
"start_params",
",",
"fit_kwds",
"=",
"fit_kwds",
")",
"res",
"=",
"self",
".",
"fit",
"(",
"start_params",
"=",
"params",
",",
"maxiter",
"=",
"0",
",",
"warn_convergence",
"=",
"False",
")",
"res",
".",
"_results",
".",
"params",
"=",
"params",
"res",
".",
"_results",
".",
"normalized_cov_params",
"=",
"cov",
"k_constr",
"=",
"len",
"(",
"q",
")",
"res",
".",
"_results",
".",
"df_resid",
"+=",
"k_constr",
"res",
".",
"_results",
".",
"df_model",
"-=",
"k_constr",
"res",
".",
"_results",
".",
"constraints",
"=",
"lc",
"res",
".",
"_results",
".",
"k_constr",
"=",
"k_constr",
"res",
".",
"_results",
".",
"results_constrained",
"=",
"res_constr",
"return",
"res"
] |
fit_constrained wrapper that returns a results instance; this is a development version for fit_constrained methods or for fit_constrained as a standalone function .
|
train
| false
|
2,530
|
def test_fiducial_roudtrip(fullstack_icrs, fullstack_fiducial_altaz):
aacoo = fullstack_icrs.transform_to(fullstack_fiducial_altaz)
icrs2 = aacoo.transform_to(ICRS)
npt.assert_allclose(fullstack_icrs.ra.deg, icrs2.ra.deg)
npt.assert_allclose(fullstack_icrs.dec.deg, icrs2.dec.deg)
|
[
"def",
"test_fiducial_roudtrip",
"(",
"fullstack_icrs",
",",
"fullstack_fiducial_altaz",
")",
":",
"aacoo",
"=",
"fullstack_icrs",
".",
"transform_to",
"(",
"fullstack_fiducial_altaz",
")",
"icrs2",
"=",
"aacoo",
".",
"transform_to",
"(",
"ICRS",
")",
"npt",
".",
"assert_allclose",
"(",
"fullstack_icrs",
".",
"ra",
".",
"deg",
",",
"icrs2",
".",
"ra",
".",
"deg",
")",
"npt",
".",
"assert_allclose",
"(",
"fullstack_icrs",
".",
"dec",
".",
"deg",
",",
"icrs2",
".",
"dec",
".",
"deg",
")"
] |
test the full transform from icrs <-> altaz .
|
train
| false
|
2,531
|
def profile_end(name=None):
last_name = _profiles_stack.pop()
name = (name or last_name)
if (not (name in _profiles)):
_profiles[name] = 0.0
_profiles[name] += (time.time() - _profiles_running[name])
|
[
"def",
"profile_end",
"(",
"name",
"=",
"None",
")",
":",
"last_name",
"=",
"_profiles_stack",
".",
"pop",
"(",
")",
"name",
"=",
"(",
"name",
"or",
"last_name",
")",
"if",
"(",
"not",
"(",
"name",
"in",
"_profiles",
")",
")",
":",
"_profiles",
"[",
"name",
"]",
"=",
"0.0",
"_profiles",
"[",
"name",
"]",
"+=",
"(",
"time",
".",
"time",
"(",
")",
"-",
"_profiles_running",
"[",
"name",
"]",
")"
] |
ends a profiling interval with specific name .
|
train
| false
|
2,532
|
@pytest.mark.parametrize(u'pos', test_pos_bad)
def test_slices_no_overlap(pos):
with pytest.raises(NoOverlapError):
overlap_slices((5, 5), (2, 2), pos)
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"u'pos'",
",",
"test_pos_bad",
")",
"def",
"test_slices_no_overlap",
"(",
"pos",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"NoOverlapError",
")",
":",
"overlap_slices",
"(",
"(",
"5",
",",
"5",
")",
",",
"(",
"2",
",",
"2",
")",
",",
"pos",
")"
] |
test that an error is raised if there is no overlap between arrays .
|
train
| false
|
2,533
|
def obtain_model(show_ver):
match = re.search('Cisco (.+?) .+bytes of memory', show_ver)
if match:
return match.group(1)
else:
return None
|
[
"def",
"obtain_model",
"(",
"show_ver",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"'Cisco (.+?) .+bytes of memory'",
",",
"show_ver",
")",
"if",
"match",
":",
"return",
"match",
".",
"group",
"(",
"1",
")",
"else",
":",
"return",
"None"
] |
function that processes show version information to obtain the model .
|
train
| false
|
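
A usage sketch for the `obtain_model` record above; the sample line imitates typical Cisco `show version` output (illustrative, not captured from a device):

```python
import re

def obtain_model(show_ver):
    match = re.search(r'Cisco (.+?) .+bytes of memory', show_ver)
    return match.group(1) if match else None

sample = 'Cisco 881 (MPC8300) processor with 236544K/25600K bytes of memory.'
print(obtain_model(sample))  # 881
```
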
2,534
|
def get_server(*args, **kwargs):
from django.conf import settings
server_name = getattr(settings, 'LETTUCE_TEST_SERVER', 'lettuce.django.server.DefaultServer')
(module, klass) = server_name.rsplit('.', 1)
Server = getattr(__import__(module, fromlist=[klass]), klass)
global server, django_url
server = Server(*args, **kwargs)
django_url = server.url
return server
|
[
"def",
"get_server",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"from",
"django",
".",
"conf",
"import",
"settings",
"server_name",
"=",
"getattr",
"(",
"settings",
",",
"'LETTUCE_TEST_SERVER'",
",",
"'lettuce.django.server.DefaultServer'",
")",
"(",
"module",
",",
"klass",
")",
"=",
"server_name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"Server",
"=",
"getattr",
"(",
"__import__",
"(",
"module",
",",
"fromlist",
"=",
"[",
"klass",
"]",
")",
",",
"klass",
")",
"global",
"server",
",",
"django_url",
"server",
"=",
"Server",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"django_url",
"=",
"server",
".",
"url",
"return",
"server"
] |
look up the server we are using and set it as the global .
|
train
| false
|
2,535
|
def qn(tag):
(prefix, tagroot) = tag.split(u':')
uri = nsmap[prefix]
return (u'{%s}%s' % (uri, tagroot))
|
[
"def",
"qn",
"(",
"tag",
")",
":",
"(",
"prefix",
",",
"tagroot",
")",
"=",
"tag",
".",
"split",
"(",
"u':'",
")",
"uri",
"=",
"nsmap",
"[",
"prefix",
"]",
"return",
"(",
"u'{%s}%s'",
"%",
"(",
"uri",
",",
"tagroot",
")",
")"
] |
stands for "qualified name" .
|
train
| false
|
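
A self-contained sketch of the `qn` record above with a one-entry nsmap; the real map in the source project is larger, but the `w` URI shown is the actual WordprocessingML main namespace:

```python
nsmap = {'w': 'http://schemas.openxmlformats.org/wordprocessingml/2006/main'}

def qn(tag):
    prefix, tagroot = tag.split(':')
    return '{%s}%s' % (nsmap[prefix], tagroot)   # lxml "Clark notation"

print(qn('w:body'))
# {http://schemas.openxmlformats.org/wordprocessingml/2006/main}body
```
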
2,537
|
@contextlib.contextmanager
def fake_io(write_func):
old_stdout = sys.stdout
old_stderr = sys.stderr
fake_stderr = FakeIOStream(write_func)
fake_stdout = FakeIOStream(write_func)
sys.stderr = fake_stderr
sys.stdout = fake_stdout
try:
(yield)
finally:
if (sys.stdout is fake_stdout):
sys.stdout = old_stdout
if (sys.stderr is fake_stderr):
sys.stderr = old_stderr
|
[
"@",
"contextlib",
".",
"contextmanager",
"def",
"fake_io",
"(",
"write_func",
")",
":",
"old_stdout",
"=",
"sys",
".",
"stdout",
"old_stderr",
"=",
"sys",
".",
"stderr",
"fake_stderr",
"=",
"FakeIOStream",
"(",
"write_func",
")",
"fake_stdout",
"=",
"FakeIOStream",
"(",
"write_func",
")",
"sys",
".",
"stderr",
"=",
"fake_stderr",
"sys",
".",
"stdout",
"=",
"fake_stdout",
"try",
":",
"(",
"yield",
")",
"finally",
":",
"if",
"(",
"sys",
".",
"stdout",
"is",
"fake_stdout",
")",
":",
"sys",
".",
"stdout",
"=",
"old_stdout",
"if",
"(",
"sys",
".",
"stderr",
"is",
"fake_stderr",
")",
":",
"sys",
".",
"stderr",
"=",
"old_stderr"
] |
run code with stdout and stderr replaced by fakeiostreams .
|
train
| false
|
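
The `fake_io` record above depends on a `FakeIOStream` defined elsewhere; a minimal stand-in (an assumption) makes the pattern runnable:

```python
import contextlib
import sys

class FakeIOStream:
    # hypothetical minimal stand-in: forward every write to a callback
    def __init__(self, write_func):
        self._write = write_func
    def write(self, data):
        self._write(data)
    def flush(self):
        pass

@contextlib.contextmanager
def fake_io(write_func):
    old_out, old_err = sys.stdout, sys.stderr
    sys.stdout = sys.stderr = FakeIOStream(write_func)
    try:
        yield
    finally:
        sys.stdout, sys.stderr = old_out, old_err

captured = []
with fake_io(captured.append):
    print('hello')
print(repr(''.join(captured)))  # 'hello\n'
```
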
2,540
|
def render_to_kml(*args, **kwargs):
return HttpResponse(loader.render_to_string(*args, **kwargs), content_type='application/vnd.google-earth.kml+xml')
|
[
"def",
"render_to_kml",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"HttpResponse",
"(",
"loader",
".",
"render_to_string",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
",",
"content_type",
"=",
"'application/vnd.google-earth.kml+xml'",
")"
] |
renders the response as kml .
|
train
| false
|
2,541
|
@handle_response_format
@treeio_login_required
def weblink_delete(request, weblink_id, response_format='html'):
link = get_object_or_404(WebLink, pk=weblink_id)
if (not request.user.profile.has_permission(link, mode='w')):
return user_denied(request, message="You don't have access to this Web Link")
if request.POST:
if ('delete' in request.POST):
if ('trash' in request.POST):
link.trash = True
link.save()
else:
link.delete()
return HttpResponseRedirect(reverse('document_index'))
elif ('cancel' in request.POST):
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
context = _get_default_context(request)
context.update({'link': link})
return render_to_response('documents/weblink_delete', context, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"weblink_delete",
"(",
"request",
",",
"weblink_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"link",
"=",
"get_object_or_404",
"(",
"WebLink",
",",
"pk",
"=",
"weblink_id",
")",
"if",
"(",
"not",
"request",
".",
"user",
".",
"profile",
".",
"has_permission",
"(",
"link",
",",
"mode",
"=",
"'w'",
")",
")",
":",
"return",
"user_denied",
"(",
"request",
",",
"message",
"=",
"\"You don't have access to this Web Link\"",
")",
"if",
"request",
".",
"POST",
":",
"if",
"(",
"'delete'",
"in",
"request",
".",
"POST",
")",
":",
"if",
"(",
"'trash'",
"in",
"request",
".",
"POST",
")",
":",
"link",
".",
"trash",
"=",
"True",
"link",
".",
"save",
"(",
")",
"else",
":",
"link",
".",
"delete",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'document_index'",
")",
")",
"elif",
"(",
"'cancel'",
"in",
"request",
".",
"POST",
")",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'documents_weblink_view'",
",",
"args",
"=",
"[",
"link",
".",
"id",
"]",
")",
")",
"context",
"=",
"_get_default_context",
"(",
"request",
")",
"context",
".",
"update",
"(",
"{",
"'link'",
":",
"link",
"}",
")",
"return",
"render_to_response",
"(",
"'documents/weblink_delete'",
",",
"context",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
weblink delete .
|
train
| false
|
2,542
|
def parse_rows_with(reader, parsers):
for row in reader:
(yield parse_row(row, parsers))
|
[
"def",
"parse_rows_with",
"(",
"reader",
",",
"parsers",
")",
":",
"for",
"row",
"in",
"reader",
":",
"(",
"yield",
"parse_row",
"(",
"row",
",",
"parsers",
")",
")"
] |
wrap a reader to apply the parsers to each of its rows .
|
train
| false
|
2,544
|
def show_letter(letter, text_color=None, back_color=None):
text_color = (text_color or [255, 255, 255])
back_color = (back_color or [0, 0, 0])
_sensehat.show_letter(letter, text_color, back_color)
return {'letter': letter}
|
[
"def",
"show_letter",
"(",
"letter",
",",
"text_color",
"=",
"None",
",",
"back_color",
"=",
"None",
")",
":",
"text_color",
"=",
"(",
"text_color",
"or",
"[",
"255",
",",
"255",
",",
"255",
"]",
")",
"back_color",
"=",
"(",
"back_color",
"or",
"[",
"0",
",",
"0",
",",
"0",
"]",
")",
"_sensehat",
".",
"show_letter",
"(",
"letter",
",",
"text_color",
",",
"back_color",
")",
"return",
"{",
"'letter'",
":",
"letter",
"}"
] |
displays a single letter on the led matrix .
|
train
| true
|
2,546
|
def get_distribution_id(vm_):
distributions = _query('avail', 'distributions')['DATA']
vm_image_name = config.get_cloud_config_value('image', vm_, __opts__)
distro_id = ''
for distro in distributions:
if (vm_image_name == distro['LABEL']):
distro_id = distro['DISTRIBUTIONID']
return distro_id
if (not distro_id):
raise SaltCloudNotFound("The DistributionID for the '{0}' profile could not be found.\nThe '{1}' instance could not be provisioned. The following distributions are available:\n{2}".format(vm_image_name, vm_['name'], pprint.pprint(sorted([distro['LABEL'].encode(__salt_system_encoding__) for distro in distributions]))))
|
[
"def",
"get_distribution_id",
"(",
"vm_",
")",
":",
"distributions",
"=",
"_query",
"(",
"'avail'",
",",
"'distributions'",
")",
"[",
"'DATA'",
"]",
"vm_image_name",
"=",
"config",
".",
"get_cloud_config_value",
"(",
"'image'",
",",
"vm_",
",",
"__opts__",
")",
"distro_id",
"=",
"''",
"for",
"distro",
"in",
"distributions",
":",
"if",
"(",
"vm_image_name",
"==",
"distro",
"[",
"'LABEL'",
"]",
")",
":",
"distro_id",
"=",
"distro",
"[",
"'DISTRIBUTIONID'",
"]",
"return",
"distro_id",
"if",
"(",
"not",
"distro_id",
")",
":",
"raise",
"SaltCloudNotFound",
"(",
"\"The DistributionID for the '{0}' profile could not be found.\\nThe '{1}' instance could not be provisioned. The following distributions are available:\\n{2}\"",
".",
"format",
"(",
"vm_image_name",
",",
"vm_",
"[",
"'name'",
"]",
",",
"pprint",
".",
"pprint",
"(",
"sorted",
"(",
"[",
"distro",
"[",
"'LABEL'",
"]",
".",
"encode",
"(",
"__salt_system_encoding__",
")",
"for",
"distro",
"in",
"distributions",
"]",
")",
")",
")",
")"
] |
returns the distribution id for a vm (vm_: the vm to get the distribution id for) .
|
train
| true
|
2,548
|
def thread_test(method):
@functools.wraps(method)
def Wrapper(self):
thread = TestThread(partial(method, self), partial(self.io_loop.add_callback, self.stop))
thread.start()
self.wait()
thread.MaybeRaise()
return Wrapper
|
[
"def",
"thread_test",
"(",
"method",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"method",
")",
"def",
"Wrapper",
"(",
"self",
")",
":",
"thread",
"=",
"TestThread",
"(",
"partial",
"(",
"method",
",",
"self",
")",
",",
"partial",
"(",
"self",
".",
"io_loop",
".",
"add_callback",
",",
"self",
".",
"stop",
")",
")",
"thread",
".",
"start",
"(",
")",
"self",
".",
"wait",
"(",
")",
"thread",
".",
"MaybeRaise",
"(",
")",
"return",
"Wrapper"
] |
decorator for tests which need to be run synchronously .
|
train
| false
|
2,549
|
@themes.command('remove')
@click.argument('theme_identifier')
@click.option('--force', '-f', default=False, is_flag=True, help='Removes the theme without asking for confirmation.')
def remove_theme(theme_identifier, force):
validate_theme(theme_identifier)
if ((not force) and (not click.confirm(click.style('Are you sure?', fg='magenta')))):
sys.exit(0)
theme = get_theme(theme_identifier)
click.secho('[+] Removing theme from filesystem...', fg='cyan')
shutil.rmtree(theme.path, ignore_errors=False, onerror=None)
|
[
"@",
"themes",
".",
"command",
"(",
"'remove'",
")",
"@",
"click",
".",
"argument",
"(",
"'theme_identifier'",
")",
"@",
"click",
".",
"option",
"(",
"'--force'",
",",
"'-f'",
",",
"default",
"=",
"False",
",",
"is_flag",
"=",
"True",
",",
"help",
"=",
"'Removes the theme without asking for confirmation.'",
")",
"def",
"remove_theme",
"(",
"theme_identifier",
",",
"force",
")",
":",
"validate_theme",
"(",
"theme_identifier",
")",
"if",
"(",
"(",
"not",
"force",
")",
"and",
"(",
"not",
"click",
".",
"confirm",
"(",
"click",
".",
"style",
"(",
"'Are you sure?'",
",",
"fg",
"=",
"'magenta'",
")",
")",
")",
")",
":",
"sys",
".",
"exit",
"(",
"0",
")",
"theme",
"=",
"get_theme",
"(",
"theme_identifier",
")",
"click",
".",
"secho",
"(",
"'[+] Removing theme from filesystem...'",
",",
"fg",
"=",
"'cyan'",
")",
"shutil",
".",
"rmtree",
"(",
"theme",
".",
"path",
",",
"ignore_errors",
"=",
"False",
",",
"onerror",
"=",
"None",
")"
] |
removes a theme from the filesystem .
|
train
| false
|
2,550
|
def py2ldap(val):
if isinstance(val, bool):
return (u'TRUE' if val else u'FALSE')
else:
return six.text_type(val)
|
[
"def",
"py2ldap",
"(",
"val",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"bool",
")",
":",
"return",
"(",
"u'TRUE'",
"if",
"val",
"else",
"u'FALSE'",
")",
"else",
":",
"return",
"six",
".",
"text_type",
"(",
"val",
")"
] |
type convert a python value to a type accepted by ldap .
|
train
| false
|
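
A Python 3 sketch of the `py2ldap` record above (`str` replacing `six.text_type`); `bool` is checked before the generic case because `True` would otherwise stringify as `'True'`:

```python
def py2ldap(val):
    if isinstance(val, bool):
        return u'TRUE' if val else u'FALSE'   # LDAP boolean syntax
    return str(val)

print(py2ldap(True), py2ldap(0), py2ldap('cn=admin'))  # TRUE 0 cn=admin
```
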
2,551
|
def test_almost_but_not_quite_daophot():
lines = ['# some header info', "#F header info beginning with 'F'", '1 2 3', '4 5 6', '7 8 9']
dat = ascii.read(lines)
assert (len(dat) == 3)
|
[
"def",
"test_almost_but_not_quite_daophot",
"(",
")",
":",
"lines",
"=",
"[",
"'# some header info'",
",",
"\"#F header info beginning with 'F'\"",
",",
"'1 2 3'",
",",
"'4 5 6'",
",",
"'7 8 9'",
"]",
"dat",
"=",
"ascii",
".",
"read",
"(",
"lines",
")",
"assert",
"(",
"len",
"(",
"dat",
")",
"==",
"3",
")"
] |
regression test for #3319 .
|
train
| false
|
2,554
|
def read_version():
variables = {}
with open(VERSION_FILE) as f:
exec compile(f.read(), 'version.py', 'exec') in variables
return variables['VERSION']
|
[
"def",
"read_version",
"(",
")",
":",
"variables",
"=",
"{",
"}",
"with",
"open",
"(",
"VERSION_FILE",
")",
"as",
"f",
":",
"exec",
"compile",
"(",
"f",
".",
"read",
"(",
")",
",",
"'version.py'",
",",
"'exec'",
")",
"in",
"variables",
"return",
"variables",
"[",
"'VERSION'",
"]"
] |
read the version directly from errbot/version.py .
|
train
| false
|
2,555
|
def getKeyA(row, column, prefix=''):
return ('%sa%s%s' % (prefix, row, column))
|
[
"def",
"getKeyA",
"(",
"row",
",",
"column",
",",
"prefix",
"=",
"''",
")",
":",
"return",
"(",
"'%sa%s%s'",
"%",
"(",
"prefix",
",",
"row",
",",
"column",
")",
")"
] |
get the 'a' format key string from row & column .
|
train
| false
|
2,556
|
def getent(refresh=False):
if (('group.getent' in __context__) and (not refresh)):
return __context__['group.getent']
ret = []
pythoncom.CoInitialize()
nt = win32com.client.Dispatch('AdsNameSpaces')
results = nt.GetObject('', 'WinNT://.')
results.Filter = ['group']
for result in results:
member_list = []
for member in result.members():
member_list.append(member.AdsPath.replace('WinNT://', '').replace('/', '\\').encode('ascii', 'backslashreplace'))
group = {'gid': __salt__['file.group_to_gid'](result.name), 'members': member_list, 'name': result.name, 'passwd': 'x'}
ret.append(group)
__context__['group.getent'] = ret
return ret
|
[
"def",
"getent",
"(",
"refresh",
"=",
"False",
")",
":",
"if",
"(",
"(",
"'group.getent'",
"in",
"__context__",
")",
"and",
"(",
"not",
"refresh",
")",
")",
":",
"return",
"__context__",
"[",
"'group.getent'",
"]",
"ret",
"=",
"[",
"]",
"pythoncom",
".",
"CoInitialize",
"(",
")",
"nt",
"=",
"win32com",
".",
"client",
".",
"Dispatch",
"(",
"'AdsNameSpaces'",
")",
"results",
"=",
"nt",
".",
"GetObject",
"(",
"''",
",",
"'WinNT://.'",
")",
"results",
".",
"Filter",
"=",
"[",
"'group'",
"]",
"for",
"result",
"in",
"results",
":",
"member_list",
"=",
"[",
"]",
"for",
"member",
"in",
"result",
".",
"members",
"(",
")",
":",
"member_list",
".",
"append",
"(",
"member",
".",
"AdsPath",
".",
"replace",
"(",
"'WinNT://'",
",",
"''",
")",
".",
"replace",
"(",
"'/'",
",",
"'\\\\'",
")",
".",
"encode",
"(",
"'ascii'",
",",
"'backslashreplace'",
")",
")",
"group",
"=",
"{",
"'gid'",
":",
"__salt__",
"[",
"'file.group_to_gid'",
"]",
"(",
"result",
".",
"name",
")",
",",
"'members'",
":",
"member_list",
",",
"'name'",
":",
"result",
".",
"name",
",",
"'passwd'",
":",
"'x'",
"}",
"ret",
".",
"append",
"(",
"group",
")",
"__context__",
"[",
"'group.getent'",
"]",
"=",
"ret",
"return",
"ret"
] |
return the list of all info for all groups .
|
train
| false
|
2,558
|
def extract_subdomain(host=None, base_domain=None):
if (host is None):
host = request.host
if (base_domain is None):
base_domain = g.domain
if (not host):
return ''
end_index = (host.find(base_domain) - 1)
if (end_index < 0):
return ''
return host[:end_index]
|
[
"def",
"extract_subdomain",
"(",
"host",
"=",
"None",
",",
"base_domain",
"=",
"None",
")",
":",
"if",
"(",
"host",
"is",
"None",
")",
":",
"host",
"=",
"request",
".",
"host",
"if",
"(",
"base_domain",
"is",
"None",
")",
":",
"base_domain",
"=",
"g",
".",
"domain",
"if",
"(",
"not",
"host",
")",
":",
"return",
"''",
"end_index",
"=",
"(",
"host",
".",
"find",
"(",
"base_domain",
")",
"-",
"1",
")",
"if",
"(",
"end_index",
"<",
"0",
")",
":",
"return",
"''",
"return",
"host",
"[",
":",
"end_index",
"]"
] |
try to extract a subdomain from the request .
|
train
| false
|
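
The `extract_subdomain` record above minus the flask `request`/`g` fallbacks; a quick check with illustrative hostnames:

```python
def extract_subdomain(host, base_domain):
    if not host:
        return ''
    end_index = host.find(base_domain) - 1   # -1 drops the joining dot
    return host[:end_index] if end_index >= 0 else ''

print(extract_subdomain('blog.example.com', 'example.com'))  # blog
print(extract_subdomain('example.com', 'example.com'))       # (empty string)
```
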
2,560
|
def _file_url(path):
return urlutils.file_url(str(path))
|
[
"def",
"_file_url",
"(",
"path",
")",
":",
"return",
"urlutils",
".",
"file_url",
"(",
"str",
"(",
"path",
")",
")"
] |
return a file:// url for the given localpath .
|
train
| false
|