id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
28,508 | @events.route('/<int:event_id>/role-invite/<hash>', methods=['GET', 'POST'])
def user_role_invite(event_id, hash):
event = DataGetter.get_event(event_id)
user = current_user
role_invite = DataGetter.get_event_role_invite(event.id, hash, email=user.email)
if role_invite:
if role_invite.has_expired():
delete_from_db(role_invite, 'Deleted RoleInvite')
flash('Sorry, the invitation link has expired.', 'error')
return redirect(url_for('.details_view', event_id=event.id))
if user.has_role(event.id):
flash('You have already been assigned a Role in the Event.', 'warning')
return redirect(url_for('events.details_view', event_id=event_id))
role = role_invite.role
data = dict()
data['user_email'] = role_invite.email
data['user_role'] = role.name
DataManager.add_role_to_event(data, event.id)
delete_from_db(role_invite, 'Deleted RoleInvite')
flash(('You have been added as a %s' % role.title_name))
return redirect(url_for('.details_view', event_id=event.id))
else:
abort(404)
| [
"@",
"events",
".",
"route",
"(",
"'/<int:event_id>/role-invite/<hash>'",
",",
"methods",
"=",
"[",
"'GET'",
",",
"'POST'",
"]",
")",
"def",
"user_role_invite",
"(",
"event_id",
",",
"hash",
")",
":",
"event",
"=",
"DataGetter",
".",
"get_event",
"(",
"event... | accept user-role invite for the event . | train | false |
28,509 | def _override_all_archs(_config_vars):
if ('ARCHFLAGS' in os.environ):
arch = os.environ['ARCHFLAGS']
for cv in _UNIVERSAL_CONFIG_VARS:
if ((cv in _config_vars) and ('-arch' in _config_vars[cv])):
flags = _config_vars[cv]
flags = re.sub('-arch\\s+\\w+\\s', ' ', flags)
flags = ((flags + ' ') + arch)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
| [
"def",
"_override_all_archs",
"(",
"_config_vars",
")",
":",
"if",
"(",
"'ARCHFLAGS'",
"in",
"os",
".",
"environ",
")",
":",
"arch",
"=",
"os",
".",
"environ",
"[",
"'ARCHFLAGS'",
"]",
"for",
"cv",
"in",
"_UNIVERSAL_CONFIG_VARS",
":",
"if",
"(",
"(",
"cv... | allow override of all archs with archflags env var . | train | false |
28,510 | def test_isfile():
schema = vol.Schema(cv.isfile)
fake_file = 'this-file-does-not.exist'
assert (not os.path.isfile(fake_file))
for value in ('invalid', None, (-1), 0, 80000, fake_file):
with pytest.raises(vol.Invalid):
schema(value)
with patch('os.path.isfile', Mock(return_value=True)):
with patch('os.access', Mock(return_value=True)):
schema('test.txt')
| [
"def",
"test_isfile",
"(",
")",
":",
"schema",
"=",
"vol",
".",
"Schema",
"(",
"cv",
".",
"isfile",
")",
"fake_file",
"=",
"'this-file-does-not.exist'",
"assert",
"(",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"fake_file",
")",
")",
"for",
"value",
... | validate that the value is an existing file . | train | false |
28,511 | def named_tuple_factory(colnames, rows):
clean_column_names = map(_clean_column_name, colnames)
try:
Row = namedtuple('Row', clean_column_names)
except Exception:
clean_column_names = list(map(_clean_column_name, colnames))
log.warning(('Failed creating named tuple for results with column names %s (cleaned: %s) (see Python \'namedtuple\' documentation for details on name rules). Results will be returned with positional names. Avoid this by choosing different names, using SELECT "<col name>" AS aliases, or specifying a different row_factory on your Session' % (colnames, clean_column_names)))
Row = namedtuple('Row', _sanitize_identifiers(clean_column_names))
return [Row(*row) for row in rows]
| [
"def",
"named_tuple_factory",
"(",
"colnames",
",",
"rows",
")",
":",
"clean_column_names",
"=",
"map",
"(",
"_clean_column_name",
",",
"colnames",
")",
"try",
":",
"Row",
"=",
"namedtuple",
"(",
"'Row'",
",",
"clean_column_names",
")",
"except",
"Exception",
... | returns each row as a namedtuple <URL#collections . | train | false |
28,512 | def FindMissingImages(referenced_images, asset_images):
images = set(asset_images)
for ref in referenced_images:
if (ref not in images):
print ('%s does not exist' % ref)
if (ImageName2X(ref) not in images):
print ('%s does not exist' % ref_2x)
| [
"def",
"FindMissingImages",
"(",
"referenced_images",
",",
"asset_images",
")",
":",
"images",
"=",
"set",
"(",
"asset_images",
")",
"for",
"ref",
"in",
"referenced_images",
":",
"if",
"(",
"ref",
"not",
"in",
"images",
")",
":",
"print",
"(",
"'%s does not ... | check that every referenced image is found in asset_images . | train | false |
28,514 | def get_sys_meta_prefix(server_type):
return ('x-%s-%s-' % (server_type.lower(), 'sysmeta'))
| [
"def",
"get_sys_meta_prefix",
"(",
"server_type",
")",
":",
"return",
"(",
"'x-%s-%s-'",
"%",
"(",
"server_type",
".",
"lower",
"(",
")",
",",
"'sysmeta'",
")",
")"
] | returns the prefix for system metadata headers for given server type . | train | false |
28,515 | def access_log_level(lvl):
if (lvl not in (logging.DEBUG, logging.WARN, logging.ERROR, logging.CRITICAL, logging.FATAL)):
raise ValueError(('%s is not a valid logging level' % (lvl,)))
def deco_view(func):
func.access_log_level = lvl
return func
return deco_view
| [
"def",
"access_log_level",
"(",
"lvl",
")",
":",
"if",
"(",
"lvl",
"not",
"in",
"(",
"logging",
".",
"DEBUG",
",",
"logging",
".",
"WARN",
",",
"logging",
".",
"ERROR",
",",
"logging",
".",
"CRITICAL",
",",
"logging",
".",
"FATAL",
")",
")",
":",
"... | decorator to set the access log level of a view function . | train | false |
28,516 | def create_cloudformation_stack(template_url, parameters, aws_config):
stack_name = (CLOUDFORMATION_STACK_NAME + str(int(time.time())))
output = aws_output(['cloudformation', 'create-stack', '--disable-rollback', '--parameters', json.dumps(parameters), '--stack-name', stack_name, '--template-url', template_url], aws_config)
output = json.loads(output)
stack_id = output['StackId']
Message.new(cloudformation_stack_id=stack_id)
return wait_for_stack_status(stack_id, 'CREATE_COMPLETE', aws_config)
| [
"def",
"create_cloudformation_stack",
"(",
"template_url",
",",
"parameters",
",",
"aws_config",
")",
":",
"stack_name",
"=",
"(",
"CLOUDFORMATION_STACK_NAME",
"+",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
")",
"output",
"=",
"aws_out... | create a cloudformation stack . | train | false |
28,519 | def random_combination_with_replacement(iterable, r):
pool = tuple(iterable)
n = len(pool)
indices = sorted((random.randrange(n) for i in xrange(r)))
return tuple((pool[i] for i in indices))
| [
"def",
"random_combination_with_replacement",
"(",
"iterable",
",",
"r",
")",
":",
"pool",
"=",
"tuple",
"(",
"iterable",
")",
"n",
"=",
"len",
"(",
"pool",
")",
"indices",
"=",
"sorted",
"(",
"(",
"random",
".",
"randrange",
"(",
"n",
")",
"for",
"i",... | random_combination -> tuple arguments: iterable: an iterable . | train | true |
28,520 | def address_from_address_family(address_family, single_address):
name = single_address.name
if (name is None):
name = basename(single_address.directory)
if (name not in address_family.objects_by_name):
_raise_did_you_mean(address_family, single_address.name)
return Addresses(tuple([Address(address_family.namespace, name)]))
| [
"def",
"address_from_address_family",
"(",
"address_family",
",",
"single_address",
")",
":",
"name",
"=",
"single_address",
".",
"name",
"if",
"(",
"name",
"is",
"None",
")",
":",
"name",
"=",
"basename",
"(",
"single_address",
".",
"directory",
")",
"if",
... | given an addressfamily and a singleaddress . | train | false |
28,522 | def _get_validation_exclusions(obj, pk=None, slug_field=None, lookup_field=None):
include = []
if pk:
pk_field = obj._meta.pk
while pk_field.rel:
pk_field = pk_field.rel.to._meta.pk
include.append(pk_field.name)
if slug_field:
include.append(slug_field)
if (lookup_field and (lookup_field != 'pk')):
include.append(lookup_field)
return [field.name for field in obj._meta.fields if (field.name not in include)]
| [
"def",
"_get_validation_exclusions",
"(",
"obj",
",",
"pk",
"=",
"None",
",",
"slug_field",
"=",
"None",
",",
"lookup_field",
"=",
"None",
")",
":",
"include",
"=",
"[",
"]",
"if",
"pk",
":",
"pk_field",
"=",
"obj",
".",
"_meta",
".",
"pk",
"while",
... | given a model instance . | train | false |
28,523 | def is_worse_than(lhs, rhs):
return (statuses.index(lhs) < statuses.index(rhs))
| [
"def",
"is_worse_than",
"(",
"lhs",
",",
"rhs",
")",
":",
"return",
"(",
"statuses",
".",
"index",
"(",
"lhs",
")",
"<",
"statuses",
".",
"index",
"(",
"rhs",
")",
")"
] | compare two statuses and return a boolean indicating if the lhs status is worse than the rhs status . | train | false |
28,524 | def bessel(N, Wn, btype='low', analog=False, output='ba', norm='phase'):
return iirfilter(N, Wn, btype=btype, analog=analog, output=output, ftype=('bessel_' + norm))
| [
"def",
"bessel",
"(",
"N",
",",
"Wn",
",",
"btype",
"=",
"'low'",
",",
"analog",
"=",
"False",
",",
"output",
"=",
"'ba'",
",",
"norm",
"=",
"'phase'",
")",
":",
"return",
"iirfilter",
"(",
"N",
",",
"Wn",
",",
"btype",
"=",
"btype",
",",
"analog... | bessel/thomson digital and analog filter design . | train | false |
28,525 | def huber_loss(x, t, delta):
return HuberLoss(delta=delta)(x, t)
| [
"def",
"huber_loss",
"(",
"x",
",",
"t",
",",
"delta",
")",
":",
"return",
"HuberLoss",
"(",
"delta",
"=",
"delta",
")",
"(",
"x",
",",
"t",
")"
] | loss function which is less sensitive to outliers in data than mse . | train | false |
28,526 | @register(u'beginning-of-line')
def beginning_of_line(event):
buff = event.current_buffer
buff.cursor_position += buff.document.get_start_of_line_position(after_whitespace=False)
| [
"@",
"register",
"(",
"u'beginning-of-line'",
")",
"def",
"beginning_of_line",
"(",
"event",
")",
":",
"buff",
"=",
"event",
".",
"current_buffer",
"buff",
".",
"cursor_position",
"+=",
"buff",
".",
"document",
".",
"get_start_of_line_position",
"(",
"after_whites... | move to the start of the current line . | train | true |
28,527 | @blueprint.route('/resources/<resource>/meters/<meter>')
def list_samples_by_resource(resource, meter):
return _list_samples(resource=resource, meter=meter, project=acl.get_limited_to_project(flask.request.headers))
| [
"@",
"blueprint",
".",
"route",
"(",
"'/resources/<resource>/meters/<meter>'",
")",
"def",
"list_samples_by_resource",
"(",
"resource",
",",
"meter",
")",
":",
"return",
"_list_samples",
"(",
"resource",
"=",
"resource",
",",
"meter",
"=",
"meter",
",",
"project",... | return a list of raw samples for the resource . | train | false |
28,528 | def set_namespace_gateway(port_dev, gateway_ip):
if (not port_dev.namespace):
tools.fail('tests should not change test machine gateway')
port_dev.route.add_gateway(gateway_ip)
| [
"def",
"set_namespace_gateway",
"(",
"port_dev",
",",
"gateway_ip",
")",
":",
"if",
"(",
"not",
"port_dev",
".",
"namespace",
")",
":",
"tools",
".",
"fail",
"(",
"'tests should not change test machine gateway'",
")",
"port_dev",
".",
"route",
".",
"add_gateway",
... | set gateway for the namespace associated to the port . | train | false |
28,529 | def s3_debug(message, value=None):
output = ('S3 Debug: %s' % s3_unicode(message))
if value:
output = ('%s: %s' % (output, s3_unicode(value)))
try:
print >>sys.stderr, output
except:
print >>sys.stderr, 'Debug crashed'
| [
"def",
"s3_debug",
"(",
"message",
",",
"value",
"=",
"None",
")",
":",
"output",
"=",
"(",
"'S3 Debug: %s'",
"%",
"s3_unicode",
"(",
"message",
")",
")",
"if",
"value",
":",
"output",
"=",
"(",
"'%s: %s'",
"%",
"(",
"output",
",",
"s3_unicode",
"(",
... | debug function provide an easy . | train | false |
28,531 | def co_code_findloadednames(co):
from opcode import HAVE_ARGUMENT, opmap
hasloadname = (opmap['LOAD_NAME'], opmap['LOAD_GLOBAL'], opmap['LOAD_FAST'])
code = co.co_code
nargs = co.co_argcount
len_co_names = len(co.co_names)
indexset = {}
n = len(code)
i = 0
while (i < n):
c = code[i]
op = ord(c)
i = (i + 1)
if (op >= HAVE_ARGUMENT):
if (op in hasloadname):
oparg = (ord(code[i]) + (ord(code[(i + 1)]) * 256))
name = co.co_names[oparg]
indexset[name] = 1
if (len(indexset) >= len_co_names):
break
i = (i + 2)
for name in co.co_varnames:
try:
del indexset[name]
except KeyError:
pass
return indexset
| [
"def",
"co_code_findloadednames",
"(",
"co",
")",
":",
"from",
"opcode",
"import",
"HAVE_ARGUMENT",
",",
"opmap",
"hasloadname",
"=",
"(",
"opmap",
"[",
"'LOAD_NAME'",
"]",
",",
"opmap",
"[",
"'LOAD_GLOBAL'",
"]",
",",
"opmap",
"[",
"'LOAD_FAST'",
"]",
")",
... | find in the code of a code object . | train | false |
28,532 | def MaxPool2d(net, filter_size=(2, 2), strides=None, padding='SAME', name='maxpool'):
if (strides is None):
strides = filter_size
net = PoolLayer(net, ksize=[1, filter_size[0], filter_size[1], 1], strides=[1, strides[0], strides[1], 1], padding=padding, pool=tf.nn.max_pool, name=name)
return net
| [
"def",
"MaxPool2d",
"(",
"net",
",",
"filter_size",
"=",
"(",
"2",
",",
"2",
")",
",",
"strides",
"=",
"None",
",",
"padding",
"=",
"'SAME'",
",",
"name",
"=",
"'maxpool'",
")",
":",
"if",
"(",
"strides",
"is",
"None",
")",
":",
"strides",
"=",
"... | wrapper for :class:poollayer . | train | false |
28,533 | def _check_delayed_ssp(container):
if ((container.proj is True) or all((p['active'] for p in container.info['projs']))):
raise RuntimeError('Projs are already applied. Please initialize the data with proj set to False.')
elif (len(container.info['projs']) < 1):
raise RuntimeError('No projs found in evoked.')
| [
"def",
"_check_delayed_ssp",
"(",
"container",
")",
":",
"if",
"(",
"(",
"container",
".",
"proj",
"is",
"True",
")",
"or",
"all",
"(",
"(",
"p",
"[",
"'active'",
"]",
"for",
"p",
"in",
"container",
".",
"info",
"[",
"'projs'",
"]",
")",
")",
")",
... | aux function to be used for interactive ssp selection . | train | false |
28,534 | def pportInError():
if (port.DlPortReadPortUchar(statusRegAdrs) & 8):
return 1
else:
return 0
| [
"def",
"pportInError",
"(",
")",
":",
"if",
"(",
"port",
".",
"DlPortReadPortUchar",
"(",
"statusRegAdrs",
")",
"&",
"8",
")",
":",
"return",
"1",
"else",
":",
"return",
"0"
] | input from error pin . | train | false |
28,535 | def _read_fcfg_production(input, fstruct_reader):
return _read_production(input, fstruct_reader)
| [
"def",
"_read_fcfg_production",
"(",
"input",
",",
"fstruct_reader",
")",
":",
"return",
"_read_production",
"(",
"input",
",",
"fstruct_reader",
")"
] | return a list of feature-based productions . | train | false |
28,536 | def remote_docker_compose(client_ip, docker_host, compose_file_path, *args):
return remote_command(client_ip, (('COMPOSE_HTTP_TIMEOUT=360', 'DOCKER_TLS_VERIFY=1', 'DOCKER_HOST={}'.format(docker_host), 'docker-compose', '--file', compose_file_path) + args))
| [
"def",
"remote_docker_compose",
"(",
"client_ip",
",",
"docker_host",
",",
"compose_file_path",
",",
"*",
"args",
")",
":",
"return",
"remote_command",
"(",
"client_ip",
",",
"(",
"(",
"'COMPOSE_HTTP_TIMEOUT=360'",
",",
"'DOCKER_TLS_VERIFY=1'",
",",
"'DOCKER_HOST={}'"... | run docker-compose on client_ip . | train | false |
28,538 | def parse_multipart_files(request):
(_, pdict) = cgi.parse_header(request.headers[u'Content-Type'])
buf = BytesIO()
buf.write(request.body)
buf.seek(0)
data = cgi.parse_multipart(buf, pdict)
(_, filedata) = data.popitem()
buf.close()
buf = BytesIO()
buf.write(u''.join(filedata))
buf.seek(0)
return buf
| [
"def",
"parse_multipart_files",
"(",
"request",
")",
":",
"(",
"_",
",",
"pdict",
")",
"=",
"cgi",
".",
"parse_header",
"(",
"request",
".",
"headers",
"[",
"u'Content-Type'",
"]",
")",
"buf",
"=",
"BytesIO",
"(",
")",
"buf",
".",
"write",
"(",
"reques... | given a prepared reqest . | train | false |
28,540 | def on_show(request, page_name):
revision_id = request.args.get('rev', type=int)
query = RevisionedPage.query.filter_by(name=page_name)
if revision_id:
query = query.filter_by(revision_id=revision_id)
revision_requested = True
else:
query = query.order_by(RevisionedPage.revision_id.desc())
revision_requested = False
page = query.first()
if (page is None):
return page_missing(request, page_name, revision_requested)
return Response(generate_template('action_show.html', page=page))
| [
"def",
"on_show",
"(",
"request",
",",
"page_name",
")",
":",
"revision_id",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'rev'",
",",
"type",
"=",
"int",
")",
"query",
"=",
"RevisionedPage",
".",
"query",
".",
"filter_by",
"(",
"name",
"=",
"page_na... | displays the page the user requests . | train | true |
28,542 | def cinder_docstring_one_line(physical_line):
pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE])
end = max([(physical_line[(-4):(-1)] == i) for i in DOCSTRING_TRIPLE])
if ((pos != (-1)) and end and (len(physical_line) > (pos + 4))):
if ((physical_line[(-5)] != '.') and physical_line):
return (pos, 'CINDER N402: one line docstring needs a period')
| [
"def",
"cinder_docstring_one_line",
"(",
"physical_line",
")",
":",
"pos",
"=",
"max",
"(",
"[",
"physical_line",
".",
"find",
"(",
"i",
")",
"for",
"i",
"in",
"DOCSTRING_TRIPLE",
"]",
")",
"end",
"=",
"max",
"(",
"[",
"(",
"physical_line",
"[",
"(",
"... | check one line docstring end . | train | false |
28,543 | def _init_media_form(form_cls, request=None, obj=None, ignore_fields=()):
post_data = None
initial = None
if request:
initial = {'locale': request.LANGUAGE_CODE}
file_data = None
if (request.method == 'POST'):
file_data = request.FILES
post_data = request.POST.copy()
if (obj and ignore_fields):
for f in ignore_fields:
post_data[f] = getattr(obj, f)
return form_cls(post_data, file_data, instance=obj, initial=initial, is_ajax=False)
| [
"def",
"_init_media_form",
"(",
"form_cls",
",",
"request",
"=",
"None",
",",
"obj",
"=",
"None",
",",
"ignore_fields",
"=",
"(",
")",
")",
":",
"post_data",
"=",
"None",
"initial",
"=",
"None",
"if",
"request",
":",
"initial",
"=",
"{",
"'locale'",
":... | initializes the media form with an image/video instance and posted data . | train | false |
28,545 | def solow_model(t, k, g, n, s, alpha, delta):
k_dot = ((s * (k ** alpha)) - (((g + n) + delta) * k))
return k_dot
| [
"def",
"solow_model",
"(",
"t",
",",
"k",
",",
"g",
",",
"n",
",",
"s",
",",
"alpha",
",",
"delta",
")",
":",
"k_dot",
"=",
"(",
"(",
"s",
"*",
"(",
"k",
"**",
"alpha",
")",
")",
"-",
"(",
"(",
"(",
"g",
"+",
"n",
")",
"+",
"delta",
")"... | equation of motion for capital stock . | train | false |
28,546 | def _fssys(name, value=None, log_lvl=None, log_msg=None):
fspath = _fspath()
if (not fspath):
return False
else:
return _sysfs_attr([fspath, name], value, log_lvl, log_msg)
| [
"def",
"_fssys",
"(",
"name",
",",
"value",
"=",
"None",
",",
"log_lvl",
"=",
"None",
",",
"log_msg",
"=",
"None",
")",
":",
"fspath",
"=",
"_fspath",
"(",
")",
"if",
"(",
"not",
"fspath",
")",
":",
"return",
"False",
"else",
":",
"return",
"_sysfs... | simple wrapper to interface with bcache sysfs . | train | true |
28,547 | def _get_reg_software():
ignore_list = [u'AddressBook', u'Connection Manager', u'DirectDrawEx', u'Fontcore', u'IE40', u'IE4Data', u'IE5BAKEX', u'IEData', u'MobileOptionPack', u'SchedulingAgent', u'WIC', u'Not Found', u'(value not set)', u'', None]
reg_software = {}
hive = u'HKLM'
key = u'Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall'
def update(hive, key, reg_key, use_32bit):
d_name = u''
d_vers = u''
d_name = __salt__[u'reg.read_value'](hive, u'{0}\\{1}'.format(key, reg_key), u'DisplayName', use_32bit)[u'vdata']
d_vers = __salt__[u'reg.read_value'](hive, u'{0}\\{1}'.format(key, reg_key), u'DisplayVersion', use_32bit)[u'vdata']
if (d_name not in ignore_list):
reg_software.update({d_name: str(d_vers)})
for reg_key in __salt__[u'reg.list_keys'](hive, key):
update(hive, key, reg_key, False)
for reg_key in __salt__[u'reg.list_keys'](hive, key, True):
update(hive, key, reg_key, True)
return reg_software
| [
"def",
"_get_reg_software",
"(",
")",
":",
"ignore_list",
"=",
"[",
"u'AddressBook'",
",",
"u'Connection Manager'",
",",
"u'DirectDrawEx'",
",",
"u'Fontcore'",
",",
"u'IE40'",
",",
"u'IE4Data'",
",",
"u'IE5BAKEX'",
",",
"u'IEData'",
",",
"u'MobileOptionPack'",
",",
... | this searches the uninstall keys in the registry to find a match in the sub keys . | train | false |
28,548 | def getIsRadianClose(firstRadian, secondRadian):
return (abs((math.pi - abs((math.pi - ((firstRadian - secondRadian) % (math.pi + math.pi)))))) < 1e-06)
| [
"def",
"getIsRadianClose",
"(",
"firstRadian",
",",
"secondRadian",
")",
":",
"return",
"(",
"abs",
"(",
"(",
"math",
".",
"pi",
"-",
"abs",
"(",
"(",
"math",
".",
"pi",
"-",
"(",
"(",
"firstRadian",
"-",
"secondRadian",
")",
"%",
"(",
"math",
".",
... | determine if the firstradian is close to the secondradian . | train | false |
28,549 | @register.simple_tag
def get_location_links(unit):
ret = []
if (len(unit.location) == 0):
return u''
if unit.location.isdigit():
return (_(u'unit ID %s') % unit.location)
for location in unit.location.split(u','):
location = location.strip()
if (location == u''):
continue
location_parts = location.split(u':')
if (len(location_parts) == 2):
(filename, line) = location_parts
else:
filename = location_parts[0]
line = 0
link = unit.translation.subproject.get_repoweb_link(filename, line)
if (link is None):
ret.append(escape(location))
else:
ret.append(u'<a href="{0}">{1}</a>'.format(escape(link), escape(location)))
return mark_safe(u'\n'.join(ret))
| [
"@",
"register",
".",
"simple_tag",
"def",
"get_location_links",
"(",
"unit",
")",
":",
"ret",
"=",
"[",
"]",
"if",
"(",
"len",
"(",
"unit",
".",
"location",
")",
"==",
"0",
")",
":",
"return",
"u''",
"if",
"unit",
".",
"location",
".",
"isdigit",
... | generates links to source files where translation was used . | train | false |
28,553 | def read_pref_mode_from_name(name):
return _MONGOS_MODES.index(name)
| [
"def",
"read_pref_mode_from_name",
"(",
"name",
")",
":",
"return",
"_MONGOS_MODES",
".",
"index",
"(",
"name",
")"
] | get the read preference mode from mongos/uri name . | train | false |
28,554 | def footprint(sobject):
n = 0
for a in sobject.__keylist__:
v = getattr(sobject, a)
if (v is None):
continue
if isinstance(v, Object):
n += footprint(v)
continue
if hasattr(v, '__len__'):
if len(v):
n += 1
continue
n += 1
return n
| [
"def",
"footprint",
"(",
"sobject",
")",
":",
"n",
"=",
"0",
"for",
"a",
"in",
"sobject",
".",
"__keylist__",
":",
"v",
"=",
"getattr",
"(",
"sobject",
",",
"a",
")",
"if",
"(",
"v",
"is",
"None",
")",
":",
"continue",
"if",
"isinstance",
"(",
"v... | get the i{virtual footprint} of the object . | train | true |
28,556 | def GenerateClientLoginAuthToken(http_body):
token = get_client_login_token(http_body)
if token:
return ('GoogleLogin auth=%s' % token)
return None
| [
"def",
"GenerateClientLoginAuthToken",
"(",
"http_body",
")",
":",
"token",
"=",
"get_client_login_token",
"(",
"http_body",
")",
"if",
"token",
":",
"return",
"(",
"'GoogleLogin auth=%s'",
"%",
"token",
")",
"return",
"None"
] | returns the token value to use in authorization headers . | train | false |
28,557 | @task
def setup_python_macos():
HOMEBREW_URL = 'https://raw.githubusercontent.com/Homebrew/install/master/install'
local(('/usr/bin/ruby -e "$(curl -fsSL %s)"' % HOMEBREW_URL))
local('echo export PATH=/usr/local/bin:/usr/local/sbin:$PATH >> ~/.bash_profile')
local('brew install python')
local('brew update')
local('pip install virtualenvwrapper')
local('echo source /usr/local/bin/virtualenvwrapper.sh >> ~/.bash_profile')
| [
"@",
"task",
"def",
"setup_python_macos",
"(",
")",
":",
"HOMEBREW_URL",
"=",
"'https://raw.githubusercontent.com/Homebrew/install/master/install'",
"local",
"(",
"(",
"'/usr/bin/ruby -e \"$(curl -fsSL %s)\"'",
"%",
"HOMEBREW_URL",
")",
")",
"local",
"(",
"'echo export PATH=/... | setup python in macos via homebrew . | train | false |
28,558 | @contextlib.contextmanager
def expect_warnings_on(db, *messages, **kw):
spec = db_spec(db)
if (isinstance(db, util.string_types) and (not spec(config._current))):
(yield)
else:
with expect_warnings(*messages, **kw):
(yield)
| [
"@",
"contextlib",
".",
"contextmanager",
"def",
"expect_warnings_on",
"(",
"db",
",",
"*",
"messages",
",",
"**",
"kw",
")",
":",
"spec",
"=",
"db_spec",
"(",
"db",
")",
"if",
"(",
"isinstance",
"(",
"db",
",",
"util",
".",
"string_types",
")",
"and",... | context manager which expects one or more warnings on specific dialects . | train | false |
28,559 | def extract_configs(configs, parent_dir):
config_dir = os.path.join(parent_dir, 'configs')
if os.path.isdir(configs):
shutil.copytree(configs, config_dir, symlinks=True)
elif tarfile.is_tarfile(configs):
with tarfile.open(configs, 'r') as tar:
tar.extractall(config_dir)
else:
raise errors.Error('Unknown configurations file type')
return config_dir
| [
"def",
"extract_configs",
"(",
"configs",
",",
"parent_dir",
")",
":",
"config_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"parent_dir",
",",
"'configs'",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"configs",
")",
":",
"shutil",
".",
"copytre... | extracts configs to a new dir under parent_dir and returns it . | train | false |
28,561 | @cleanup
def test_bad_masked_sizes():
x = np.arange(3)
y = np.arange(3)
u = np.ma.array((15.0 * np.ones((4,))))
v = np.ma.array((15.0 * np.ones_like(u)))
u[1] = np.ma.masked
v[1] = np.ma.masked
(fig, ax) = plt.subplots()
with pytest.raises(ValueError):
ax.barbs(x, y, u, v)
| [
"@",
"cleanup",
"def",
"test_bad_masked_sizes",
"(",
")",
":",
"x",
"=",
"np",
".",
"arange",
"(",
"3",
")",
"y",
"=",
"np",
".",
"arange",
"(",
"3",
")",
"u",
"=",
"np",
".",
"ma",
".",
"array",
"(",
"(",
"15.0",
"*",
"np",
".",
"ones",
"(",... | test error handling when given differing sized masked arrays . | train | false |
28,563 | def format_instances(instances, features):
header = features
datarows = [[getattr(x, f) for f in features] for x in instances]
return (header, datarows)
| [
"def",
"format_instances",
"(",
"instances",
",",
"features",
")",
":",
"header",
"=",
"features",
"datarows",
"=",
"[",
"[",
"getattr",
"(",
"x",
",",
"f",
")",
"for",
"f",
"in",
"features",
"]",
"for",
"x",
"in",
"instances",
"]",
"return",
"(",
"h... | convert a list of instances into a header list and datarows list . | train | false |
28,564 | def dense2cvxopt(value):
import cvxopt
return cvxopt.matrix(value, tc='d')
| [
"def",
"dense2cvxopt",
"(",
"value",
")",
":",
"import",
"cvxopt",
"return",
"cvxopt",
".",
"matrix",
"(",
"value",
",",
"tc",
"=",
"'d'",
")"
] | converts a numpy matrix to a cvxopt matrix . | train | false |
28,565 | def drop_privileges(user, call_setsid=True):
if (os.geteuid() == 0):
groups = [g.gr_gid for g in grp.getgrall() if (user in g.gr_mem)]
os.setgroups(groups)
user = pwd.getpwnam(user)
os.setgid(user[3])
os.setuid(user[2])
os.environ['HOME'] = user[5]
if call_setsid:
try:
os.setsid()
except OSError:
pass
os.chdir('/')
os.umask(18)
| [
"def",
"drop_privileges",
"(",
"user",
",",
"call_setsid",
"=",
"True",
")",
":",
"if",
"(",
"os",
".",
"geteuid",
"(",
")",
"==",
"0",
")",
":",
"groups",
"=",
"[",
"g",
".",
"gr_gid",
"for",
"g",
"in",
"grp",
".",
"getgrall",
"(",
")",
"if",
... | drops privileges to selected user . | train | false |
28,566 | def view_with_secure(request):
response = HttpResponse()
response.test_was_secure_request = request.is_secure()
response.test_server_port = request.META.get('SERVER_PORT', 80)
return response
| [
"def",
"view_with_secure",
"(",
"request",
")",
":",
"response",
"=",
"HttpResponse",
"(",
")",
"response",
".",
"test_was_secure_request",
"=",
"request",
".",
"is_secure",
"(",
")",
"response",
".",
"test_server_port",
"=",
"request",
".",
"META",
".",
"get"... | a view that indicates if the request was secure . | train | false |
28,567 | def listens_for(target, identifier, *args, **kw):
def decorate(fn):
listen(target, identifier, fn, *args, **kw)
return fn
return decorate
| [
"def",
"listens_for",
"(",
"target",
",",
"identifier",
",",
"*",
"args",
",",
"**",
"kw",
")",
":",
"def",
"decorate",
"(",
"fn",
")",
":",
"listen",
"(",
"target",
",",
"identifier",
",",
"fn",
",",
"*",
"args",
",",
"**",
"kw",
")",
"return",
... | decorate a function as a listener for the given target + identifier . | train | false |
28,569 | def available_capabilities(image=None):
if (salt.utils.version_cmp(__grains__['osversion'], '10') == (-1)):
raise NotImplementedError('`installed_capabilities` is not available on this version of Windows: {0}'.format(__grains__['osversion']))
return _get_components('Capability Identity', 'Capabilities', 'Not Present')
| [
"def",
"available_capabilities",
"(",
"image",
"=",
"None",
")",
":",
"if",
"(",
"salt",
".",
"utils",
".",
"version_cmp",
"(",
"__grains__",
"[",
"'osversion'",
"]",
",",
"'10'",
")",
"==",
"(",
"-",
"1",
")",
")",
":",
"raise",
"NotImplementedError",
... | list the capabilities available on the system args: image : the path to the root directory of an offline windows image . | train | true |
28,570 | def hash_infile(afile, chunk_len=8192, crypto=hashlib.md5):
hex = None
if os.path.isfile(afile):
crypto_obj = crypto()
with open(afile, u'rb') as fp:
while True:
data = fp.read(chunk_len)
if (not data):
break
crypto_obj.update(data)
hex = crypto_obj.hexdigest()
return hex
| [
"def",
"hash_infile",
"(",
"afile",
",",
"chunk_len",
"=",
"8192",
",",
"crypto",
"=",
"hashlib",
".",
"md5",
")",
":",
"hex",
"=",
"None",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"afile",
")",
":",
"crypto_obj",
"=",
"crypto",
"(",
")",
"with... | computes hash of a file using crypto module . | train | false |
28,571 | def _shouldEnableNewStyle():
value = os.environ.get('TWISTED_NEWSTYLE', '')
if (value in ['', 'no', 'false', 'False', '0']):
return False
else:
return True
| [
"def",
"_shouldEnableNewStyle",
"(",
")",
":",
"value",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'TWISTED_NEWSTYLE'",
",",
"''",
")",
"if",
"(",
"value",
"in",
"[",
"''",
",",
"'no'",
",",
"'false'",
",",
"'False'",
",",
"'0'",
"]",
")",
":",
"... | returns whether or not we should enable the new-style conversion of old-style classes . | train | false |
28,572 | def losetup_detach_all(root_path):
for (device_file, backing_file) in _losetup_list():
try:
backing_file.segmentsFrom(root_path)
except ValueError:
pass
else:
losetup_detach(device_file)
| [
"def",
"losetup_detach_all",
"(",
"root_path",
")",
":",
"for",
"(",
"device_file",
",",
"backing_file",
")",
"in",
"_losetup_list",
"(",
")",
":",
"try",
":",
"backing_file",
".",
"segmentsFrom",
"(",
"root_path",
")",
"except",
"ValueError",
":",
"pass",
"... | detach all loop devices associated with files contained in root_path . | train | false |
28,580 | def remove_user_milestone(user, milestone):
if (not settings.FEATURES.get('MILESTONES_APP')):
return None
return milestones_api.remove_user_milestone(user, milestone)
| [
"def",
"remove_user_milestone",
"(",
"user",
",",
"milestone",
")",
":",
"if",
"(",
"not",
"settings",
".",
"FEATURES",
".",
"get",
"(",
"'MILESTONES_APP'",
")",
")",
":",
"return",
"None",
"return",
"milestones_api",
".",
"remove_user_milestone",
"(",
"user",... | client api operation adapter/wrapper . | train | false |
28,581 | def boolean(value):
if isinstance(value, bool):
return value
if (not value):
raise ValueError('boolean type must be non-null')
value = value.lower()
if (value in ('true', '1')):
return True
if (value in ('false', '0')):
return False
raise ValueError('Invalid literal for boolean(): {0}'.format(value))
| [
"def",
"boolean",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"return",
"value",
"if",
"(",
"not",
"value",
")",
":",
"raise",
"ValueError",
"(",
"'boolean type must be non-null'",
")",
"value",
"=",
"value",
".",
"lo... | test whether a string is a tk boolean . | train | true |
28,583 | def is_worse_than_or_equal_to(lhs, rhs):
if (lhs == rhs):
return True
return is_worse_than(lhs, rhs)
| [
"def",
"is_worse_than_or_equal_to",
"(",
"lhs",
",",
"rhs",
")",
":",
"if",
"(",
"lhs",
"==",
"rhs",
")",
":",
"return",
"True",
"return",
"is_worse_than",
"(",
"lhs",
",",
"rhs",
")"
] | compare two statuses and return a boolean indicating if the lhs status is worse than or equal to the rhs status . | train | false |
28,584 | def testMininetCluster(remote='ubuntu2', link=RemoteGRELink):
servers = ['localhost', remote]
topo = TreeTopo(depth=3, fanout=3)
net = MininetCluster(topo=topo, servers=servers, link=link, placement=SwitchBinPlacer)
net.start()
net.pingAll()
net.stop()
| [
"def",
"testMininetCluster",
"(",
"remote",
"=",
"'ubuntu2'",
",",
"link",
"=",
"RemoteGRELink",
")",
":",
"servers",
"=",
"[",
"'localhost'",
",",
"remote",
"]",
"topo",
"=",
"TreeTopo",
"(",
"depth",
"=",
"3",
",",
"fanout",
"=",
"3",
")",
"net",
"="... | test mininetcluster() . | train | false |
28,585 | def _possible_string_format_functions(format_):
(yield (lambda format_, val: format(val, format_)))
(yield (lambda format_, val: format_.format(val)))
(yield (lambda format_, val: (format_ % val)))
| [
"def",
"_possible_string_format_functions",
"(",
"format_",
")",
":",
"(",
"yield",
"(",
"lambda",
"format_",
",",
"val",
":",
"format",
"(",
"val",
",",
"format_",
")",
")",
")",
"(",
"yield",
"(",
"lambda",
"format_",
",",
"val",
":",
"format_",
".",
... | iterate through possible string-derived format functions . | train | false |
28,586 | def delete_net_dev(dev):
if device_exists(dev):
try:
utils.execute('ip', 'link', 'delete', dev, run_as_root=True, check_exit_code=[0, 2, 254])
LOG.debug("Net device removed: '%s'", dev)
except processutils.ProcessExecutionError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed removing net device: '%s'"), dev)
| [
"def",
"delete_net_dev",
"(",
"dev",
")",
":",
"if",
"device_exists",
"(",
"dev",
")",
":",
"try",
":",
"utils",
".",
"execute",
"(",
"'ip'",
",",
"'link'",
",",
"'delete'",
",",
"dev",
",",
"run_as_root",
"=",
"True",
",",
"check_exit_code",
"=",
"[",... | delete a network device only if it exists . | train | false |
28,587 | def _TearDownStubs():
logging.info('Applying all pending transactions and saving the datastore')
datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
datastore_stub.Write()
| [
"def",
"_TearDownStubs",
"(",
")",
":",
"logging",
".",
"info",
"(",
"'Applying all pending transactions and saving the datastore'",
")",
"datastore_stub",
"=",
"apiproxy_stub_map",
".",
"apiproxy",
".",
"GetStub",
"(",
"'datastore_v3'",
")",
"datastore_stub",
".",
"Wri... | clean up any stubs that need cleanup . | train | false |
28,588 | def monotone_fn_inverter(fn, x, vectorized=True, **keywords):
x = np.asarray(x)
if vectorized:
y = fn(x, **keywords)
else:
y = []
for _x in x:
y.append(fn(_x, **keywords))
y = np.array(y)
a = np.argsort(y)
return interp1d(y[a], x[a])
| [
"def",
"monotone_fn_inverter",
"(",
"fn",
",",
"x",
",",
"vectorized",
"=",
"True",
",",
"**",
"keywords",
")",
":",
"x",
"=",
"np",
".",
"asarray",
"(",
"x",
")",
"if",
"vectorized",
":",
"y",
"=",
"fn",
"(",
"x",
",",
"**",
"keywords",
")",
"el... | given a monotone function fn and a set of x values . | train | false |
28,589 | def pr_image_represent(image_name, format=None, size=()):
table = current.s3db.pr_image_library
query = (table.original_name == image_name)
if format:
query = (query & (table.format == format))
if size:
query = ((query & (table.width == size[0])) & (table.height == size[1]))
image = current.db(query).select(table.new_name, limitby=(0, 1)).first()
if image:
return image.new_name
else:
return image_name
| [
"def",
"pr_image_represent",
"(",
"image_name",
",",
"format",
"=",
"None",
",",
"size",
"=",
"(",
")",
")",
":",
"table",
"=",
"current",
".",
"s3db",
".",
"pr_image_library",
"query",
"=",
"(",
"table",
".",
"original_name",
"==",
"image_name",
")",
"i... | get the image that matches the required image type . | train | false |
def test_import_plain():
    """import_item('os') must hand back the very same module object as `import os`."""
    import os
    fetched = import_item('os')
    nt.assert_true(fetched is os)
| [
"def",
"test_import_plain",
"(",
")",
":",
"import",
"os",
"os2",
"=",
"import_item",
"(",
"'os'",
")",
"nt",
".",
"assert_true",
"(",
"(",
"os",
"is",
"os2",
")",
")"
] | test simple imports . | train | false |
def libvlc_audio_equalizer_get_amp_at_index(p_equalizer, u_band):
    """Get the amplification value for a particular equalizer frequency band.

    @param p_equalizer: opaque equalizer handle (passed as a C void pointer).
    @param u_band: index of the frequency band (C unsigned int).
    @return: amplification value (C float).
    """
    # Reuse the cached ctypes binding when present; otherwise build it once.
    # Prototype: float libvlc_audio_equalizer_get_amp_at_index(void *, unsigned).
    f = (_Cfunctions.get('libvlc_audio_equalizer_get_amp_at_index', None) or _Cfunction('libvlc_audio_equalizer_get_amp_at_index', ((1,), (1,)), None, ctypes.c_float, ctypes.c_void_p, ctypes.c_uint))
    return f(p_equalizer, u_band)
| [
"def",
"libvlc_audio_equalizer_get_amp_at_index",
"(",
"p_equalizer",
",",
"u_band",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_audio_equalizer_get_amp_at_index'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_audio_equalizer_get_amp_at_index'... | get the amplification value for a particular equalizer frequency band . | train | true |
28,594 | def _get_first_aggregate_text(node_list):
if (not node_list):
return ''
out = []
for node in node_list[0].childNodes:
if (node.nodeType == dom.Document.TEXT_NODE):
out.append(node.nodeValue)
return '\n'.join(out)
| [
"def",
"_get_first_aggregate_text",
"(",
"node_list",
")",
":",
"if",
"(",
"not",
"node_list",
")",
":",
"return",
"''",
"out",
"=",
"[",
"]",
"for",
"node",
"in",
"node_list",
"[",
"0",
"]",
".",
"childNodes",
":",
"if",
"(",
"node",
".",
"nodeType",
... | extract text from the first occurred dom aggregate . | train | true |
def demo__custom_identity_verify(identity_dict):
    """Verify a CC98 forum account against the legacy sign.asp endpoint.

    Expects 'cc98_username' and 'cc98_password' keys in *identity_dict*.
    Returns True only when the endpoint answers with the literal '9898';
    any missing field, network failure, or other error yields False.
    """
    import hashlib
    import requests
    import config
    # Both credential fields are required.
    if (('cc98_username' not in identity_dict) or ('cc98_password' not in identity_dict)):
        return False
    try:
        # CC98 expects the MD5 hex digest of the password, not the plain text.
        pass_md5 = hashlib.md5(identity_dict['cc98_password'].encode()).hexdigest()
        proxy = config.requests_proxies if config.is_use_proxy else None
        r = requests.post('http://www.cc98.org/sign.asp', data={'a': 'i', 'u': identity_dict['cc98_username'], 'p': pass_md5, 'userhidden': 2}, proxies=proxy)
        return r.text == '9898'
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate instead of being reported as "login failed".
        return False
| [
"def",
"demo__custom_identity_verify",
"(",
"identity_dict",
")",
":",
"import",
"hashlib",
"import",
"requests",
"import",
"config",
"if",
"(",
"(",
"'cc98_username'",
"not",
"in",
"identity_dict",
")",
"or",
"(",
"'cc98_password'",
"not",
"in",
"identity_dict",
... | for cc98 identity verify :type identity_dict: dict . | train | false |
def load_vstr(buf, pos):
    """Decode a byte string prefixed by a varint length, starting at *pos* in *buf*."""
    length, pos = load_vint(buf, pos)
    return load_bytes(buf, length, pos)
| [
"def",
"load_vstr",
"(",
"buf",
",",
"pos",
")",
":",
"(",
"slen",
",",
"pos",
")",
"=",
"load_vint",
"(",
"buf",
",",
"pos",
")",
"return",
"load_bytes",
"(",
"buf",
",",
"slen",
",",
"pos",
")"
] | load bytes prefixed by vint length . | train | true |
def enable(name, start=False, **kwargs):
    """Enable the named runit service so it starts at boot.

    When `start` is False, a temporary 'down' file keeps runsv from starting
    the service the moment its symlink appears. Returns True on success and
    False on any failure (unknown/aliased service, IO errors, or the service
    never reporting a good `sv status`).
    """
    if (not available(name)):
        return False
    alias = get_svc_alias()
    if (name in alias):
        log.error('This service is aliased, enable its alias instead')
        return False
    svc_realpath = _get_svc_path(name)[0]
    down_file = os.path.join(svc_realpath, 'down')
    if enabled(name):
        # Already enabled: just clear a leftover 'down' marker, if any.
        if os.path.exists(down_file):
            try:
                os.unlink(down_file)
            except OSError:
                log.error('Unable to remove file {0}'.format(down_file))
                return False
        return True
    if (not start):
        # Pre-create 'down' so runsv does not start the service on link-up.
        log.trace('need a temporary file {0}'.format(down_file))
        if (not os.path.exists(down_file)):
            try:
                salt.utils.fopen(down_file, 'w').close()
            except IOError:
                log.error('Unable to create file {0}'.format(down_file))
                return False
    try:
        os.symlink(svc_realpath, _service_path(name))
    except IOError:
        log.error('Unable to create symlink {0}'.format(down_file))
        if (not start):
            os.unlink(down_file)
        return False
    # Poll `sv status` (up to ~5s) until runsv has picked the service up.
    cmd = 'sv status {0}'.format(_service_path(name))
    retcode_sv = 1
    count_sv = 0
    while ((retcode_sv != 0) and (count_sv < 10)):
        time.sleep(0.5)
        count_sv += 1
        call = __salt__['cmd.run_all'](cmd)
        retcode_sv = call['retcode']
    if ((not start) and os.path.exists(down_file)):
        try:
            os.unlink(down_file)
        except OSError:
            log.error('Unable to remove temp file {0}'.format(down_file))
            retcode_sv = 1
    if (retcode_sv != 0):
        # Roll back the service symlink created above. The original called
        # os.path.join with a *list* argument, which raises TypeError; the
        # intent here is to remove the just-created link.
        os.unlink(_service_path(name))
        return False
    return True
| [
"def",
"enable",
"(",
"name",
",",
"start",
"=",
"False",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"available",
"(",
"name",
")",
")",
":",
"return",
"False",
"alias",
"=",
"get_svc_alias",
"(",
")",
"if",
"(",
"name",
"in",
"alias",
")",
... | enable the named service to start at boot cli example: . | train | true |
def make_case_flags(info):
    """Extract the case-handling bits from info.flags.

    In ASCII mode, full Unicode case-folding is unavailable, so the FULLCASE
    bit is stripped from the result.
    """
    case_bits = info.flags & CASE_FLAGS
    if info.flags & ASCII:
        case_bits &= ~FULLCASE
    return case_bits
| [
"def",
"make_case_flags",
"(",
"info",
")",
":",
"flags",
"=",
"(",
"info",
".",
"flags",
"&",
"CASE_FLAGS",
")",
"if",
"(",
"info",
".",
"flags",
"&",
"ASCII",
")",
":",
"flags",
"&=",
"(",
"~",
"FULLCASE",
")",
"return",
"flags"
] | makes the case flags . | train | false |
@contextlib.contextmanager
def skip_cross_domain_referer_check(request):
    """Temporarily make *request* report itself as insecure, which skips the
    cross-domain CSRF referer check; the original is_secure is always restored.
    """
    original_is_secure = request.is_secure

    def _insecure():
        return False

    request.is_secure = _insecure
    try:
        yield
    finally:
        # Restore even if the body raised.
        request.is_secure = original_is_secure
| [
"@",
"contextlib",
".",
"contextmanager",
"def",
"skip_cross_domain_referer_check",
"(",
"request",
")",
":",
"is_secure_default",
"=",
"request",
".",
"is_secure",
"request",
".",
"is_secure",
"=",
"(",
"lambda",
":",
"False",
")",
"try",
":",
"(",
"yield",
"... | skip the cross-domain csrf referer check . | train | false |
28,600 | def _makeBuildbotTac(basedir, tac_file_contents, quiet):
tacfile = os.path.join(basedir, 'buildbot.tac')
if os.path.exists(tacfile):
try:
oldcontents = open(tacfile, 'rt').read()
except IOError as exception:
raise CreateWorkerError(('error reading %s: %s' % (tacfile, exception.strerror)))
if (oldcontents == tac_file_contents):
if (not quiet):
print('buildbot.tac already exists and is correct')
return
if (not quiet):
print('not touching existing buildbot.tac')
print('creating buildbot.tac.new instead')
tacfile = os.path.join(basedir, 'buildbot.tac.new')
try:
f = open(tacfile, 'wt')
f.write(tac_file_contents)
f.close()
os.chmod(tacfile, 384)
except IOError as exception:
raise CreateWorkerError(('could not write %s: %s' % (tacfile, exception.strerror)))
| [
"def",
"_makeBuildbotTac",
"(",
"basedir",
",",
"tac_file_contents",
",",
"quiet",
")",
":",
"tacfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"basedir",
",",
"'buildbot.tac'",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"tacfile",
")",
":",
"... | create buildbot . | train | true |
def download_and_untar_files(source_url, target_parent_dir, tar_root_name, target_root_name):
    """Download a gzipped tarball from *source_url*, extract it under
    *target_parent_dir*, and rename the archive's root directory from
    *tar_root_name* to *target_root_name*.

    No-op when the target directory already exists.
    """
    if (not os.path.exists(os.path.join(target_parent_dir, target_root_name))):
        print ('Downloading and untarring file %s to %s' % (tar_root_name, target_parent_dir))
        common.ensure_directory_exists(target_parent_dir)
        # Download to a fixed temporary path, extract, then clean up.
        urllib.urlretrieve(source_url, TMP_UNZIP_PATH)
        with contextlib.closing(tarfile.open(TMP_UNZIP_PATH, 'r:gz')) as tfile:
            # NOTE(review): extractall() trusts member paths; safe only for a
            # trusted source_url -- confirm the archives come from our infra.
            tfile.extractall(target_parent_dir)
        os.remove(TMP_UNZIP_PATH)
        os.rename(os.path.join(target_parent_dir, tar_root_name), os.path.join(target_parent_dir, target_root_name))
| [
"def",
"download_and_untar_files",
"(",
"source_url",
",",
"target_parent_dir",
",",
"tar_root_name",
",",
"target_root_name",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"target_parent_dir",
",",
"... | downloads a tar file . | train | false |
def binary_distance(label1, label2):
    """Simple equality test: distance 0.0 for equal labels, 1.0 otherwise."""
    if label1 == label2:
        return 0.0
    return 1.0
| [
"def",
"binary_distance",
"(",
"label1",
",",
"label2",
")",
":",
"return",
"(",
"0.0",
"if",
"(",
"label1",
"==",
"label2",
")",
"else",
"1.0",
")"
] | simple equality test . | train | false |
@nox.parametrize('sample', NON_GAE_STANDARD_SAMPLES)
def session_py35(session, sample):
    """Nox session: run each non-GAE-standard sample's tests under Python 3.5."""
    session.interpreter = 'python3.5'
    _session_tests(session, sample)
| [
"@",
"nox",
".",
"parametrize",
"(",
"'sample'",
",",
"NON_GAE_STANDARD_SAMPLES",
")",
"def",
"session_py35",
"(",
"session",
",",
"sample",
")",
":",
"session",
".",
"interpreter",
"=",
"'python3.5'",
"_session_tests",
"(",
"session",
",",
"sample",
")"
] | runs py . | train | false |
def interfaces6(all=False):
    """Like interfaces(), but keep only IPv6 addresses per interface.

    Interfaces with no IPv6 address are included (with an empty list) only
    when *all* is true.
    """
    result = {}
    for iface, pairs in interfaces(all=all).items():
        v6 = [address for family, address in pairs if family == socket.AF_INET6]
        if all or v6:
            result[iface] = v6
    return result
| [
"def",
"interfaces6",
"(",
"all",
"=",
"False",
")",
":",
"out",
"=",
"{",
"}",
"for",
"(",
"name",
",",
"addrs",
")",
"in",
"interfaces",
"(",
"all",
"=",
"all",
")",
".",
"items",
"(",
")",
":",
"addrs",
"=",
"[",
"addr",
"for",
"(",
"fam",
... | interfaces6 -> dict as :func:interfaces but only includes ipv6 addresses and the lists in the dictionary only contains the addresses not the family . | train | false |
def nullDebugAction(*args):
    """Do-nothing debug callback: accepts any arguments and returns None."""
| [
"def",
"nullDebugAction",
"(",
"*",
"args",
")",
":",
"pass"
] | do-nothing debug action . | train | false |
def _normalize_hosts(hosts):
    """Coerce the `hosts` argument into a list of connection dicts.

    Accepts None (-> one default host), a single URL string, or a list that
    may mix URL strings and ready-made dicts; strings are parsed into
    host/port/scheme/auth/url_prefix entries.
    """
    if hosts is None:
        return [{}]
    if isinstance(hosts, string_types):
        hosts = [hosts]
    normalized = []
    for entry in hosts:
        if not isinstance(entry, string_types):
            # Already a dict-style host description; pass through untouched.
            normalized.append(entry)
            continue
        # Bare "host:port" strings need a netloc marker for urlparse.
        if u'://' not in entry:
            entry = (u'//%s' % entry)
        parsed = urlparse(entry)
        spec = {u'host': parsed.hostname}
        if parsed.port:
            spec[u'port'] = parsed.port
        if parsed.scheme == u'https':
            spec[u'port'] = parsed.port or 443
            spec[u'use_ssl'] = True
            # Transport scheme stays plain 'http'; `use_ssl` carries the TLS flag.
            spec[u'scheme'] = u'http'
        elif parsed.scheme:
            spec[u'scheme'] = parsed.scheme
        if parsed.username or parsed.password:
            spec[u'http_auth'] = (u'%s:%s' % (parsed.username, parsed.password))
        if parsed.path and parsed.path != u'/':
            spec[u'url_prefix'] = parsed.path
        normalized.append(spec)
    return normalized
| [
"def",
"_normalize_hosts",
"(",
"hosts",
")",
":",
"if",
"(",
"hosts",
"is",
"None",
")",
":",
"return",
"[",
"{",
"}",
"]",
"if",
"isinstance",
"(",
"hosts",
",",
"string_types",
")",
":",
"hosts",
"=",
"[",
"hosts",
"]",
"out",
"=",
"[",
"]",
"... | helper function to transform hosts argument to :class:~elasticsearch . | train | false |
def set_win32_requests_ca_bundle_path():
    """Point the `requests` library at the cacert.pem shipped next to a frozen
    executable, so SSL verification works in packaged builds.
    """
    import requests.adapters
    # sys.frozen is only set by freezers such as PyInstaller/py2exe.
    if hasattr(sys, 'frozen'):
        bundle = os.path.join(os.path.dirname(sys.executable), 'cacert.pem')
        requests.adapters.DEFAULT_CA_BUNDLE_PATH = bundle
    log.info('Default CA bundle path of the requests library: {0}'.format(requests.adapters.DEFAULT_CA_BUNDLE_PATH))
| [
"def",
"set_win32_requests_ca_bundle_path",
"(",
")",
":",
"import",
"requests",
".",
"adapters",
"if",
"hasattr",
"(",
"sys",
",",
"'frozen'",
")",
":",
"prog_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"sys",
".",
"executable",
")",
"ca_bundle_path... | in order to allow requests to validate ssl requests with the packaged . | train | false |
def replace_header(headers, name, value):
    """Replace the first occurrence of header *name* (case-insensitive) in the
    mutable list *headers*, asserting there is at most one; append it when
    absent. Returns the previous value (or None).
    """
    name = name.lower()
    previous = None
    for idx, (key, old_value) in enumerate(headers):
        if key.lower() == name:
            assert not previous, ("two values for the header '%s' found" % name)
            previous = old_value
            # Store the canonical lower-cased name alongside the new value.
            headers[idx] = (name, value)
    if not previous:
        headers.append((name, value))
    return previous
| [
"def",
"replace_header",
"(",
"headers",
",",
"name",
",",
"value",
")",
":",
"name",
"=",
"name",
".",
"lower",
"(",
")",
"i",
"=",
"0",
"result",
"=",
"None",
"while",
"(",
"i",
"<",
"len",
"(",
"headers",
")",
")",
":",
"if",
"(",
"headers",
... | updates the headers replacing the first occurance of the given name with the value provided; asserting that no further occurances happen . | train | true |
def negated(input_words, include_nt=True):
    """Return True when *input_words* contains a negation cue.

    Cues are: any word from NEGATE, any "n't" contraction (when include_nt),
    or the word 'least' not immediately preceded by 'at'.
    """
    if any(cue in input_words for cue in NEGATE):
        return True
    if include_nt and any("n't" in word for word in input_words):
        return True
    if 'least' in input_words:
        idx = input_words.index('least')
        # "at least" is not a negation; any other "... least" is.
        if idx > 0 and input_words[idx - 1] != 'at':
            return True
    return False
| [
"def",
"negated",
"(",
"input_words",
",",
"include_nt",
"=",
"True",
")",
":",
"neg_words",
"=",
"[",
"]",
"neg_words",
".",
"extend",
"(",
"NEGATE",
")",
"for",
"word",
"in",
"neg_words",
":",
"if",
"(",
"word",
"in",
"input_words",
")",
":",
"return... | determine if input contains negation words . | train | false |
def atc(jobid):
    """Return the text of the at(1) job script for *jobid*, or an error dict
    when no such job file exists.
    """
    atjob_file = '/var/spool/cron/atjobs/{job}'.format(job=jobid)
    if __salt__['file.file_exists'](atjob_file):
        # Context manager closes the handle (the original leaked it).
        with salt.utils.fopen(atjob_file, 'r') as fp_:
            return ''.join(fp_.readlines())
    else:
        return {'error': "invalid job id '{0}'".format(jobid)}
| [
"def",
"atc",
"(",
"jobid",
")",
":",
"atjob_file",
"=",
"'/var/spool/cron/atjobs/{job}'",
".",
"format",
"(",
"job",
"=",
"jobid",
")",
"if",
"__salt__",
"[",
"'file.file_exists'",
"]",
"(",
"atjob_file",
")",
":",
"return",
"''",
".",
"join",
"(",
"salt"... | print the at(1) script that will run for the passed job id . | train | false |
def _unjoin_service(service):
    """Service handler: unjoin each targeted Sonos player from its group."""
    _apply_service(service, SonosDevice.unjoin)
| [
"def",
"_unjoin_service",
"(",
"service",
")",
":",
"_apply_service",
"(",
"service",
",",
"SonosDevice",
".",
"unjoin",
")"
] | unjoin the player from a group . | train | false |
28,617 | def _unwrap_response(resp):
for (resp_key, resp_data) in resp.items():
if resp_key.endswith('_response'):
for (result_key, result) in resp_data.items():
if result_key.endswith('_result'):
return result
return {}
raise ValueError(resp)
| [
"def",
"_unwrap_response",
"(",
"resp",
")",
":",
"for",
"(",
"resp_key",
",",
"resp_data",
")",
"in",
"resp",
".",
"items",
"(",
")",
":",
"if",
"resp_key",
".",
"endswith",
"(",
"'_response'",
")",
":",
"for",
"(",
"result_key",
",",
"result",
")",
... | get the actual result from an iam api response . | train | false |
def TR16(rv, max=4, pow=False):
    """Fu-rule TR16: rewrite inverse powers of cos, e.g. cos(x)**-2 to
    1 + tan(x)**2, applied bottom-up over the expression tree.

    `max` and `pow` are forwarded to the shared _TR56 matcher.
    """
    def f(rv):
        # Only Pow nodes whose base is cos(...) are candidates.
        if (not (isinstance(rv, Pow) and (rv.base.func is cos))):
            return rv
        ia = (1 / rv)
        # _TR56 rewrites cos-powers via the 1 + tan**2 identity.
        a = _TR56(ia, cos, tan, (lambda x: (1 + x)), max=max, pow=pow)
        if (a != ia):
            rv = a
        return rv
    return bottom_up(rv, f)
| [
"def",
"TR16",
"(",
"rv",
",",
"max",
"=",
"4",
",",
"pow",
"=",
"False",
")",
":",
"def",
"f",
"(",
"rv",
")",
":",
"if",
"(",
"not",
"(",
"isinstance",
"(",
"rv",
",",
"Pow",
")",
"and",
"(",
"rv",
".",
"base",
".",
"func",
"is",
"cos",
... | convert cos(x)*-2 to 1 + tan(x)**2 . | train | false |
def create_disk_from_distro(vm_, linode_id, swap_size=None):
    """Create the Linode's root disk from a distribution image.

    A root password is mandatory (SaltCloudConfigError otherwise); an SSH
    public key is optional. Returns the cleaned API response.
    """
    kwargs = {}
    if (swap_size is None):
        swap_size = get_swap_size(vm_)
    pub_key = get_pub_key(vm_)
    root_password = get_password(vm_)
    if pub_key:
        kwargs.update({'rootSSHKey': pub_key})
    if root_password:
        kwargs.update({'rootPass': root_password})
    else:
        raise SaltCloudConfigError('The Linode driver requires a password.')
    # Size is computed by get_disk_size from the VM profile and swap size.
    kwargs.update({'LinodeID': linode_id, 'DistributionID': get_distribution_id(vm_), 'Label': vm_['name'], 'Size': get_disk_size(vm_, swap_size, linode_id)})
    result = _query('linode', 'disk.createfromdistribution', args=kwargs)
    return _clean_data(result)
| [
"def",
"create_disk_from_distro",
"(",
"vm_",
",",
"linode_id",
",",
"swap_size",
"=",
"None",
")",
":",
"kwargs",
"=",
"{",
"}",
"if",
"(",
"swap_size",
"is",
"None",
")",
":",
"swap_size",
"=",
"get_swap_size",
"(",
"vm_",
")",
"pub_key",
"=",
"get_pub... | creates the disk for the linode from the distribution . | train | true |
def trunk_by_port_provider(resource, port_id, context, **kwargs):
    """Resource-provider callback: return the Trunk whose parent port is *port_id*."""
    return trunk_objects.Trunk.get_object(context, port_id=port_id)
| [
"def",
"trunk_by_port_provider",
"(",
"resource",
",",
"port_id",
",",
"context",
",",
"**",
"kwargs",
")",
":",
"return",
"trunk_objects",
".",
"Trunk",
".",
"get_object",
"(",
"context",
",",
"port_id",
"=",
"port_id",
")"
] | provider callback to supply trunk information by parent port . | train | false |
def _volume_type_get_full(context, id):
    """Return the dict for volume type *id* with extra_specs and projects eagerly loaded."""
    return _volume_type_get(context, id, session=None, inactive=False, expected_fields=('extra_specs', 'projects'))
| [
"def",
"_volume_type_get_full",
"(",
"context",
",",
"id",
")",
":",
"return",
"_volume_type_get",
"(",
"context",
",",
"id",
",",
"session",
"=",
"None",
",",
"inactive",
"=",
"False",
",",
"expected_fields",
"=",
"(",
"'extra_specs'",
",",
"'projects'",
")... | return dict for a specific volume_type with extra_specs and projects . | train | false |
@handle_response_format
@treeio_login_required
def account_add(request, response_format='html'):
    """New-account form view: render the form on GET, create the Account on a
    valid POST, and redirect to the accounts index on 'cancel'.

    An invalid POST falls through and re-renders the bound form with errors.
    """
    if request.POST:
        if ('cancel' not in request.POST):
            account = Account()
            form = AccountForm(request.user.profile, request.POST, instance=account)
            if form.is_valid():
                account = form.save(commit=False)
                # Normalize the submitted balance (currency conversion helper).
                convert(account, 'balance')
                # NOTE(review): no explicit account.save() here -- presumably
                # set_user_from_request() persists the instance; confirm.
                account.set_user_from_request(request)
                return HttpResponseRedirect(reverse('finance_account_view', args=[account.id]))
        else:
            return HttpResponseRedirect(reverse('finance_index_accounts'))
    else:
        form = AccountForm(request.user.profile)
    return render_to_response('finance/account_add', {'form': form}, context_instance=RequestContext(request), response_format=response_format)
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"account_add",
"(",
"request",
",",
"response_format",
"=",
"'html'",
")",
":",
"if",
"request",
".",
"POST",
":",
"if",
"(",
"'cancel'",
"not",
"in",
"request",
".",
"POST",
")",
":",
"acco... | new account form . | train | false |
def safe_filename(name, extension=None, digest=None, max_length=_MAX_FILENAME_LENGTH):
    """Return a filesystem-safe filename for *name* (+ optional *extension*).

    Short names pass through unchanged; over-long names are replaced by the
    hex digest of *name* (sha1 by default) plus the extension. Raises
    ValueError if *name* is a path or the digest itself exceeds *max_length*.
    """
    if (os.path.basename(name) != name):
        raise ValueError(u'Name must be a filename, handed a path: {}'.format(name))
    ext = (extension or u'')
    filename = (name + ext)
    if (len(filename) <= max_length):
        return filename
    else:
        digest = (digest or hashlib.sha1())
        # hashlib requires bytes; the original passed `str`, which raises
        # TypeError on Python 3. UTF-8 encoding is a no-op for ASCII names.
        digest.update(name.encode('utf-8'))
        safe_name = (digest.hexdigest() + ext)
        if (len(safe_name) > max_length):
            raise ValueError(u'Digest {} failed to produce a filename <= {} characters for {} - got {}'.format(digest, max_length, filename, safe_name))
        return safe_name
| [
"def",
"safe_filename",
"(",
"name",
",",
"extension",
"=",
"None",
",",
"digest",
"=",
"None",
",",
"max_length",
"=",
"_MAX_FILENAME_LENGTH",
")",
":",
"if",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"name",
")",
"!=",
"name",
")",
":",
"raise",
... | strip filesystem-unfriendly characters from a filename . | train | false |
@_return_mount_error
def additions_install(**kwargs):
    """Install VirtualBox guest additions from the mounted additions image.

    Only the Linux kernel is handled; other kernels fall through (None).
    The decorator converts mount failures into an error return.
    """
    with _additions_mounted() as mount_point:
        kernel = __grains__.get('kernel', '')
        if (kernel == 'Linux'):
            return _additions_install_linux(mount_point, **kwargs)
| [
"@",
"_return_mount_error",
"def",
"additions_install",
"(",
"**",
"kwargs",
")",
":",
"with",
"_additions_mounted",
"(",
")",
"as",
"mount_point",
":",
"kernel",
"=",
"__grains__",
".",
"get",
"(",
"'kernel'",
",",
"''",
")",
"if",
"(",
"kernel",
"==",
"'... | install virtualbox guest additions . | train | true |
def app_compile(app, request, skip_failed_views=False):
    """Compile the named web2py application.

    Returns the list of failed views on success; on error, removes the
    partially compiled output and returns the traceback text instead.
    """
    from gluon.compileapp import compile_application, remove_compiled_application
    folder = apath(app, request)
    try:
        return compile_application(folder, skip_failed_views)
    except (Exception, RestrictedError):
        traceback_text = traceback.format_exc()
        # Roll back a half-compiled app before reporting the failure.
        remove_compiled_application(folder)
        return traceback_text
| [
"def",
"app_compile",
"(",
"app",
",",
"request",
",",
"skip_failed_views",
"=",
"False",
")",
":",
"from",
"gluon",
".",
"compileapp",
"import",
"compile_application",
",",
"remove_compiled_application",
"folder",
"=",
"apath",
"(",
"app",
",",
"request",
")",
... | compiles the application args: app: application name request: the global request object returns: none if everything went ok . | train | false |
def normalizeFeatures(features):
    """Normalize a list of feature matrices to zero mean and unit std.

    features: list of 2-D arrays (one matrix per class) sharing the column
    count; empty matrices are skipped when computing the statistics but are
    still returned (normalized, i.e. unchanged shape) in the output.

    Returns (featuresNorm, MEAN, STD). Results are float arrays; the
    original per-row assignment silently cast back to the input dtype.
    """
    # Stack all non-empty matrices once instead of repeated vstack calls.
    nonempty = [f for f in features if f.shape[0] > 0]
    X = numpy.vstack(nonempty) if nonempty else numpy.array([])
    MEAN = numpy.mean(X, axis=0)
    STD = numpy.std(X, axis=0)
    # Broadcasting replaces the explicit per-row loop of the original
    # (which also carried a dead `count += 1` statement).
    featuresNorm = [(f - MEAN) / STD for f in features]
    return (featuresNorm, MEAN, STD)
| [
"def",
"normalizeFeatures",
"(",
"features",
")",
":",
"X",
"=",
"numpy",
".",
"array",
"(",
"[",
"]",
")",
"for",
"(",
"count",
",",
"f",
")",
"in",
"enumerate",
"(",
"features",
")",
":",
"if",
"(",
"f",
".",
"shape",
"[",
"0",
"]",
">",
"0",... | this function normalizes a feature set to 0-mean and 1-std . | train | false |
def subtract_time_from_date(date, time, result_format='timestamp', exclude_millis=False, date_format=None):
    """Subtract *time* from *date* and return the result in *result_format*.

    *date_format* is forwarded to the Date parser; milliseconds are kept
    unless *exclude_millis* is truthy.
    """
    date = (Date(date, date_format) - Time(time))
    return date.convert(result_format, millis=is_falsy(exclude_millis))
| [
"def",
"subtract_time_from_date",
"(",
"date",
",",
"time",
",",
"result_format",
"=",
"'timestamp'",
",",
"exclude_millis",
"=",
"False",
",",
"date_format",
"=",
"None",
")",
":",
"date",
"=",
"(",
"Date",
"(",
"date",
",",
"date_format",
")",
"-",
"Time... | subtracts time from date and returns the resulting date . | train | false |
def _DecideFuncType(user_str):
    """Classify by the case of the first character: lowercase names are
    SIMPLE_FUNC, anything else ENHANCED_FUNC.
    """
    return SIMPLE_FUNC if user_str[0].islower() else ENHANCED_FUNC
| [
"def",
"_DecideFuncType",
"(",
"user_str",
")",
":",
"if",
"user_str",
"[",
"0",
"]",
".",
"islower",
"(",
")",
":",
"return",
"SIMPLE_FUNC",
"else",
":",
"return",
"ENHANCED_FUNC"
] | by default . | train | false |
def minimizeHTML(s):
    """Collapse Qt's verbose <span style=...> markup into plain <b>/<i>/<u> tags."""
    replacements = (
        ('<span style="font-weight:600;">(.*?)</span>', '<b>\\1</b>'),
        ('<span style="font-style:italic;">(.*?)</span>', '<i>\\1</i>'),
        ('<span style="text-decoration: underline;">(.*?)</span>', '<u>\\1</u>'),
    )
    for pattern, repl in replacements:
        s = re.sub(pattern, repl, s)
    return s
| [
"def",
"minimizeHTML",
"(",
"s",
")",
":",
"s",
"=",
"re",
".",
"sub",
"(",
"'<span style=\"font-weight:600;\">(.*?)</span>'",
",",
"'<b>\\\\1</b>'",
",",
"s",
")",
"s",
"=",
"re",
".",
"sub",
"(",
"'<span style=\"font-style:italic;\">(.*?)</span>'",
",",
"'<i>\\\... | correct qts verbose bold/underline/etc . | train | false |
def dup_deflate(f, K):
    """Map x**m -> y in a dense polynomial over K: find the largest m dividing
    every exponent that has a nonzero coefficient and return (m, deflated).

    `f` lists coefficients from the highest degree down, so f[-i-1] is the
    coefficient of x**i.
    """
    if (dup_degree(f) <= 0):
        return (1, f)
    g = 0
    for i in range(len(f)):
        # Zero coefficients impose no constraint on the exponent gcd.
        if (not f[((- i) - 1)]):
            continue
        g = igcd(g, i)
        if (g == 1):
            # gcd collapsed to 1: nothing can be deflated.
            return (1, f)
    # Keep every g-th coefficient, starting from the leading one.
    return (g, f[::g])
| [
"def",
"dup_deflate",
"(",
"f",
",",
"K",
")",
":",
"if",
"(",
"dup_degree",
"(",
"f",
")",
"<=",
"0",
")",
":",
"return",
"(",
"1",
",",
"f",
")",
"g",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"f",
")",
")",
":",
"if",
"(",
... | map x**m to y in a polynomial in k[x] . | train | false |
def hyper_as_trig(rv):
    """Rewrite hyperbolic functions in *rv* in terms of trigonometric ones
    (Osborne transformation) and return (transformed_expr, restore_callable).

    Existing trig functions are masked with Dummy symbols first so only the
    hyperbolics get transformed; the returned callable undoes the rewrite
    and unmasks them again.
    """
    from sympy.simplify.simplify import signsimp
    from sympy.simplify.radsimp import collect
    trigs = rv.atoms(TrigonometricFunction)
    reps = [(t, Dummy()) for t in trigs]
    masked = rv.xreplace(dict(reps))
    # Invert the mapping for the restore step.
    reps = [(v, k) for (k, v) in reps]
    d = Dummy()
    return (_osborne(masked, d), (lambda x: collect(signsimp(_osbornei(x, d).xreplace(dict(reps))), S.ImaginaryUnit)))
| [
"def",
"hyper_as_trig",
"(",
"rv",
")",
":",
"from",
"sympy",
".",
"simplify",
".",
"simplify",
"import",
"signsimp",
"from",
"sympy",
".",
"simplify",
".",
"radsimp",
"import",
"collect",
"trigs",
"=",
"rv",
".",
"atoms",
"(",
"TrigonometricFunction",
")",
... | return an expression containing hyperbolic functions in terms of trigonometric functions . | train | false |
@handle_response_format
@treeio_login_required
def dashboard_widget_delete(request, widget_id, response_format='html'):
    """Delete a dashboard widget, but only when the requester owns it.

    Always redirects to the dashboard index; unknown widget ids give 404.
    """
    widget = get_object_or_404(Widget, pk=widget_id)
    if (widget.user == request.user.profile):
        widget.delete()
    return HttpResponseRedirect(reverse('core_dashboard_index'))
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"dashboard_widget_delete",
"(",
"request",
",",
"widget_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"widget",
"=",
"get_object_or_404",
"(",
"Widget",
",",
"pk",
"=",
"widget_id",
")",
"... | delete an existing widget from the dashboard . | train | false |
def run_postcommit(once_per_request=True, celery=False):
    """Decorator factory: defer the wrapped function until after the request's
    transaction has been committed.

    In DEBUG_MODE the function is returned unwrapped (it runs immediately
    when called). `celery` and `once_per_request` are forwarded to
    enqueue_postcommit_task (the latter presumably de-duplicates calls
    within a request -- confirm against its implementation).
    """
    def wrapper(func):
        if settings.DEBUG_MODE:
            return func
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            # Note: the deferred call's return value is discarded.
            enqueue_postcommit_task(func, args, kwargs, celery=celery, once_per_request=once_per_request)
        return wrapped
    return wrapper
| [
"def",
"run_postcommit",
"(",
"once_per_request",
"=",
"True",
",",
"celery",
"=",
"False",
")",
":",
"def",
"wrapper",
"(",
"func",
")",
":",
"if",
"settings",
".",
"DEBUG_MODE",
":",
"return",
"func",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
... | delays function execution until after the requests transaction has been committed . | train | false |
def get_html(url, config=None, response=None):
    """Retrieve HTML for either a URL or an already-fetched response object.

    Returns '' on any request failure. When requests reports the encoding as
    ISO-8859-1 (its fallback guess), the raw bytes are returned instead of
    the possibly mis-decoded text.
    """
    FAIL_ENCODING = 'ISO-8859-1'
    config = (config or Configuration())
    useragent = config.browser_user_agent
    timeout = config.request_timeout
    if (response is not None):
        # Caller supplied a response: no network round-trip needed.
        if (response.encoding != FAIL_ENCODING):
            return response.text
        return response.content
    try:
        html = None
        response = requests.get(url=url, **get_request_kwargs(timeout, useragent))
        if (response.encoding != FAIL_ENCODING):
            html = response.text
        else:
            html = response.content
        if config.http_success_only:
            # Raise on 4xx/5xx (caught below, yielding '') when configured.
            response.raise_for_status()
        if (html is None):
            html = ''
        return html
    except requests.exceptions.RequestException as e:
        log.debug(('%s on %s' % (e, url)))
        return ''
| [
"def",
"get_html",
"(",
"url",
",",
"config",
"=",
"None",
",",
"response",
"=",
"None",
")",
":",
"FAIL_ENCODING",
"=",
"'ISO-8859-1'",
"config",
"=",
"(",
"config",
"or",
"Configuration",
"(",
")",
")",
"useragent",
"=",
"config",
".",
"browser_user_agen... | retrieves the html for either a url or a response object . | train | false |
@pytest.mark.skipif('not HAS_YAML')
def test_write_read_roundtrip():
    """Write the full-featured T_DTYPES table as ECSV with every delimiter and
    verify meta, column attributes, and values round-trip through each read
    path that should accept the format.
    """
    t = T_DTYPES
    for delimiter in DELIMITERS:
        out = StringIO()
        t.write(out, format='ascii.ecsv', delimiter=delimiter)
        # Read back via explicit ECSV, auto-detected ascii, and ascii.read.
        t2s = [Table.read(out.getvalue(), format='ascii.ecsv'), Table.read(out.getvalue(), format='ascii'), ascii.read(out.getvalue()), ascii.read(out.getvalue(), format='ecsv', guess=False), ascii.read(out.getvalue(), format='ecsv')]
        for t2 in t2s:
            assert (t.meta == t2.meta)
            for name in t.colnames:
                assert t[name].attrs_equal(t2[name])
                assert np.all((t[name] == t2[name]))
| [
"@",
"pytest",
".",
"mark",
".",
"skipif",
"(",
"'not HAS_YAML'",
")",
"def",
"test_write_read_roundtrip",
"(",
")",
":",
"t",
"=",
"T_DTYPES",
"for",
"delimiter",
"in",
"DELIMITERS",
":",
"out",
"=",
"StringIO",
"(",
")",
"t",
".",
"write",
"(",
"out",
... | write a full-featured table with all types and see that it round-trips on readback . | train | false |
def add_queue_name_prefix(name):
    """Return *name* with the configured queue-name prefix prepended."""
    prefix = _get_queue_name_prefix()
    return prefix + name
| [
"def",
"add_queue_name_prefix",
"(",
"name",
")",
":",
"return",
"(",
"_get_queue_name_prefix",
"(",
")",
"+",
"name",
")"
] | prefix a queue name . | train | false |
def vo_raise(exception_class, args=(), config=None, pos=None):
    """Instantiate and raise exception_class(args, config, pos), defaulting a
    missing *config* to an empty dict.
    """
    raise exception_class(args, {} if config is None else config, pos)
| [
"def",
"vo_raise",
"(",
"exception_class",
",",
"args",
"=",
"(",
")",
",",
"config",
"=",
"None",
",",
"pos",
"=",
"None",
")",
":",
"if",
"(",
"config",
"is",
"None",
")",
":",
"config",
"=",
"{",
"}",
"raise",
"exception_class",
"(",
"args",
","... | raise an exception . | train | false |
def transient_internet():
    """Return a context manager that treats transient network failures
    (timeouts and connection resets) via TransientResource wrappers.

    NOTE(review): contextlib.nested only exists on Python 2 -- this helper
    is Python 2 only; on Python 3 a multi-argument `with` would be needed.
    """
    time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)
    socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)
    ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)
    return contextlib.nested(time_out, socket_peer_reset, ioerror_peer_reset)
| [
"def",
"transient_internet",
"(",
")",
":",
"time_out",
"=",
"TransientResource",
"(",
"IOError",
",",
"errno",
"=",
"errno",
".",
"ETIMEDOUT",
")",
"socket_peer_reset",
"=",
"TransientResource",
"(",
"socket",
".",
"error",
",",
"errno",
"=",
"errno",
".",
... | return a context manager that raises resourcedenied when various issues with the internet connection manifest themselves as exceptions . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.