| id_within_dataset (int64, 1 to 55.5k) | snippet (stringlengths 19 to 14.2k) | tokens (listlengths 6 to 1.63k) | nl (stringlengths 6 to 352) | split_within_dataset (stringclasses, 1 value) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
3,898
|
def file_write_safe(path, data):
    fh = tempfile.NamedTemporaryFile(mode='w', delete=False)
    fh.write(data)
    fh.close()
    cmd = ('mv "%s" "%s"' % (fh.name, path))
    sh(cmd)
|
[
"def",
"file_write_safe",
"(",
"path",
",",
"data",
")",
":",
"fh",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"mode",
"=",
"'w'",
",",
"delete",
"=",
"False",
")",
"fh",
".",
"write",
"(",
"data",
")",
"fh",
".",
"close",
"(",
")",
"cmd",
"=",
"(",
"'mv \"%s\" \"%s\"'",
"%",
"(",
"fh",
".",
"name",
",",
"path",
")",
")",
"sh",
"(",
"cmd",
")"
] |
write data to a temporary file .
|
train
| false
|
3,901
|
def _validate_footer_timestamp(vdi_path):
    check_cmd = ('vhd-util check -n %(vdi_path)s -p' % locals())
    check_proc = make_subprocess(check_cmd, stdout=True, stderr=True)
    (out, err) = finish_subprocess(check_proc, check_cmd, ok_exit_codes=[0, 22])
    first_line = out.splitlines()[0].strip()
    if ('primary footer invalid' in first_line):
        raise Exception(("VDI '%(vdi_path)s' has timestamp in the future, ensure source and destination host machines have time set correctly" % locals()))
    elif (check_proc.returncode != 0):
        raise Exception(("Unexpected output '%(out)s' from vhd-util" % locals()))
|
[
"def",
"_validate_footer_timestamp",
"(",
"vdi_path",
")",
":",
"check_cmd",
"=",
"(",
"'vhd-util check -n %(vdi_path)s -p'",
"%",
"locals",
"(",
")",
")",
"check_proc",
"=",
"make_subprocess",
"(",
"check_cmd",
",",
"stdout",
"=",
"True",
",",
"stderr",
"=",
"True",
")",
"(",
"out",
",",
"err",
")",
"=",
"finish_subprocess",
"(",
"check_proc",
",",
"check_cmd",
",",
"ok_exit_codes",
"=",
"[",
"0",
",",
"22",
"]",
")",
"first_line",
"=",
"out",
".",
"splitlines",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"if",
"(",
"'primary footer invalid'",
"in",
"first_line",
")",
":",
"raise",
"Exception",
"(",
"(",
"\"VDI '%(vdi_path)s' has timestamp in the future, ensure source and destination host machines have time set correctly\"",
"%",
"locals",
"(",
")",
")",
")",
"elif",
"(",
"check_proc",
".",
"returncode",
"!=",
"0",
")",
":",
"raise",
"Exception",
"(",
"(",
"\"Unexpected output '%(out)s' from vhd-util\"",
"%",
"locals",
"(",
")",
")",
")"
] |
this check ensures that the timestamps listed in the vhd footer arent in the future .
|
train
| false
|
3,902
|
def test_parametric_styles_with_parameters():
    line = Line(style=RotateStyle('#de3804', step=12, max_=180, base_style=LightStyle))
    line.add('_', [1, 2, 3])
    line.x_labels = 'abc'
    assert line.render()
|
[
"def",
"test_parametric_styles_with_parameters",
"(",
")",
":",
"line",
"=",
"Line",
"(",
"style",
"=",
"RotateStyle",
"(",
"'#de3804'",
",",
"step",
"=",
"12",
",",
"max_",
"=",
"180",
",",
"base_style",
"=",
"LightStyle",
")",
")",
"line",
".",
"add",
"(",
"'_'",
",",
"[",
"1",
",",
"2",
",",
"3",
"]",
")",
"line",
".",
"x_labels",
"=",
"'abc'",
"assert",
"line",
".",
"render",
"(",
")"
] |
test a parametric style with parameters .
|
train
| false
|
3,903
|
def _create_diffs_for_sets(current_path, set_a, set_b):
    resulting_diffs = pvector([]).evolver()
    for item in set_a.difference(set_b):
        resulting_diffs.append(_Remove(path=current_path, item=item))
    for item in set_b.difference(set_a):
        resulting_diffs.append(_Add(path=current_path, item=item))
    return resulting_diffs.persistent()
|
[
"def",
"_create_diffs_for_sets",
"(",
"current_path",
",",
"set_a",
",",
"set_b",
")",
":",
"resulting_diffs",
"=",
"pvector",
"(",
"[",
"]",
")",
".",
"evolver",
"(",
")",
"for",
"item",
"in",
"set_a",
".",
"difference",
"(",
"set_b",
")",
":",
"resulting_diffs",
".",
"append",
"(",
"_Remove",
"(",
"path",
"=",
"current_path",
",",
"item",
"=",
"item",
")",
")",
"for",
"item",
"in",
"set_b",
".",
"difference",
"(",
"set_a",
")",
":",
"resulting_diffs",
".",
"append",
"(",
"_Add",
"(",
"path",
"=",
"current_path",
",",
"item",
"=",
"item",
")",
")",
"return",
"resulting_diffs",
".",
"persistent",
"(",
")"
] |
computes a series of _idiffchange s to turn set_a into set_b assuming that these sets are at current_path inside a nested pyrsistent object .
|
train
| false
|
3,906
|
@yield_once
def icollect_bears(bear_dir_glob, bear_globs, kinds, log_printer):
    for (bear_dir, dir_glob) in filter((lambda x: os.path.isdir(x[0])), icollect(bear_dir_glob)):
        bear_dir = glob_escape(bear_dir)
        for bear_glob in bear_globs:
            for matching_file in iglob(os.path.join(bear_dir, (bear_glob + '.py'))):
                try:
                    for bear in _import_bears(matching_file, kinds):
                        (yield (bear, bear_glob))
                except pkg_resources.VersionConflict as exception:
                    log_printer.log_exception('Unable to collect bears from {file} because there is a conflict with the version of a dependency you have installed. This may be resolved by creating a separate virtual environment for coala or running `pip install "{pkg}"`. Be aware that the latter solution might break other python packages that depend on the currently installed version.'.format(file=matching_file, pkg=exception.req), exception, log_level=LOG_LEVEL.WARNING)
                except BaseException as exception:
                    log_printer.log_exception('Unable to collect bears from {file}. Probably the file is malformed or the module code raises an exception.'.format(file=matching_file), exception, log_level=LOG_LEVEL.WARNING)
|
[
"@",
"yield_once",
"def",
"icollect_bears",
"(",
"bear_dir_glob",
",",
"bear_globs",
",",
"kinds",
",",
"log_printer",
")",
":",
"for",
"(",
"bear_dir",
",",
"dir_glob",
")",
"in",
"filter",
"(",
"(",
"lambda",
"x",
":",
"os",
".",
"path",
".",
"isdir",
"(",
"x",
"[",
"0",
"]",
")",
")",
",",
"icollect",
"(",
"bear_dir_glob",
")",
")",
":",
"bear_dir",
"=",
"glob_escape",
"(",
"bear_dir",
")",
"for",
"bear_glob",
"in",
"bear_globs",
":",
"for",
"matching_file",
"in",
"iglob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"bear_dir",
",",
"(",
"bear_glob",
"+",
"'.py'",
")",
")",
")",
":",
"try",
":",
"for",
"bear",
"in",
"_import_bears",
"(",
"matching_file",
",",
"kinds",
")",
":",
"(",
"yield",
"(",
"bear",
",",
"bear_glob",
")",
")",
"except",
"pkg_resources",
".",
"VersionConflict",
"as",
"exception",
":",
"log_printer",
".",
"log_exception",
"(",
"'Unable to collect bears from {file} because there is a conflict with the version of a dependency you have installed. This may be resolved by creating a separate virtual environment for coala or running `pip install \"{pkg}\"`. Be aware that the latter solution might break other python packages that depend on the currently installed version.'",
".",
"format",
"(",
"file",
"=",
"matching_file",
",",
"pkg",
"=",
"exception",
".",
"req",
")",
",",
"exception",
",",
"log_level",
"=",
"LOG_LEVEL",
".",
"WARNING",
")",
"except",
"BaseException",
"as",
"exception",
":",
"log_printer",
".",
"log_exception",
"(",
"'Unable to collect bears from {file}. Probably the file is malformed or the module code raises an exception.'",
".",
"format",
"(",
"file",
"=",
"matching_file",
")",
",",
"exception",
",",
"log_level",
"=",
"LOG_LEVEL",
".",
"WARNING",
")"
] |
collect all bears from bear directories that have a matching kind .
|
train
| false
|
3,909
|
def delete_subnet(subnet, profile=None):
    conn = _auth(profile)
    return conn.delete_subnet(subnet)
|
[
"def",
"delete_subnet",
"(",
"subnet",
",",
"profile",
"=",
"None",
")",
":",
"conn",
"=",
"_auth",
"(",
"profile",
")",
"return",
"conn",
".",
"delete_subnet",
"(",
"subnet",
")"
] |
given a subnet id or name .
|
train
| false
|
3,910
|
def looks_like_issubclass(obj, classname):
    t = obj
    if (t.__name__ == classname):
        return True
    for klass in t.__mro__:
        if (klass.__name__ == classname):
            return True
    return False
|
[
"def",
"looks_like_issubclass",
"(",
"obj",
",",
"classname",
")",
":",
"t",
"=",
"obj",
"if",
"(",
"t",
".",
"__name__",
"==",
"classname",
")",
":",
"return",
"True",
"for",
"klass",
"in",
"t",
".",
"__mro__",
":",
"if",
"(",
"klass",
".",
"__name__",
"==",
"classname",
")",
":",
"return",
"True",
"return",
"False"
] |
return true if the object has a class or superclass with the given class name .
|
train
| true
|
3,911
|
def kill_processtree(pid, signum):
    family_pids = get_processtree_pids(pid)
    for _pid in family_pids:
        os.kill(_pid, signum)
|
[
"def",
"kill_processtree",
"(",
"pid",
",",
"signum",
")",
":",
"family_pids",
"=",
"get_processtree_pids",
"(",
"pid",
")",
"for",
"_pid",
"in",
"family_pids",
":",
"os",
".",
"kill",
"(",
"_pid",
",",
"signum",
")"
] |
kill a process and all its descendants .
|
train
| false
|
3,913
|
@pytest.mark.skipif('not HAS_PATHLIB')
def test_votable_path_object():
    fpath = pathlib.Path(get_pkg_data_filename('data/names.xml'))
    table = parse(fpath).get_first_table().to_table()
    assert (len(table) == 1)
    assert (int(table[0][3]) == 266)
|
[
"@",
"pytest",
".",
"mark",
".",
"skipif",
"(",
"'not HAS_PATHLIB'",
")",
"def",
"test_votable_path_object",
"(",
")",
":",
"fpath",
"=",
"pathlib",
".",
"Path",
"(",
"get_pkg_data_filename",
"(",
"'data/names.xml'",
")",
")",
"table",
"=",
"parse",
"(",
"fpath",
")",
".",
"get_first_table",
"(",
")",
".",
"to_table",
"(",
")",
"assert",
"(",
"len",
"(",
"table",
")",
"==",
"1",
")",
"assert",
"(",
"int",
"(",
"table",
"[",
"0",
"]",
"[",
"3",
"]",
")",
"==",
"266",
")"
] |
testing when votable is passed as pathlib .
|
train
| false
|
3,916
|
def test_cache_clear_activated(config_stub, tmpdir):
    config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': False}}
    disk_cache = cache.DiskCache(str(tmpdir))
    assert (disk_cache.cacheSize() == 0)
    preload_cache(disk_cache)
    assert (disk_cache.cacheSize() != 0)
    disk_cache.clear()
    assert (disk_cache.cacheSize() == 0)
|
[
"def",
"test_cache_clear_activated",
"(",
"config_stub",
",",
"tmpdir",
")",
":",
"config_stub",
".",
"data",
"=",
"{",
"'storage'",
":",
"{",
"'cache-size'",
":",
"1024",
"}",
",",
"'general'",
":",
"{",
"'private-browsing'",
":",
"False",
"}",
"}",
"disk_cache",
"=",
"cache",
".",
"DiskCache",
"(",
"str",
"(",
"tmpdir",
")",
")",
"assert",
"(",
"disk_cache",
".",
"cacheSize",
"(",
")",
"==",
"0",
")",
"preload_cache",
"(",
"disk_cache",
")",
"assert",
"(",
"disk_cache",
".",
"cacheSize",
"(",
")",
"!=",
"0",
")",
"disk_cache",
".",
"clear",
"(",
")",
"assert",
"(",
"disk_cache",
".",
"cacheSize",
"(",
")",
"==",
"0",
")"
] |
test if cache is empty after clearing it .
|
train
| false
|
3,917
|
def get_sr_path(session):
    sr_ref = safe_find_sr(session)
    sr_rec = session.call_xenapi('SR.get_record', sr_ref)
    sr_uuid = sr_rec['uuid']
    return os.path.join(CONF.xenapi_sr_base_path, sr_uuid)
|
[
"def",
"get_sr_path",
"(",
"session",
")",
":",
"sr_ref",
"=",
"safe_find_sr",
"(",
"session",
")",
"sr_rec",
"=",
"session",
".",
"call_xenapi",
"(",
"'SR.get_record'",
",",
"sr_ref",
")",
"sr_uuid",
"=",
"sr_rec",
"[",
"'uuid'",
"]",
"return",
"os",
".",
"path",
".",
"join",
"(",
"CONF",
".",
"xenapi_sr_base_path",
",",
"sr_uuid",
")"
] |
return the path to our storage repository this is used when were dealing with vhds directly .
|
train
| false
|
3,919
|
def is_ax_end(r):
    if isinstance(r, GlyphRenderer):
        try:
            if (r.data_source.data['name'] == 'ax_end'):
                return True
        except KeyError:
            return False
    else:
        return False
|
[
"def",
"is_ax_end",
"(",
"r",
")",
":",
"if",
"isinstance",
"(",
"r",
",",
"GlyphRenderer",
")",
":",
"try",
":",
"if",
"(",
"r",
".",
"data_source",
".",
"data",
"[",
"'name'",
"]",
"==",
"'ax_end'",
")",
":",
"return",
"True",
"except",
"KeyError",
":",
"return",
"False",
"else",
":",
"return",
"False"
] |
check if the name in the glyphs datasource is ax_end .
|
train
| false
|
3,921
|
def check_action_edition_permission(view_func):
    def decorate(request, *args, **kwargs):
        action = kwargs.get('action')
        Job.objects.can_edit_or_exception(request, action.workflow)
        return view_func(request, *args, **kwargs)
    return wraps(view_func)(decorate)
|
[
"def",
"check_action_edition_permission",
"(",
"view_func",
")",
":",
"def",
"decorate",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"action",
"=",
"kwargs",
".",
"get",
"(",
"'action'",
")",
"Job",
".",
"objects",
".",
"can_edit_or_exception",
"(",
"request",
",",
"action",
".",
"workflow",
")",
"return",
"view_func",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"wraps",
"(",
"view_func",
")",
"(",
"decorate",
")"
] |
decorator ensuring that the user has the permissions to modify a workflow action .
|
train
| false
|
3,923
|
def connect_machinelearning(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    from boto.machinelearning.layer1 import MachineLearningConnection
    return MachineLearningConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs)
|
[
"def",
"connect_machinelearning",
"(",
"aws_access_key_id",
"=",
"None",
",",
"aws_secret_access_key",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"from",
"boto",
".",
"machinelearning",
".",
"layer1",
"import",
"MachineLearningConnection",
"return",
"MachineLearningConnection",
"(",
"aws_access_key_id",
"=",
"aws_access_key_id",
",",
"aws_secret_access_key",
"=",
"aws_secret_access_key",
",",
"**",
"kwargs",
")"
] |
connect to amazon machine learning service rtype: :class:boto .
|
train
| false
|
3,924
|
def normalize_provider_facts(provider, metadata):
    if ((provider is None) or (metadata is None)):
        return {}
    facts = dict(name=provider, metadata=metadata, network=dict(interfaces=[], ipv6_enabled=False))
    if (provider == 'gce'):
        facts = normalize_gce_facts(metadata, facts)
    elif (provider == 'aws'):
        facts = normalize_aws_facts(metadata, facts)
    elif (provider == 'openstack'):
        facts = normalize_openstack_facts(metadata, facts)
    return facts
|
[
"def",
"normalize_provider_facts",
"(",
"provider",
",",
"metadata",
")",
":",
"if",
"(",
"(",
"provider",
"is",
"None",
")",
"or",
"(",
"metadata",
"is",
"None",
")",
")",
":",
"return",
"{",
"}",
"facts",
"=",
"dict",
"(",
"name",
"=",
"provider",
",",
"metadata",
"=",
"metadata",
",",
"network",
"=",
"dict",
"(",
"interfaces",
"=",
"[",
"]",
",",
"ipv6_enabled",
"=",
"False",
")",
")",
"if",
"(",
"provider",
"==",
"'gce'",
")",
":",
"facts",
"=",
"normalize_gce_facts",
"(",
"metadata",
",",
"facts",
")",
"elif",
"(",
"provider",
"==",
"'aws'",
")",
":",
"facts",
"=",
"normalize_aws_facts",
"(",
"metadata",
",",
"facts",
")",
"elif",
"(",
"provider",
"==",
"'openstack'",
")",
":",
"facts",
"=",
"normalize_openstack_facts",
"(",
"metadata",
",",
"facts",
")",
"return",
"facts"
] |
normalize provider facts args: provider : host provider metadata : provider metadata returns: dict: the normalized provider facts .
|
train
| false
|
3,925
|
def reinit():
    global _initialized, _loaded_packages
    _initialized = False
    del _loaded_packages[:]
    REGISTERED_TYPES.clear()
    _init()
|
[
"def",
"reinit",
"(",
")",
":",
"global",
"_initialized",
",",
"_loaded_packages",
"_initialized",
"=",
"False",
"del",
"_loaded_packages",
"[",
":",
"]",
"REGISTERED_TYPES",
".",
"clear",
"(",
")",
"_init",
"(",
")"
] |
prepare the gevent hub to run in a new process .
|
train
| false
|
3,926
|
def get_profile_visibility(user_profile, user, configuration=None):
    if user_profile.requires_parental_consent():
        return PRIVATE_VISIBILITY
    if (not configuration):
        configuration = settings.ACCOUNT_VISIBILITY_CONFIGURATION
    profile_privacy = UserPreference.get_value(user, ACCOUNT_VISIBILITY_PREF_KEY)
    return (profile_privacy if profile_privacy else configuration.get('default_visibility'))
|
[
"def",
"get_profile_visibility",
"(",
"user_profile",
",",
"user",
",",
"configuration",
"=",
"None",
")",
":",
"if",
"user_profile",
".",
"requires_parental_consent",
"(",
")",
":",
"return",
"PRIVATE_VISIBILITY",
"if",
"(",
"not",
"configuration",
")",
":",
"configuration",
"=",
"settings",
".",
"ACCOUNT_VISIBILITY_CONFIGURATION",
"profile_privacy",
"=",
"UserPreference",
".",
"get_value",
"(",
"user",
",",
"ACCOUNT_VISIBILITY_PREF_KEY",
")",
"return",
"(",
"profile_privacy",
"if",
"profile_privacy",
"else",
"configuration",
".",
"get",
"(",
"'default_visibility'",
")",
")"
] |
returns the visibility level for the specified user profile .
|
train
| false
|
3,928
|
def interpolate_cache_slug(string):
    cache_slug = utils.get_asset_dir_prefix()
    return parse_string(string, {'cache_slug': cache_slug})
|
[
"def",
"interpolate_cache_slug",
"(",
"string",
")",
":",
"cache_slug",
"=",
"utils",
".",
"get_asset_dir_prefix",
"(",
")",
"return",
"parse_string",
"(",
"string",
",",
"{",
"'cache_slug'",
":",
"cache_slug",
"}",
")"
] |
parses the cache slug in the input string .
|
train
| false
|
3,929
|
def compute_a(n):
    a = [(mp.sqrt(2) / 2)]
    for k in range(1, n):
        ak = (a[(-1)] / k)
        for j in range(1, len(a)):
            ak -= ((a[j] * a[(- j)]) / (j + 1))
        ak /= (a[0] * (1 + (mp.mpf(1) / (k + 1))))
        a.append(ak)
    return a
|
[
"def",
"compute_a",
"(",
"n",
")",
":",
"a",
"=",
"[",
"(",
"mp",
".",
"sqrt",
"(",
"2",
")",
"/",
"2",
")",
"]",
"for",
"k",
"in",
"range",
"(",
"1",
",",
"n",
")",
":",
"ak",
"=",
"(",
"a",
"[",
"(",
"-",
"1",
")",
"]",
"/",
"k",
")",
"for",
"j",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"a",
")",
")",
":",
"ak",
"-=",
"(",
"(",
"a",
"[",
"j",
"]",
"*",
"a",
"[",
"(",
"-",
"j",
")",
"]",
")",
"/",
"(",
"j",
"+",
"1",
")",
")",
"ak",
"/=",
"(",
"a",
"[",
"0",
"]",
"*",
"(",
"1",
"+",
"(",
"mp",
".",
"mpf",
"(",
"1",
")",
"/",
"(",
"k",
"+",
"1",
")",
")",
")",
")",
"a",
".",
"append",
"(",
"ak",
")",
"return",
"a"
] |
a_k from dlmf 5 .
|
train
| false
|
3,930
|
def _set_color_mode_to_rgb(image):
    return image.convert('RGB')
|
[
"def",
"_set_color_mode_to_rgb",
"(",
"image",
")",
":",
"return",
"image",
".",
"convert",
"(",
"'RGB'",
")"
] |
given a pil .
|
train
| false
|
3,931
|
def is_nested_record(measure):
    if (not isrecord(measure)):
        raise TypeError(('Input must be a Record type got %s of type %r' % (measure, type(measure).__name__)))
    return (not all((isscalar(getattr(t, 'key', t)) for t in measure.types)))
|
[
"def",
"is_nested_record",
"(",
"measure",
")",
":",
"if",
"(",
"not",
"isrecord",
"(",
"measure",
")",
")",
":",
"raise",
"TypeError",
"(",
"(",
"'Input must be a Record type got %s of type %r'",
"%",
"(",
"measure",
",",
"type",
"(",
"measure",
")",
".",
"__name__",
")",
")",
")",
"return",
"(",
"not",
"all",
"(",
"(",
"isscalar",
"(",
"getattr",
"(",
"t",
",",
"'key'",
",",
"t",
")",
")",
"for",
"t",
"in",
"measure",
".",
"types",
")",
")",
")"
] |
predicate for checking whether measure is a nested record dshape examples .
|
train
| false
|
3,932
|
@handle_response_format
@treeio_login_required
@module_admin_required()
def user_delete(request, user_id, response_format='html'):
    profile = get_object_or_404(User, pk=user_id)
    message = ''
    if (profile == request.user.profile):
        message = _('This is you!')
    elif request.POST:
        if ('delete' in request.POST):
            profile.delete()
            return HttpResponseRedirect(reverse('core_admin_index_users'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('core_admin_user_view', args=[profile.id]))
    return render_to_response('core/administration/user_delete', {'profile': profile, 'message': message}, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"handle_response_format",
"@",
"treeio_login_required",
"@",
"module_admin_required",
"(",
")",
"def",
"user_delete",
"(",
"request",
",",
"user_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"profile",
"=",
"get_object_or_404",
"(",
"User",
",",
"pk",
"=",
"user_id",
")",
"message",
"=",
"''",
"if",
"(",
"profile",
"==",
"request",
".",
"user",
".",
"profile",
")",
":",
"message",
"=",
"_",
"(",
"'This is you!'",
")",
"elif",
"request",
".",
"POST",
":",
"if",
"(",
"'delete'",
"in",
"request",
".",
"POST",
")",
":",
"profile",
".",
"delete",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'core_admin_index_users'",
")",
")",
"elif",
"(",
"'cancel'",
"in",
"request",
".",
"POST",
")",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'core_admin_user_view'",
",",
"args",
"=",
"[",
"profile",
".",
"id",
"]",
")",
")",
"return",
"render_to_response",
"(",
"'core/administration/user_delete'",
",",
"{",
"'profile'",
":",
"profile",
",",
"'message'",
":",
"message",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
delete a user cli examples: .
|
train
| false
|
3,933
|
def add_output_compression_type(parser):
    group = parser.add_mutually_exclusive_group()
    group.add_argument(u'--gzip', default=False, action=u'store_true', help=u'Compress output using gzip')
    group.add_argument(u'--bzip', default=False, action=u'store_true', help=u'Compress output using bzip2')
|
[
"def",
"add_output_compression_type",
"(",
"parser",
")",
":",
"group",
"=",
"parser",
".",
"add_mutually_exclusive_group",
"(",
")",
"group",
".",
"add_argument",
"(",
"u'--gzip'",
",",
"default",
"=",
"False",
",",
"action",
"=",
"u'store_true'",
",",
"help",
"=",
"u'Compress output using gzip'",
")",
"group",
".",
"add_argument",
"(",
"u'--bzip'",
",",
"default",
"=",
"False",
",",
"action",
"=",
"u'store_true'",
",",
"help",
"=",
"u'Compress output using bzip2'",
")"
] |
add compression arguments to a parser object .
|
train
| false
|
3,935
|
def add_variables_to_context(generator):
    context = generator.context
    context['relpath_to_site'] = relpath_to_site
    context['main_siteurl'] = _MAIN_SITEURL
    context['main_lang'] = _MAIN_LANG
    context['lang_siteurls'] = _SITE_DB
    current_lang = generator.settings['DEFAULT_LANG']
    extra_siteurls = _SITE_DB.copy()
    extra_siteurls.pop(current_lang)
    context['extra_siteurls'] = extra_siteurls
|
[
"def",
"add_variables_to_context",
"(",
"generator",
")",
":",
"context",
"=",
"generator",
".",
"context",
"context",
"[",
"'relpath_to_site'",
"]",
"=",
"relpath_to_site",
"context",
"[",
"'main_siteurl'",
"]",
"=",
"_MAIN_SITEURL",
"context",
"[",
"'main_lang'",
"]",
"=",
"_MAIN_LANG",
"context",
"[",
"'lang_siteurls'",
"]",
"=",
"_SITE_DB",
"current_lang",
"=",
"generator",
".",
"settings",
"[",
"'DEFAULT_LANG'",
"]",
"extra_siteurls",
"=",
"_SITE_DB",
".",
"copy",
"(",
")",
"extra_siteurls",
".",
"pop",
"(",
"current_lang",
")",
"context",
"[",
"'extra_siteurls'",
"]",
"=",
"extra_siteurls"
] |
adds useful iterable variables to template context .
|
train
| true
|
3,936
|
def p_optstep(p):
    if (len(p) == 3):
        p[0] = p[2]
    else:
        p[0] = None
|
[
"def",
"p_optstep",
"(",
"p",
")",
":",
"if",
"(",
"len",
"(",
"p",
")",
"==",
"3",
")",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"2",
"]",
"else",
":",
"p",
"[",
"0",
"]",
"=",
"None"
] |
optstep : step expr | empty .
|
train
| false
|
3,937
|
def NumpyAsParameter(numpy_array):
    assert (numpy_array.dtype == 'float32'), 'Saved arrays should be float32.'
    return numpy_array.tostring()
|
[
"def",
"NumpyAsParameter",
"(",
"numpy_array",
")",
":",
"assert",
"(",
"numpy_array",
".",
"dtype",
"==",
"'float32'",
")",
",",
"'Saved arrays should be float32.'",
"return",
"numpy_array",
".",
"tostring",
"(",
")"
] |
converts a numpy array into a serialized parameter string .
|
train
| false
|
3,940
|
def aliasmbcs():
    if (sys.platform == 'win32'):
        import locale, codecs
        enc = locale.getdefaultlocale()[1]
        if enc.startswith('cp'):
            try:
                codecs.lookup(enc)
            except LookupError:
                import encodings
                encodings._cache[enc] = encodings._unknown
                encodings.aliases.aliases[enc] = 'mbcs'
|
[
"def",
"aliasmbcs",
"(",
")",
":",
"if",
"(",
"sys",
".",
"platform",
"==",
"'win32'",
")",
":",
"import",
"locale",
",",
"codecs",
"enc",
"=",
"locale",
".",
"getdefaultlocale",
"(",
")",
"[",
"1",
"]",
"if",
"enc",
".",
"startswith",
"(",
"'cp'",
")",
":",
"try",
":",
"codecs",
".",
"lookup",
"(",
"enc",
")",
"except",
"LookupError",
":",
"import",
"encodings",
"encodings",
".",
"_cache",
"[",
"enc",
"]",
"=",
"encodings",
".",
"_unknown",
"encodings",
".",
"aliases",
".",
"aliases",
"[",
"enc",
"]",
"=",
"'mbcs'"
] |
on windows .
|
train
| true
|
3,941
|
def wtforms2_and_up(func):
    if (int(wtforms.__version__[0]) < 2):
        func.__test__ = False
    return func
|
[
"def",
"wtforms2_and_up",
"(",
"func",
")",
":",
"if",
"(",
"int",
"(",
"wtforms",
".",
"__version__",
"[",
"0",
"]",
")",
"<",
"2",
")",
":",
"func",
".",
"__test__",
"=",
"False",
"return",
"func"
] |
decorator for skipping test if wtforms <2 .
|
train
| false
|
3,942
|
@utils.arg('host', metavar='<host>', help=_('Name of host.'))
@utils.arg('action', metavar='<action>', choices=['set', 'delete'], help=_("Actions: 'set' or 'delete'"))
@utils.arg('metadata', metavar='<key=value>', nargs='+', action='append', default=[], help=_('Metadata to set or delete (only key is necessary on delete)'))
def do_host_meta(cs, args):
    hypervisors = cs.hypervisors.search(args.host, servers=True)
    for hyper in hypervisors:
        metadata = _extract_metadata(args)
        if hasattr(hyper, 'servers'):
            for server in hyper.servers:
                if (args.action == 'set'):
                    cs.servers.set_meta(server['uuid'], metadata)
                elif (args.action == 'delete'):
                    cs.servers.delete_meta(server['uuid'], metadata.keys())
|
[
"@",
"utils",
".",
"arg",
"(",
"'host'",
",",
"metavar",
"=",
"'<host>'",
",",
"help",
"=",
"_",
"(",
"'Name of host.'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'action'",
",",
"metavar",
"=",
"'<action>'",
",",
"choices",
"=",
"[",
"'set'",
",",
"'delete'",
"]",
",",
"help",
"=",
"_",
"(",
"\"Actions: 'set' or 'delete'\"",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'metadata'",
",",
"metavar",
"=",
"'<key=value>'",
",",
"nargs",
"=",
"'+'",
",",
"action",
"=",
"'append'",
",",
"default",
"=",
"[",
"]",
",",
"help",
"=",
"_",
"(",
"'Metadata to set or delete (only key is necessary on delete)'",
")",
")",
"def",
"do_host_meta",
"(",
"cs",
",",
"args",
")",
":",
"hypervisors",
"=",
"cs",
".",
"hypervisors",
".",
"search",
"(",
"args",
".",
"host",
",",
"servers",
"=",
"True",
")",
"for",
"hyper",
"in",
"hypervisors",
":",
"metadata",
"=",
"_extract_metadata",
"(",
"args",
")",
"if",
"hasattr",
"(",
"hyper",
",",
"'servers'",
")",
":",
"for",
"server",
"in",
"hyper",
".",
"servers",
":",
"if",
"(",
"args",
".",
"action",
"==",
"'set'",
")",
":",
"cs",
".",
"servers",
".",
"set_meta",
"(",
"server",
"[",
"'uuid'",
"]",
",",
"metadata",
")",
"elif",
"(",
"args",
".",
"action",
"==",
"'delete'",
")",
":",
"cs",
".",
"servers",
".",
"delete_meta",
"(",
"server",
"[",
"'uuid'",
"]",
",",
"metadata",
".",
"keys",
"(",
")",
")"
] |
set or delete metadata on all instances of a host .
|
train
| false
|
3,944
|
def fileobj_is_binary(f):
    if hasattr(f, 'binary'):
        return f.binary
    if ((io is not None) and isinstance(f, io.TextIOBase)):
        return False
    mode = fileobj_mode(f)
    if mode:
        return ('b' in mode)
    else:
        return True
|
[
"def",
"fileobj_is_binary",
"(",
"f",
")",
":",
"if",
"hasattr",
"(",
"f",
",",
"'binary'",
")",
":",
"return",
"f",
".",
"binary",
"if",
"(",
"(",
"io",
"is",
"not",
"None",
")",
"and",
"isinstance",
"(",
"f",
",",
"io",
".",
"TextIOBase",
")",
")",
":",
"return",
"False",
"mode",
"=",
"fileobj_mode",
"(",
"f",
")",
"if",
"mode",
":",
"return",
"(",
"'b'",
"in",
"mode",
")",
"else",
":",
"return",
"True"
] |
returns true if the give file or file-like object has a file open in binary mode .
|
train
| false
|
3,945
|
def _read_structure(f, array_desc, struct_desc):
    nrows = array_desc['nelements']
    columns = struct_desc['tagtable']
    dtype = []
    for col in columns:
        if (col['structure'] or col['array']):
            dtype.append(((col['name'].lower(), col['name']), np.object_))
        elif (col['typecode'] in DTYPE_DICT):
            dtype.append(((col['name'].lower(), col['name']), DTYPE_DICT[col['typecode']]))
        else:
            raise Exception(('Variable type %i not implemented' % col['typecode']))
    structure = np.recarray((nrows,), dtype=dtype)
    for i in range(nrows):
        for col in columns:
            dtype = col['typecode']
            if col['structure']:
                structure[col['name']][i] = _read_structure(f, struct_desc['arrtable'][col['name']], struct_desc['structtable'][col['name']])
            elif col['array']:
                structure[col['name']][i] = _read_array(f, dtype, struct_desc['arrtable'][col['name']])
            else:
                structure[col['name']][i] = _read_data(f, dtype)
    if (array_desc['ndims'] > 1):
        dims = array_desc['dims'][:int(array_desc['ndims'])]
        dims.reverse()
        structure = structure.reshape(dims)
    return structure
|
[
"def",
"_read_structure",
"(",
"f",
",",
"array_desc",
",",
"struct_desc",
")",
":",
"nrows",
"=",
"array_desc",
"[",
"'nelements'",
"]",
"columns",
"=",
"struct_desc",
"[",
"'tagtable'",
"]",
"dtype",
"=",
"[",
"]",
"for",
"col",
"in",
"columns",
":",
"if",
"(",
"col",
"[",
"'structure'",
"]",
"or",
"col",
"[",
"'array'",
"]",
")",
":",
"dtype",
".",
"append",
"(",
"(",
"(",
"col",
"[",
"'name'",
"]",
".",
"lower",
"(",
")",
",",
"col",
"[",
"'name'",
"]",
")",
",",
"np",
".",
"object_",
")",
")",
"elif",
"(",
"col",
"[",
"'typecode'",
"]",
"in",
"DTYPE_DICT",
")",
":",
"dtype",
".",
"append",
"(",
"(",
"(",
"col",
"[",
"'name'",
"]",
".",
"lower",
"(",
")",
",",
"col",
"[",
"'name'",
"]",
")",
",",
"DTYPE_DICT",
"[",
"col",
"[",
"'typecode'",
"]",
"]",
")",
")",
"else",
":",
"raise",
"Exception",
"(",
"(",
"'Variable type %i not implemented'",
"%",
"col",
"[",
"'typecode'",
"]",
")",
")",
"structure",
"=",
"np",
".",
"recarray",
"(",
"(",
"nrows",
",",
")",
",",
"dtype",
"=",
"dtype",
")",
"for",
"i",
"in",
"range",
"(",
"nrows",
")",
":",
"for",
"col",
"in",
"columns",
":",
"dtype",
"=",
"col",
"[",
"'typecode'",
"]",
"if",
"col",
"[",
"'structure'",
"]",
":",
"structure",
"[",
"col",
"[",
"'name'",
"]",
"]",
"[",
"i",
"]",
"=",
"_read_structure",
"(",
"f",
",",
"struct_desc",
"[",
"'arrtable'",
"]",
"[",
"col",
"[",
"'name'",
"]",
"]",
",",
"struct_desc",
"[",
"'structtable'",
"]",
"[",
"col",
"[",
"'name'",
"]",
"]",
")",
"elif",
"col",
"[",
"'array'",
"]",
":",
"structure",
"[",
"col",
"[",
"'name'",
"]",
"]",
"[",
"i",
"]",
"=",
"_read_array",
"(",
"f",
",",
"dtype",
",",
"struct_desc",
"[",
"'arrtable'",
"]",
"[",
"col",
"[",
"'name'",
"]",
"]",
")",
"else",
":",
"structure",
"[",
"col",
"[",
"'name'",
"]",
"]",
"[",
"i",
"]",
"=",
"_read_data",
"(",
"f",
",",
"dtype",
")",
"if",
"(",
"array_desc",
"[",
"'ndims'",
"]",
">",
"1",
")",
":",
"dims",
"=",
"array_desc",
"[",
"'dims'",
"]",
"[",
":",
"int",
"(",
"array_desc",
"[",
"'ndims'",
"]",
")",
"]",
"dims",
".",
"reverse",
"(",
")",
"structure",
"=",
"structure",
".",
"reshape",
"(",
"dims",
")",
"return",
"structure"
] |
read a structure .
|
train
| false
|
3,946
|
def provider_identity(request):
    response = render_to_response('identity.xml', {'url': get_xrds_url('login', request)}, content_type='text/xml')
    response['X-XRDS-Location'] = get_xrds_url('identity', request)
    return response
|
[
"def",
"provider_identity",
"(",
"request",
")",
":",
"response",
"=",
"render_to_response",
"(",
"'identity.xml'",
",",
"{",
"'url'",
":",
"get_xrds_url",
"(",
"'login'",
",",
"request",
")",
"}",
",",
"content_type",
"=",
"'text/xml'",
")",
"response",
"[",
"'X-XRDS-Location'",
"]",
"=",
"get_xrds_url",
"(",
"'identity'",
",",
"request",
")",
"return",
"response"
] |
xrds for identity discovery .
|
train
| false
|
3,947
|
def show_driver(devname):
    try:
        module = ethtool.get_module(devname)
    except IOError:
        log.error('Driver information not implemented on {0}'.format(devname))
        return 'Not implemented'
    try:
        businfo = ethtool.get_businfo(devname)
    except IOError:
        log.error('Bus information no available on {0}'.format(devname))
        return 'Not available'
    ret = {'driver': module, 'bus_info': businfo}
    return ret
|
[
"def",
"show_driver",
"(",
"devname",
")",
":",
"try",
":",
"module",
"=",
"ethtool",
".",
"get_module",
"(",
"devname",
")",
"except",
"IOError",
":",
"log",
".",
"error",
"(",
"'Driver information not implemented on {0}'",
".",
"format",
"(",
"devname",
")",
")",
"return",
"'Not implemented'",
"try",
":",
"businfo",
"=",
"ethtool",
".",
"get_businfo",
"(",
"devname",
")",
"except",
"IOError",
":",
"log",
".",
"error",
"(",
"'Bus information no available on {0}'",
".",
"format",
"(",
"devname",
")",
")",
"return",
"'Not available'",
"ret",
"=",
"{",
"'driver'",
":",
"module",
",",
"'bus_info'",
":",
"businfo",
"}",
"return",
"ret"
] |
queries the specified network device for associated driver information cli example: .
|
train
| true
|
3,948
|
def floating_ip_get_by_fixed_address(context, fixed_address):
    return IMPL.floating_ip_get_by_fixed_address(context, fixed_address)
|
[
"def",
"floating_ip_get_by_fixed_address",
"(",
"context",
",",
"fixed_address",
")",
":",
"return",
"IMPL",
".",
"floating_ip_get_by_fixed_address",
"(",
"context",
",",
"fixed_address",
")"
] |
get a floating ips by fixed address .
|
train
| false
|
3,949
|
def inplace_swap_row_csc(X, m, n):
    for t in [m, n]:
        if isinstance(t, np.ndarray):
            raise TypeError('m and n should be valid integers')
    if (m < 0):
        m += X.shape[0]
    if (n < 0):
        n += X.shape[0]
    m_mask = (X.indices == m)
    X.indices[(X.indices == n)] = m
    X.indices[m_mask] = n
|
[
"def",
"inplace_swap_row_csc",
"(",
"X",
",",
"m",
",",
"n",
")",
":",
"for",
"t",
"in",
"[",
"m",
",",
"n",
"]",
":",
"if",
"isinstance",
"(",
"t",
",",
"np",
".",
"ndarray",
")",
":",
"raise",
"TypeError",
"(",
"'m and n should be valid integers'",
")",
"if",
"(",
"m",
"<",
"0",
")",
":",
"m",
"+=",
"X",
".",
"shape",
"[",
"0",
"]",
"if",
"(",
"n",
"<",
"0",
")",
":",
"n",
"+=",
"X",
".",
"shape",
"[",
"0",
"]",
"m_mask",
"=",
"(",
"X",
".",
"indices",
"==",
"m",
")",
"X",
".",
"indices",
"[",
"(",
"X",
".",
"indices",
"==",
"n",
")",
"]",
"=",
"m",
"X",
".",
"indices",
"[",
"m_mask",
"]",
"=",
"n"
] |
swaps two rows of a csc matrix in-place .
|
train
| false
|
3,950
|
def kl_div(input, target, size_average=True):
    return _functions.thnn.KLDivLoss(size_average)(input, target)
|
[
"def",
"kl_div",
"(",
"input",
",",
"target",
",",
"size_average",
"=",
"True",
")",
":",
"return",
"_functions",
".",
"thnn",
".",
"KLDivLoss",
"(",
"size_average",
")",
"(",
"input",
",",
"target",
")"
] |
the kullback-leibler divergence_ loss .
|
train
| false
|
3,951
|
def _is_s3_url(url):
    try:
        return (parse_url(url).scheme in ['s3', 's3n', 's3a'])
    except:
        return False
|
[
"def",
"_is_s3_url",
"(",
"url",
")",
":",
"try",
":",
"return",
"(",
"parse_url",
"(",
"url",
")",
".",
"scheme",
"in",
"[",
"'s3'",
",",
"'s3n'",
",",
"'s3a'",
"]",
")",
"except",
":",
"return",
"False"
] |
check for an s3 .
|
train
| false
|
3,954
|
def build_cors_request(url, origin_header_value):
    headers = Headers()
    if (origin_header_value is not None):
        headers['Origin'] = origin_header_value.strip()
    forged_req = FuzzableRequest(url, 'GET', headers=headers)
    return forged_req
|
[
"def",
"build_cors_request",
"(",
"url",
",",
"origin_header_value",
")",
":",
"headers",
"=",
"Headers",
"(",
")",
"if",
"(",
"origin_header_value",
"is",
"not",
"None",
")",
":",
"headers",
"[",
"'Origin'",
"]",
"=",
"origin_header_value",
".",
"strip",
"(",
")",
"forged_req",
"=",
"FuzzableRequest",
"(",
"url",
",",
"'GET'",
",",
"headers",
"=",
"headers",
")",
"return",
"forged_req"
] |
method to generate a "get" cors http request based on input context .
|
train
| false
|
3,955
|
def should_renew(config, lineage):
    if config.renew_by_default:
        logger.debug('Auto-renewal forced with --force-renewal...')
        return True
    if lineage.should_autorenew(interactive=True):
        logger.info('Cert is due for renewal, auto-renewing...')
        return True
    if config.dry_run:
        logger.info('Cert not due for renewal, but simulating renewal for dry run')
        return True
    logger.info('Cert not yet due for renewal')
    return False
|
[
"def",
"should_renew",
"(",
"config",
",",
"lineage",
")",
":",
"if",
"config",
".",
"renew_by_default",
":",
"logger",
".",
"debug",
"(",
"'Auto-renewal forced with --force-renewal...'",
")",
"return",
"True",
"if",
"lineage",
".",
"should_autorenew",
"(",
"interactive",
"=",
"True",
")",
":",
"logger",
".",
"info",
"(",
"'Cert is due for renewal, auto-renewing...'",
")",
"return",
"True",
"if",
"config",
".",
"dry_run",
":",
"logger",
".",
"info",
"(",
"'Cert not due for renewal, but simulating renewal for dry run'",
")",
"return",
"True",
"logger",
".",
"info",
"(",
"'Cert not yet due for renewal'",
")",
"return",
"False"
] |
return true if any of the circumstances for automatic renewal apply .
|
train
| false
|
3,956
|
def get_gridline_path(world, pixel):
    mask = (np.isnan(pixel[:, 0]) | np.isnan(pixel[:, 1]))
    codes = np.zeros(world.shape[0], dtype=np.uint8)
    codes[:] = Path.LINETO
    codes[0] = Path.MOVETO
    codes[mask] = Path.MOVETO
    codes[1:][mask[:(-1)]] = Path.MOVETO
    path = Path(pixel, codes=codes)
    return path
|
[
"def",
"get_gridline_path",
"(",
"world",
",",
"pixel",
")",
":",
"mask",
"=",
"(",
"np",
".",
"isnan",
"(",
"pixel",
"[",
":",
",",
"0",
"]",
")",
"|",
"np",
".",
"isnan",
"(",
"pixel",
"[",
":",
",",
"1",
"]",
")",
")",
"codes",
"=",
"np",
".",
"zeros",
"(",
"world",
".",
"shape",
"[",
"0",
"]",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
"codes",
"[",
":",
"]",
"=",
"Path",
".",
"LINETO",
"codes",
"[",
"0",
"]",
"=",
"Path",
".",
"MOVETO",
"codes",
"[",
"mask",
"]",
"=",
"Path",
".",
"MOVETO",
"codes",
"[",
"1",
":",
"]",
"[",
"mask",
"[",
":",
"(",
"-",
"1",
")",
"]",
"]",
"=",
"Path",
".",
"MOVETO",
"path",
"=",
"Path",
"(",
"pixel",
",",
"codes",
"=",
"codes",
")",
"return",
"path"
] |
draw a grid line parameters lon_lat : ~numpy .
|
train
| false
|
3,957
|
def _get_all_widgets(request):
    user = request.user.profile
    perspective = user.get_perspective()
    modules = perspective.get_modules()
    widgets = {}
    for module in modules:
        try:
            import_name = (module.name + '.widgets')
            module_widget_lib = __import__(import_name, fromlist=[str(module.name)])
            module_widgets = module_widget_lib.get_widgets(request)
            for name in module_widgets:
                if ('module_name' not in module_widgets[name]):
                    module_widgets[name]['module_name'] = module.name
                if ('module_title' not in module_widgets[name]):
                    module_widgets[name]['module_title'] = module.title
                module_widgets[name] = _preprocess_widget(module_widgets[name], name)
            widgets.update(module_widgets)
        except ImportError:
            pass
        except AttributeError:
            pass
    return widgets
|
[
"def",
"_get_all_widgets",
"(",
"request",
")",
":",
"user",
"=",
"request",
".",
"user",
".",
"profile",
"perspective",
"=",
"user",
".",
"get_perspective",
"(",
")",
"modules",
"=",
"perspective",
".",
"get_modules",
"(",
")",
"widgets",
"=",
"{",
"}",
"for",
"module",
"in",
"modules",
":",
"try",
":",
"import_name",
"=",
"(",
"module",
".",
"name",
"+",
"'.widgets'",
")",
"module_widget_lib",
"=",
"__import__",
"(",
"import_name",
",",
"fromlist",
"=",
"[",
"str",
"(",
"module",
".",
"name",
")",
"]",
")",
"module_widgets",
"=",
"module_widget_lib",
".",
"get_widgets",
"(",
"request",
")",
"for",
"name",
"in",
"module_widgets",
":",
"if",
"(",
"'module_name'",
"not",
"in",
"module_widgets",
"[",
"name",
"]",
")",
":",
"module_widgets",
"[",
"name",
"]",
"[",
"'module_name'",
"]",
"=",
"module",
".",
"name",
"if",
"(",
"'module_title'",
"not",
"in",
"module_widgets",
"[",
"name",
"]",
")",
":",
"module_widgets",
"[",
"name",
"]",
"[",
"'module_title'",
"]",
"=",
"module",
".",
"title",
"module_widgets",
"[",
"name",
"]",
"=",
"_preprocess_widget",
"(",
"module_widgets",
"[",
"name",
"]",
",",
"name",
")",
"widgets",
".",
"update",
"(",
"module_widgets",
")",
"except",
"ImportError",
":",
"pass",
"except",
"AttributeError",
":",
"pass",
"return",
"widgets"
] |
retrieve widgets from all available modules .
|
train
| false
|
3,958
|
def _complete_python(prefix, line, start, end, ctx):
    if (line != ''):
        first = line.split()[0]
        if ((first in builtins.__xonsh_commands_cache__) and (first not in ctx)):
            return set()
    filt = get_filter_function()
    rtn = {s for s in XONSH_TOKENS if filt(s, prefix)}
    if (ctx is not None):
        if ('.' in prefix):
            rtn |= attr_complete(prefix, ctx, filt)
        rtn |= {s for s in ctx if filt(s, prefix)}
    rtn |= {s for s in dir(builtins) if filt(s, prefix)}
    return rtn
|
[
"def",
"_complete_python",
"(",
"prefix",
",",
"line",
",",
"start",
",",
"end",
",",
"ctx",
")",
":",
"if",
"(",
"line",
"!=",
"''",
")",
":",
"first",
"=",
"line",
".",
"split",
"(",
")",
"[",
"0",
"]",
"if",
"(",
"(",
"first",
"in",
"builtins",
".",
"__xonsh_commands_cache__",
")",
"and",
"(",
"first",
"not",
"in",
"ctx",
")",
")",
":",
"return",
"set",
"(",
")",
"filt",
"=",
"get_filter_function",
"(",
")",
"rtn",
"=",
"{",
"s",
"for",
"s",
"in",
"XONSH_TOKENS",
"if",
"filt",
"(",
"s",
",",
"prefix",
")",
"}",
"if",
"(",
"ctx",
"is",
"not",
"None",
")",
":",
"if",
"(",
"'.'",
"in",
"prefix",
")",
":",
"rtn",
"|=",
"attr_complete",
"(",
"prefix",
",",
"ctx",
",",
"filt",
")",
"rtn",
"|=",
"{",
"s",
"for",
"s",
"in",
"ctx",
"if",
"filt",
"(",
"s",
",",
"prefix",
")",
"}",
"rtn",
"|=",
"{",
"s",
"for",
"s",
"in",
"dir",
"(",
"builtins",
")",
"if",
"filt",
"(",
"s",
",",
"prefix",
")",
"}",
"return",
"rtn"
] |
completes based on the contents of the current python environment .
|
train
| false
|
3,960
|
def _CheckFacetName(name):
    return _CheckFieldName(name)
|
[
"def",
"_CheckFacetName",
"(",
"name",
")",
":",
"return",
"_CheckFieldName",
"(",
"name",
")"
] |
checks facet name is not too long and matches facet name pattern .
|
train
| false
|
3,961
|
def _format_report(report):
    return ' '.join(report.split())
|
[
"def",
"_format_report",
"(",
"report",
")",
":",
"return",
"' '",
".",
"join",
"(",
"report",
".",
"split",
"(",
")",
")"
] |
private function to reformat the report for testing .
|
train
| false
|
3,963
|
def parse_calendar_response(calendar):
    uid = calendar['id']
    name = calendar['summary']
    role = calendar['accessRole']
    read_only = True
    if ((role == 'owner') or (role == 'writer')):
        read_only = False
    description = calendar.get('description', None)
    return Calendar(uid=uid, name=name, read_only=read_only, description=description)
|
[
"def",
"parse_calendar_response",
"(",
"calendar",
")",
":",
"uid",
"=",
"calendar",
"[",
"'id'",
"]",
"name",
"=",
"calendar",
"[",
"'summary'",
"]",
"role",
"=",
"calendar",
"[",
"'accessRole'",
"]",
"read_only",
"=",
"True",
"if",
"(",
"(",
"role",
"==",
"'owner'",
")",
"or",
"(",
"role",
"==",
"'writer'",
")",
")",
":",
"read_only",
"=",
"False",
"description",
"=",
"calendar",
".",
"get",
"(",
"'description'",
",",
"None",
")",
"return",
"Calendar",
"(",
"uid",
"=",
"uid",
",",
"name",
"=",
"name",
",",
"read_only",
"=",
"read_only",
",",
"description",
"=",
"description",
")"
] |
constructs a calendar object from a google calendarlist resource .
|
train
| false
|
3,964
|
def schemata(schema_dicts):
    return dict(((n, Schema.from_legacy(s)) for (n, s) in schema_dicts.items()))
|
[
"def",
"schemata",
"(",
"schema_dicts",
")",
":",
"return",
"dict",
"(",
"(",
"(",
"n",
",",
"Schema",
".",
"from_legacy",
"(",
"s",
")",
")",
"for",
"(",
"n",
",",
"s",
")",
"in",
"schema_dicts",
".",
"items",
"(",
")",
")",
")"
] |
return dictionary of schema objects for given dictionary of schemata .
|
train
| false
|
3,965
|
def rm_subdirs(path, onerror=None):
    names = []
    try:
        names = os.listdir(path)
    except os.error as err:
        if (onerror is not None):
            onerror(os.listdir, path, sys.exc_info())
        else:
            raise
    for name in names:
        fullname = os.path.join(path, name)
        if os.path.isdir(fullname):
            if (onerror is not None):
                shutil.rmtree(fullname, False, onerror)
            else:
                err_count = 0
                while True:
                    try:
                        shutil.rmtree(fullname, False, None)
                        break
                    except os.error:
                        if (err_count > 0):
                            raise
                        err_count += 1
                        time.sleep(RM_SUBDIRS_RETRY_TIME)
|
[
"def",
"rm_subdirs",
"(",
"path",
",",
"onerror",
"=",
"None",
")",
":",
"names",
"=",
"[",
"]",
"try",
":",
"names",
"=",
"os",
".",
"listdir",
"(",
"path",
")",
"except",
"os",
".",
"error",
"as",
"err",
":",
"if",
"(",
"onerror",
"is",
"not",
"None",
")",
":",
"onerror",
"(",
"os",
".",
"listdir",
",",
"path",
",",
"sys",
".",
"exc_info",
"(",
")",
")",
"else",
":",
"raise",
"for",
"name",
"in",
"names",
":",
"fullname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"name",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"fullname",
")",
":",
"if",
"(",
"onerror",
"is",
"not",
"None",
")",
":",
"shutil",
".",
"rmtree",
"(",
"fullname",
",",
"False",
",",
"onerror",
")",
"else",
":",
"err_count",
"=",
"0",
"while",
"True",
":",
"try",
":",
"shutil",
".",
"rmtree",
"(",
"fullname",
",",
"False",
",",
"None",
")",
"break",
"except",
"os",
".",
"error",
":",
"if",
"(",
"err_count",
">",
"0",
")",
":",
"raise",
"err_count",
"+=",
"1",
"time",
".",
"sleep",
"(",
"RM_SUBDIRS_RETRY_TIME",
")"
] |
remove all subdirectories in this path .
|
train
| false
|
3,968
|
def get_snapshot_paths(service):
    file_list = []
    if (service != 'cassandra'):
        return file_list
    look_for = 'snapshots'
    data_dir = '{0}/{1}'.format(APPSCALE_DATA_DIR, service)
    for (full_path, _, file) in os.walk(data_dir):
        if (look_for in full_path):
            file_list.append(full_path)
    logging.debug("List of data paths for '{0}': {1}".format(service, file_list))
    return file_list
|
[
"def",
"get_snapshot_paths",
"(",
"service",
")",
":",
"file_list",
"=",
"[",
"]",
"if",
"(",
"service",
"!=",
"'cassandra'",
")",
":",
"return",
"file_list",
"look_for",
"=",
"'snapshots'",
"data_dir",
"=",
"'{0}/{1}'",
".",
"format",
"(",
"APPSCALE_DATA_DIR",
",",
"service",
")",
"for",
"(",
"full_path",
",",
"_",
",",
"file",
")",
"in",
"os",
".",
"walk",
"(",
"data_dir",
")",
":",
"if",
"(",
"look_for",
"in",
"full_path",
")",
":",
"file_list",
".",
"append",
"(",
"full_path",
")",
"logging",
".",
"debug",
"(",
"\"List of data paths for '{0}': {1}\"",
".",
"format",
"(",
"service",
",",
"file_list",
")",
")",
"return",
"file_list"
] |
returns a list of file names holding critical data for the given service .
|
train
| false
|
3,969
|
def test_sort_locations_file_not_find_link(data):
    finder = PackageFinder([], [], session=PipSession())
    (files, urls) = finder._sort_locations([data.index_url('empty_with_pkg')])
    assert (urls and (not files)), 'urls, but not files should have been found'
|
[
"def",
"test_sort_locations_file_not_find_link",
"(",
"data",
")",
":",
"finder",
"=",
"PackageFinder",
"(",
"[",
"]",
",",
"[",
"]",
",",
"session",
"=",
"PipSession",
"(",
")",
")",
"(",
"files",
",",
"urls",
")",
"=",
"finder",
".",
"_sort_locations",
"(",
"[",
"data",
".",
"index_url",
"(",
"'empty_with_pkg'",
")",
"]",
")",
"assert",
"(",
"urls",
"and",
"(",
"not",
"files",
")",
")",
",",
"'urls, but not files should have been found'"
] |
test that a file:// url dir thats not a find-link .
|
train
| false
|
3,970
|
def safely_reserve_a_username(cursor, gen_usernames=gen_random_usernames, reserve=insert_into_participants):
    cursor.execute('SAVEPOINT safely_reserve_a_username')
    seatbelt = 0
    for username in gen_usernames():
        seatbelt += 1
        if (seatbelt > 100):
            raise FailedToReserveUsername
        try:
            check = reserve(cursor, username)
        except IntegrityError:
            cursor.execute('ROLLBACK TO safely_reserve_a_username')
            continue
        else:
            assert (check == username)
            break
    else:
        raise RanOutOfUsernameAttempts
    cursor.execute('RELEASE safely_reserve_a_username')
    return username
|
[
"def",
"safely_reserve_a_username",
"(",
"cursor",
",",
"gen_usernames",
"=",
"gen_random_usernames",
",",
"reserve",
"=",
"insert_into_participants",
")",
":",
"cursor",
".",
"execute",
"(",
"'SAVEPOINT safely_reserve_a_username'",
")",
"seatbelt",
"=",
"0",
"for",
"username",
"in",
"gen_usernames",
"(",
")",
":",
"seatbelt",
"+=",
"1",
"if",
"(",
"seatbelt",
">",
"100",
")",
":",
"raise",
"FailedToReserveUsername",
"try",
":",
"check",
"=",
"reserve",
"(",
"cursor",
",",
"username",
")",
"except",
"IntegrityError",
":",
"cursor",
".",
"execute",
"(",
"'ROLLBACK TO safely_reserve_a_username'",
")",
"continue",
"else",
":",
"assert",
"(",
"check",
"==",
"username",
")",
"break",
"else",
":",
"raise",
"RanOutOfUsernameAttempts",
"cursor",
".",
"execute",
"(",
"'RELEASE safely_reserve_a_username'",
")",
"return",
"username"
] |
safely reserve a username .
|
train
| false
|
3,971
|
def _unary_compiler(tmpl):
    return (lambda self, x: (tmpl % self.compile(x)))
|
[
"def",
"_unary_compiler",
"(",
"tmpl",
")",
":",
"return",
"(",
"lambda",
"self",
",",
"x",
":",
"(",
"tmpl",
"%",
"self",
".",
"compile",
"(",
"x",
")",
")",
")"
] |
compiler factory for the _compiler .
|
train
| false
|
3,972
|
def async_subscribe(hass, topic, callback, qos=DEFAULT_QOS):
    @asyncio.coroutine
    def mqtt_topic_subscriber(event):
        'Match subscribed MQTT topic.'
        if (not _match_topic(topic, event.data[ATTR_TOPIC])):
            return
        hass.async_run_job(callback, event.data[ATTR_TOPIC], event.data[ATTR_PAYLOAD], event.data[ATTR_QOS])
    async_remove = hass.bus.async_listen(EVENT_MQTT_MESSAGE_RECEIVED, mqtt_topic_subscriber)
    MQTT_CLIENT.subscribe(topic, qos)
    return async_remove
|
[
"def",
"async_subscribe",
"(",
"hass",
",",
"topic",
",",
"callback",
",",
"qos",
"=",
"DEFAULT_QOS",
")",
":",
"@",
"asyncio",
".",
"coroutine",
"def",
"mqtt_topic_subscriber",
"(",
"event",
")",
":",
"if",
"(",
"not",
"_match_topic",
"(",
"topic",
",",
"event",
".",
"data",
"[",
"ATTR_TOPIC",
"]",
")",
")",
":",
"return",
"hass",
".",
"async_run_job",
"(",
"callback",
",",
"event",
".",
"data",
"[",
"ATTR_TOPIC",
"]",
",",
"event",
".",
"data",
"[",
"ATTR_PAYLOAD",
"]",
",",
"event",
".",
"data",
"[",
"ATTR_QOS",
"]",
")",
"async_remove",
"=",
"hass",
".",
"bus",
".",
"async_listen",
"(",
"EVENT_MQTT_MESSAGE_RECEIVED",
",",
"mqtt_topic_subscriber",
")",
"MQTT_CLIENT",
".",
"subscribe",
"(",
"topic",
",",
"qos",
")",
"return",
"async_remove"
] |
subscribe to an mqtt topic .
|
train
| false
|
3,973
|
def test_no_data_with_list_of_none(Chart):
    chart = Chart()
    chart.add('Serie', [None])
    q = chart.render_pyquery()
    assert (q('.text-overlay text').text() == 'No data')
|
[
"def",
"test_no_data_with_list_of_none",
"(",
"Chart",
")",
":",
"chart",
"=",
"Chart",
"(",
")",
"chart",
".",
"add",
"(",
"'Serie'",
",",
"[",
"None",
"]",
")",
"q",
"=",
"chart",
".",
"render_pyquery",
"(",
")",
"assert",
"(",
"q",
"(",
"'.text-overlay text'",
")",
".",
"text",
"(",
")",
"==",
"'No data'",
")"
] |
test no data for a none containing serie .
|
train
| false
|
3,974
|
@pytest.mark.skipif('no_real_s3_credentials()')
def test_subdomain_compatible():
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'), os.getenv('AWS_SECRET_ACCESS_KEY'))
    bucket_name = bucket_name_mangle('wal-e-test-us-west-1-no-dots')
    cinfo = calling_format.from_store_name(bucket_name)
    with FreshBucket(bucket_name, host='s3-us-west-1.amazonaws.com', calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(creds)
        assert (cinfo.region is None)
        assert (cinfo.calling_format is connection.SubdomainCallingFormat)
        assert isinstance(conn.calling_format, connection.SubdomainCallingFormat)
|
[
"@",
"pytest",
".",
"mark",
".",
"skipif",
"(",
"'no_real_s3_credentials()'",
")",
"def",
"test_subdomain_compatible",
"(",
")",
":",
"creds",
"=",
"Credentials",
"(",
"os",
".",
"getenv",
"(",
"'AWS_ACCESS_KEY_ID'",
")",
",",
"os",
".",
"getenv",
"(",
"'AWS_SECRET_ACCESS_KEY'",
")",
")",
"bucket_name",
"=",
"bucket_name_mangle",
"(",
"'wal-e-test-us-west-1-no-dots'",
")",
"cinfo",
"=",
"calling_format",
".",
"from_store_name",
"(",
"bucket_name",
")",
"with",
"FreshBucket",
"(",
"bucket_name",
",",
"host",
"=",
"'s3-us-west-1.amazonaws.com'",
",",
"calling_format",
"=",
"connection",
".",
"OrdinaryCallingFormat",
"(",
")",
")",
"as",
"fb",
":",
"fb",
".",
"create",
"(",
"location",
"=",
"'us-west-1'",
")",
"conn",
"=",
"cinfo",
".",
"connect",
"(",
"creds",
")",
"assert",
"(",
"cinfo",
".",
"region",
"is",
"None",
")",
"assert",
"(",
"cinfo",
".",
"calling_format",
"is",
"connection",
".",
"SubdomainCallingFormat",
")",
"assert",
"isinstance",
"(",
"conn",
".",
"calling_format",
",",
"connection",
".",
"SubdomainCallingFormat",
")"
] |
exercise a case where connecting is region-oblivious .
|
train
| false
|
3,976
|
def get_pynast_version():
    try:
        import pynast
        return pynast.__version__
    except ImportError:
        return None
|
[
"def",
"get_pynast_version",
"(",
")",
":",
"try",
":",
"import",
"pynast",
"return",
"pynast",
".",
"__version__",
"except",
"ImportError",
":",
"return",
"None"
] |
return pynast version string or none if pynast is not installed .
|
train
| false
|
3,977
|
def get_datastore_ref(si, datastore_name):
    inventory = get_inventory(si)
    container = inventory.viewManager.CreateContainerView(inventory.rootFolder, [vim.Datastore], True)
    for item in container.view:
        if (item.name == datastore_name):
            return item
    return None
|
[
"def",
"get_datastore_ref",
"(",
"si",
",",
"datastore_name",
")",
":",
"inventory",
"=",
"get_inventory",
"(",
"si",
")",
"container",
"=",
"inventory",
".",
"viewManager",
".",
"CreateContainerView",
"(",
"inventory",
".",
"rootFolder",
",",
"[",
"vim",
".",
"Datastore",
"]",
",",
"True",
")",
"for",
"item",
"in",
"container",
".",
"view",
":",
"if",
"(",
"item",
".",
"name",
"==",
"datastore_name",
")",
":",
"return",
"item",
"return",
"None"
] |
get a reference to a vmware datastore for the purposes of adding/removing disks si serviceinstance for the vsphere or esxi server datastore_name name of the datastore .
|
train
| false
|
3,979
|
def get_form_class(backend, request):
    form_class = None
    form_class_string = facebook_settings.FACEBOOK_REGISTRATION_FORM
    if form_class_string:
        try:
            form_class = get_class_from_string(form_class_string, None)
        except ImportError:
            pass
    if (not form_class):
        backend = (backend or get_registration_backend())
        if backend:
            form_class = backend.get_form_class(request)
    assert form_class, 'we couldnt find a form class, so we cant go on like this'
    return form_class
|
[
"def",
"get_form_class",
"(",
"backend",
",",
"request",
")",
":",
"form_class",
"=",
"None",
"form_class_string",
"=",
"facebook_settings",
".",
"FACEBOOK_REGISTRATION_FORM",
"if",
"form_class_string",
":",
"try",
":",
"form_class",
"=",
"get_class_from_string",
"(",
"form_class_string",
",",
"None",
")",
"except",
"ImportError",
":",
"pass",
"if",
"(",
"not",
"form_class",
")",
":",
"backend",
"=",
"(",
"backend",
"or",
"get_registration_backend",
"(",
")",
")",
"if",
"backend",
":",
"form_class",
"=",
"backend",
".",
"get_form_class",
"(",
"request",
")",
"assert",
"form_class",
",",
"'we couldnt find a form class, so we cant go on like this'",
"return",
"form_class"
] |
will use registration form in the following order: 1 .
|
train
| false
|
3,980
|
@pytest.mark.parametrize(u'gframe', gcrs_frames)
def test_icrs_gcrs_dist_diff(gframe):
    gcrsnod = icrs_coords[0].transform_to(gframe)
    gcrswd = icrs_coords[1].transform_to(gframe)
    assert (not allclose(gcrswd.ra, gcrsnod.ra, rtol=1e-08, atol=(1e-10 * u.deg)))
    assert (not allclose(gcrswd.dec, gcrsnod.dec, rtol=1e-08, atol=(1e-10 * u.deg)))
    assert (not allclose(gcrswd.distance, icrs_coords[1].distance, rtol=1e-08, atol=(1e-10 * u.pc)))
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"u'gframe'",
",",
"gcrs_frames",
")",
"def",
"test_icrs_gcrs_dist_diff",
"(",
"gframe",
")",
":",
"gcrsnod",
"=",
"icrs_coords",
"[",
"0",
"]",
".",
"transform_to",
"(",
"gframe",
")",
"gcrswd",
"=",
"icrs_coords",
"[",
"1",
"]",
".",
"transform_to",
"(",
"gframe",
")",
"assert",
"(",
"not",
"allclose",
"(",
"gcrswd",
".",
"ra",
",",
"gcrsnod",
".",
"ra",
",",
"rtol",
"=",
"1e-08",
",",
"atol",
"=",
"(",
"1e-10",
"*",
"u",
".",
"deg",
")",
")",
")",
"assert",
"(",
"not",
"allclose",
"(",
"gcrswd",
".",
"dec",
",",
"gcrsnod",
".",
"dec",
",",
"rtol",
"=",
"1e-08",
",",
"atol",
"=",
"(",
"1e-10",
"*",
"u",
".",
"deg",
")",
")",
")",
"assert",
"(",
"not",
"allclose",
"(",
"gcrswd",
".",
"distance",
",",
"icrs_coords",
"[",
"1",
"]",
".",
"distance",
",",
"rtol",
"=",
"1e-08",
",",
"atol",
"=",
"(",
"1e-10",
"*",
"u",
".",
"pc",
")",
")",
")"
] |
check that with and without distance give different icrs<->gcrs answers .
|
train
| false
|
3,981
|
def demo_simple_grid(fig):
    grid = ImageGrid(fig, 141, nrows_ncols=(2, 2), axes_pad=0.05, label_mode='1')
    (Z, extent) = get_demo_image()
    for i in range(4):
        im = grid[i].imshow(Z, extent=extent, interpolation='nearest')
    grid.axes_llc.set_xticks([(-2), 0, 2])
    grid.axes_llc.set_yticks([(-2), 0, 2])
|
[
"def",
"demo_simple_grid",
"(",
"fig",
")",
":",
"grid",
"=",
"ImageGrid",
"(",
"fig",
",",
"141",
",",
"nrows_ncols",
"=",
"(",
"2",
",",
"2",
")",
",",
"axes_pad",
"=",
"0.05",
",",
"label_mode",
"=",
"'1'",
")",
"(",
"Z",
",",
"extent",
")",
"=",
"get_demo_image",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"4",
")",
":",
"im",
"=",
"grid",
"[",
"i",
"]",
".",
"imshow",
"(",
"Z",
",",
"extent",
"=",
"extent",
",",
"interpolation",
"=",
"'nearest'",
")",
"grid",
".",
"axes_llc",
".",
"set_xticks",
"(",
"[",
"(",
"-",
"2",
")",
",",
"0",
",",
"2",
"]",
")",
"grid",
".",
"axes_llc",
".",
"set_yticks",
"(",
"[",
"(",
"-",
"2",
")",
",",
"0",
",",
"2",
"]",
")"
] |
a grid of 2x2 images with 0 .
|
train
| false
|
3,982
|
def IsDerivedFunction(clean_lines, linenum):
    for i in xrange(linenum, max((-1), (linenum - 10)), (-1)):
        match = Match('^([^()]*\\w+)\\(', clean_lines.elided[i])
        if match:
            (line, _, closing_paren) = CloseExpression(clean_lines, i, len(match.group(1)))
            return ((closing_paren >= 0) and Search('\\boverride\\b', line[closing_paren:]))
    return False
|
[
"def",
"IsDerivedFunction",
"(",
"clean_lines",
",",
"linenum",
")",
":",
"for",
"i",
"in",
"xrange",
"(",
"linenum",
",",
"max",
"(",
"(",
"-",
"1",
")",
",",
"(",
"linenum",
"-",
"10",
")",
")",
",",
"(",
"-",
"1",
")",
")",
":",
"match",
"=",
"Match",
"(",
"'^([^()]*\\\\w+)\\\\('",
",",
"clean_lines",
".",
"elided",
"[",
"i",
"]",
")",
"if",
"match",
":",
"(",
"line",
",",
"_",
",",
"closing_paren",
")",
"=",
"CloseExpression",
"(",
"clean_lines",
",",
"i",
",",
"len",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")",
"return",
"(",
"(",
"closing_paren",
">=",
"0",
")",
"and",
"Search",
"(",
"'\\\\boverride\\\\b'",
",",
"line",
"[",
"closing_paren",
":",
"]",
")",
")",
"return",
"False"
] |
check if current line contains an inherited function .
|
train
| true
|
3,983
|
@register(u'emacs-editing-mode')
def emacs_editing_mode(event):
event.cli.editing_mode = EditingMode.EMACS
|
[
"@",
"register",
"(",
"u'emacs-editing-mode'",
")",
"def",
"emacs_editing_mode",
"(",
"event",
")",
":",
"event",
".",
"cli",
".",
"editing_mode",
"=",
"EditingMode",
".",
"EMACS"
] |
switch to emacs editing mode .
|
train
| false
|
3,988
|
def add_actions(target, actions, insert_before=None):
previous_action = None
target_actions = list(target.actions())
if target_actions:
previous_action = target_actions[(-1)]
if previous_action.isSeparator():
previous_action = None
for action in actions:
if ((action is None) and (previous_action is not None)):
if (insert_before is None):
target.addSeparator()
else:
target.insertSeparator(insert_before)
elif isinstance(action, QMenu):
if (insert_before is None):
target.addMenu(action)
else:
target.insertMenu(insert_before, action)
elif isinstance(action, QAction):
if isinstance(action, SpyderAction):
if (isinstance(target, QMenu) or (not isinstance(target, QToolBar))):
action = action.no_icon_action
if (insert_before is None):
target.addAction(action)
else:
target.insertAction(insert_before, action)
previous_action = action
|
[
"def",
"add_actions",
"(",
"target",
",",
"actions",
",",
"insert_before",
"=",
"None",
")",
":",
"previous_action",
"=",
"None",
"target_actions",
"=",
"list",
"(",
"target",
".",
"actions",
"(",
")",
")",
"if",
"target_actions",
":",
"previous_action",
"=",
"target_actions",
"[",
"(",
"-",
"1",
")",
"]",
"if",
"previous_action",
".",
"isSeparator",
"(",
")",
":",
"previous_action",
"=",
"None",
"for",
"action",
"in",
"actions",
":",
"if",
"(",
"(",
"action",
"is",
"None",
")",
"and",
"(",
"previous_action",
"is",
"not",
"None",
")",
")",
":",
"if",
"(",
"insert_before",
"is",
"None",
")",
":",
"target",
".",
"addSeparator",
"(",
")",
"else",
":",
"target",
".",
"insertSeparator",
"(",
"insert_before",
")",
"elif",
"isinstance",
"(",
"action",
",",
"QMenu",
")",
":",
"if",
"(",
"insert_before",
"is",
"None",
")",
":",
"target",
".",
"addMenu",
"(",
"action",
")",
"else",
":",
"target",
".",
"insertMenu",
"(",
"insert_before",
",",
"action",
")",
"elif",
"isinstance",
"(",
"action",
",",
"QAction",
")",
":",
"if",
"isinstance",
"(",
"action",
",",
"SpyderAction",
")",
":",
"if",
"(",
"isinstance",
"(",
"target",
",",
"QMenu",
")",
"or",
"(",
"not",
"isinstance",
"(",
"target",
",",
"QToolBar",
")",
")",
")",
":",
"action",
"=",
"action",
".",
"no_icon_action",
"if",
"(",
"insert_before",
"is",
"None",
")",
":",
"target",
".",
"addAction",
"(",
"action",
")",
"else",
":",
"target",
".",
"insertAction",
"(",
"insert_before",
",",
"action",
")",
"previous_action",
"=",
"action"
] |
add actions to a qmenu or a qtoolbar .
|
train
| true
|
3,991
|
@step('I remove "([^"]+)" transcripts id from store')
def remove_transcripts_from_store(_step, subs_id):
filename = 'subs_{0}.srt.sjson'.format(subs_id.strip())
content_location = StaticContent.compute_location(world.scenario_dict['COURSE'].id, filename)
try:
content = contentstore().find(content_location)
contentstore().delete(content.location)
print 'Transcript file was removed from store.'
except NotFoundError:
print 'Transcript file was NOT found and not removed.'
|
[
"@",
"step",
"(",
"'I remove \"([^\"]+)\" transcripts id from store'",
")",
"def",
"remove_transcripts_from_store",
"(",
"_step",
",",
"subs_id",
")",
":",
"filename",
"=",
"'subs_{0}.srt.sjson'",
".",
"format",
"(",
"subs_id",
".",
"strip",
"(",
")",
")",
"content_location",
"=",
"StaticContent",
".",
"compute_location",
"(",
"world",
".",
"scenario_dict",
"[",
"'COURSE'",
"]",
".",
"id",
",",
"filename",
")",
"try",
":",
"content",
"=",
"contentstore",
"(",
")",
".",
"find",
"(",
"content_location",
")",
"contentstore",
"(",
")",
".",
"delete",
"(",
"content",
".",
"location",
")",
"print",
"'Transcript file was removed from store.'",
"except",
"NotFoundError",
":",
"print",
"'Transcript file was NOT found and not removed.'"
] |
remove transcript file from the content store .
|
train
| false
|
3,994
|
def mathml(expr, **settings):
return MathMLPrinter(settings).doprint(expr)
|
[
"def",
"mathml",
"(",
"expr",
",",
"**",
"settings",
")",
":",
"return",
"MathMLPrinter",
"(",
"settings",
")",
".",
"doprint",
"(",
"expr",
")"
] |
returns the mathml representation of expr .
|
train
| false
|
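A hedged usage sketch for the printer above; the expression is illustrative.

from sympy import Symbol, sin

x = Symbol('x')
# emits content MathML markup such as <apply><plus/>...</apply>
print(mathml(sin(x) + x**2))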
3,996
|
def startDtmfAcknowledge():
a = TpPd(pd=3)
b = MessageType(mesType=50)
c = KeypadFacilityHdr(ieiKF=44, eightBitKF=0)
packet = ((a / b) / c)
return packet
|
[
"def",
"startDtmfAcknowledge",
"(",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"3",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"50",
")",
"c",
"=",
"KeypadFacilityHdr",
"(",
"ieiKF",
"=",
"44",
",",
"eightBitKF",
"=",
"0",
")",
"packet",
"=",
"(",
"(",
"a",
"/",
"b",
")",
"/",
"c",
")",
"return",
"packet"
] |
start dtmf acknowledge section 9 .
|
train
| true
|
3,997
|
def data_for_url(url):
path = url.path()
host = url.host()
log.misc.debug('url: {}, path: {}, host {}'.format(url.toDisplayString(), path, host))
try:
handler = _HANDLERS[path]
except KeyError:
try:
handler = _HANDLERS[host]
except KeyError:
raise NoHandlerFound(url)
try:
(mimetype, data) = handler(url)
except OSError as e:
raise QuteSchemeOSError(e)
except QuteSchemeError as e:
raise
assert (mimetype is not None), url
if ((mimetype == 'text/html') and isinstance(data, str)):
data = data.encode('utf-8', errors='xmlcharrefreplace')
return (mimetype, data)
|
[
"def",
"data_for_url",
"(",
"url",
")",
":",
"path",
"=",
"url",
".",
"path",
"(",
")",
"host",
"=",
"url",
".",
"host",
"(",
")",
"log",
".",
"misc",
".",
"debug",
"(",
"'url: {}, path: {}, host {}'",
".",
"format",
"(",
"url",
".",
"toDisplayString",
"(",
")",
",",
"path",
",",
"host",
")",
")",
"try",
":",
"handler",
"=",
"_HANDLERS",
"[",
"path",
"]",
"except",
"KeyError",
":",
"try",
":",
"handler",
"=",
"_HANDLERS",
"[",
"host",
"]",
"except",
"KeyError",
":",
"raise",
"NoHandlerFound",
"(",
"url",
")",
"try",
":",
"(",
"mimetype",
",",
"data",
")",
"=",
"handler",
"(",
"url",
")",
"except",
"OSError",
"as",
"e",
":",
"raise",
"QuteSchemeOSError",
"(",
"e",
")",
"except",
"QuteSchemeError",
"as",
"e",
":",
"raise",
"assert",
"(",
"mimetype",
"is",
"not",
"None",
")",
",",
"url",
"if",
"(",
"(",
"mimetype",
"==",
"'text/html'",
")",
"and",
"isinstance",
"(",
"data",
",",
"str",
")",
")",
":",
"data",
"=",
"data",
".",
"encode",
"(",
"'utf-8'",
",",
"errors",
"=",
"'xmlcharrefreplace'",
")",
"return",
"(",
"mimetype",
",",
"data",
")"
] |
get the data to show for the given url .
|
train
| false
|
4,004
|
def addPillarFromConvexLoopsGrids(faces, grids, indexedLoops):
cellBottomLoops = getIndexedCellLoopsFromIndexedGrid(grids[0])
for cellBottomLoop in cellBottomLoops:
addFacesByConvexReversed(faces, cellBottomLoop)
addFacesByConvexLoops(faces, indexedLoops)
cellTopLoops = getIndexedCellLoopsFromIndexedGrid(grids[(-1)])
for cellTopLoop in cellTopLoops:
addFacesByConvex(faces, cellTopLoop)
|
[
"def",
"addPillarFromConvexLoopsGrids",
"(",
"faces",
",",
"grids",
",",
"indexedLoops",
")",
":",
"cellBottomLoops",
"=",
"getIndexedCellLoopsFromIndexedGrid",
"(",
"grids",
"[",
"0",
"]",
")",
"for",
"cellBottomLoop",
"in",
"cellBottomLoops",
":",
"addFacesByConvexReversed",
"(",
"faces",
",",
"cellBottomLoop",
")",
"addFacesByConvexLoops",
"(",
"faces",
",",
"indexedLoops",
")",
"cellTopLoops",
"=",
"getIndexedCellLoopsFromIndexedGrid",
"(",
"grids",
"[",
"(",
"-",
"1",
")",
"]",
")",
"for",
"cellTopLoop",
"in",
"cellTopLoops",
":",
"addFacesByConvex",
"(",
"faces",
",",
"cellTopLoop",
")"
] |
add pillar from convex loops and grids .
|
train
| false
|
4,005
|
def _calc_elbo(vars, model, n_mcsamples, random_seed):
theano.config.compute_test_value = 'ignore'
shared = pm.make_shared_replacements(vars, model)
factors = ([var.logpt for var in model.basic_RVs] + model.potentials)
logpt = tt.add(*map(tt.sum, factors))
([logp], inarray) = pm.join_nonshared_inputs([logpt], vars, shared)
uw = tt.vector('uw')
uw.tag.test_value = floatX(np.concatenate([inarray.tag.test_value, inarray.tag.test_value]))
elbo = _elbo_t(logp, uw, inarray, n_mcsamples, random_seed)
return (elbo, shared)
|
[
"def",
"_calc_elbo",
"(",
"vars",
",",
"model",
",",
"n_mcsamples",
",",
"random_seed",
")",
":",
"theano",
".",
"config",
".",
"compute_test_value",
"=",
"'ignore'",
"shared",
"=",
"pm",
".",
"make_shared_replacements",
"(",
"vars",
",",
"model",
")",
"factors",
"=",
"(",
"[",
"var",
".",
"logpt",
"for",
"var",
"in",
"model",
".",
"basic_RVs",
"]",
"+",
"model",
".",
"potentials",
")",
"logpt",
"=",
"tt",
".",
"add",
"(",
"*",
"map",
"(",
"tt",
".",
"sum",
",",
"factors",
")",
")",
"(",
"[",
"logp",
"]",
",",
"inarray",
")",
"=",
"pm",
".",
"join_nonshared_inputs",
"(",
"[",
"logpt",
"]",
",",
"vars",
",",
"shared",
")",
"uw",
"=",
"tt",
".",
"vector",
"(",
"'uw'",
")",
"uw",
".",
"tag",
".",
"test_value",
"=",
"floatX",
"(",
"np",
".",
"concatenate",
"(",
"[",
"inarray",
".",
"tag",
".",
"test_value",
",",
"inarray",
".",
"tag",
".",
"test_value",
"]",
")",
")",
"elbo",
"=",
"_elbo_t",
"(",
"logp",
",",
"uw",
",",
"inarray",
",",
"n_mcsamples",
",",
"random_seed",
")",
"return",
"(",
"elbo",
",",
"shared",
")"
] |
calculate approximate elbo .
|
train
| false
|
4,006
|
def grain(tgt, delimiter=DEFAULT_TARGET_DELIM):
matcher = salt.minion.Matcher({'grains': __grains__}, __salt__)
try:
return matcher.grain_match(tgt, delimiter=delimiter)
except Exception as exc:
log.exception(exc)
return False
|
[
"def",
"grain",
"(",
"tgt",
",",
"delimiter",
"=",
"DEFAULT_TARGET_DELIM",
")",
":",
"matcher",
"=",
"salt",
".",
"minion",
".",
"Matcher",
"(",
"{",
"'grains'",
":",
"__grains__",
"}",
",",
"__salt__",
")",
"try",
":",
"return",
"matcher",
".",
"grain_match",
"(",
"tgt",
",",
"delimiter",
"=",
"delimiter",
")",
"except",
"Exception",
"as",
"exc",
":",
"log",
".",
"exception",
"(",
"exc",
")",
"return",
"False"
] |
return true if the minion matches the given grain target .
|
train
| false
|
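A hedged usage sketch: this is a Salt execution module, so __grains__ and __salt__ are injected at runtime, and the grain key/value pairs shown are illustrative.

grain('os:Ubuntu')                 # True when the minion's 'os' grain is 'Ubuntu'
grain('os|Ubuntu', delimiter='|')  # the same match with a custom delimiter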
4,009
|
@command('url\\s(.*[-_a-zA-Z0-9]{11}.*)')
def yt_url(url, print_title=0):
url_list = url.split()
g.model.songs = []
for u in url_list:
try:
p = util.get_pafy(u)
except (IOError, ValueError) as e:
g.message = ((c.r + str(e)) + c.w)
g.content = (g.content or content.generate_songlist_display(zeromsg=g.message))
return
g.browse_mode = 'normal'
v = Video(p.videoid, p.title, p.length)
g.model.songs.append(v)
if (not g.command_line):
g.content = content.generate_songlist_display()
if print_title:
util.xprint(v.title)
|
[
"@",
"command",
"(",
"'url\\\\s(.*[-_a-zA-Z0-9]{11}.*)'",
")",
"def",
"yt_url",
"(",
"url",
",",
"print_title",
"=",
"0",
")",
":",
"url_list",
"=",
"url",
".",
"split",
"(",
")",
"g",
".",
"model",
".",
"songs",
"=",
"[",
"]",
"for",
"u",
"in",
"url_list",
":",
"try",
":",
"p",
"=",
"util",
".",
"get_pafy",
"(",
"u",
")",
"except",
"(",
"IOError",
",",
"ValueError",
")",
"as",
"e",
":",
"g",
".",
"message",
"=",
"(",
"(",
"c",
".",
"r",
"+",
"str",
"(",
"e",
")",
")",
"+",
"c",
".",
"w",
")",
"g",
".",
"content",
"=",
"(",
"g",
".",
"content",
"or",
"content",
".",
"generate_songlist_display",
"(",
"zeromsg",
"=",
"g",
".",
"message",
")",
")",
"return",
"g",
".",
"browse_mode",
"=",
"'normal'",
"v",
"=",
"Video",
"(",
"p",
".",
"videoid",
",",
"p",
".",
"title",
",",
"p",
".",
"length",
")",
"g",
".",
"model",
".",
"songs",
".",
"append",
"(",
"v",
")",
"if",
"(",
"not",
"g",
".",
"command_line",
")",
":",
"g",
".",
"content",
"=",
"content",
".",
"generate_songlist_display",
"(",
")",
"if",
"print_title",
":",
"util",
".",
"xprint",
"(",
"v",
".",
"title",
")"
] |
access videos by urls .
|
train
| false
|
4,011
|
def add_products(basket, args):
for (price, quantity) in args:
add_product(basket, price, quantity)
|
[
"def",
"add_products",
"(",
"basket",
",",
"args",
")",
":",
"for",
"(",
"price",
",",
"quantity",
")",
"in",
"args",
":",
"add_product",
"(",
"basket",
",",
"price",
",",
"quantity",
")"
] |
helper to add a series of products to the passed basket .
|
train
| false
|
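A hedged usage sketch: basket and add_product are the test helpers the snippet assumes (django-oscar style), and the prices are made up.

from decimal import Decimal as D

# two basket lines: 2 units at 9.99 and 1 unit at 4.50
add_products(basket, [(D('9.99'), 2), (D('4.50'), 1)])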
4,012
|
def test_error(qtbot, proc, caplog, message_mock):
with caplog.at_level(logging.ERROR, 'message'):
with qtbot.waitSignal(proc.error, timeout=5000):
proc.start('this_does_not_exist_either', [])
msg = message_mock.getmsg(usertypes.MessageLevel.error)
expected_msg = 'Error while spawning testprocess: The process failed to start.'
assert (msg.text == expected_msg)
|
[
"def",
"test_error",
"(",
"qtbot",
",",
"proc",
",",
"caplog",
",",
"message_mock",
")",
":",
"with",
"caplog",
".",
"at_level",
"(",
"logging",
".",
"ERROR",
",",
"'message'",
")",
":",
"with",
"qtbot",
".",
"waitSignal",
"(",
"proc",
".",
"error",
",",
"timeout",
"=",
"5000",
")",
":",
"proc",
".",
"start",
"(",
"'this_does_not_exist_either'",
",",
"[",
"]",
")",
"msg",
"=",
"message_mock",
".",
"getmsg",
"(",
"usertypes",
".",
"MessageLevel",
".",
"error",
")",
"expected_msg",
"=",
"'Error while spawning testprocess: The process failed to start.'",
"assert",
"(",
"msg",
".",
"text",
"==",
"expected_msg",
")"
] |
test the process emitting an error .
|
train
| false
|
4,013
|
def getNewRepository():
return ExportRepository()
|
[
"def",
"getNewRepository",
"(",
")",
":",
"return",
"ExportRepository",
"(",
")"
] |
get new repository .
|
train
| false
|
4,014
|
def server_reboot(host=None, admin_username=None, admin_password=None, module=None):
return __execute_cmd('serveraction powercycle', host=host, admin_username=admin_username, admin_password=admin_password, module=module)
|
[
"def",
"server_reboot",
"(",
"host",
"=",
"None",
",",
"admin_username",
"=",
"None",
",",
"admin_password",
"=",
"None",
",",
"module",
"=",
"None",
")",
":",
"return",
"__execute_cmd",
"(",
"'serveraction powercycle'",
",",
"host",
"=",
"host",
",",
"admin_username",
"=",
"admin_username",
",",
"admin_password",
"=",
"admin_password",
",",
"module",
"=",
"module",
")"
] |
issues a power-cycle operation on the managed server .
|
train
| true
|
4,015
|
def req_match():
return s3db.req_match()
|
[
"def",
"req_match",
"(",
")",
":",
"return",
"s3db",
".",
"req_match",
"(",
")"
] |
match requests .
|
train
| false
|
4,016
|
def manual_order(qs, pks, pk_name='id'):
if (not pks):
return qs.none()
return qs.filter(id__in=pks).extra(select={'_manual': ('FIELD(%s, %s)' % (pk_name, ','.join(map(str, pks))))}, order_by=['_manual'])
|
[
"def",
"manual_order",
"(",
"qs",
",",
"pks",
",",
"pk_name",
"=",
"'id'",
")",
":",
"if",
"(",
"not",
"pks",
")",
":",
"return",
"qs",
".",
"none",
"(",
")",
"return",
"qs",
".",
"filter",
"(",
"id__in",
"=",
"pks",
")",
".",
"extra",
"(",
"select",
"=",
"{",
"'_manual'",
":",
"(",
"'FIELD(%s, %s)'",
"%",
"(",
"pk_name",
",",
"','",
".",
"join",
"(",
"map",
"(",
"str",
",",
"pks",
")",
")",
")",
")",
"}",
",",
"order_by",
"=",
"[",
"'_manual'",
"]",
")"
] |
given a query set and a list of primary keys .
|
train
| false
|
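A hedged usage sketch with a hypothetical Article model; note that the helper leans on MySQL's FIELD() function, so it is MySQL-specific.

pks = [7, 3, 5]
# returns articles ordered exactly 7, 3, 5 via ORDER BY FIELD(id, 7,3,5)
articles = manual_order(Article.objects.all(), pks)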
4,017
|
def do_select():
PCap.use_select = True
|
[
"def",
"do_select",
"(",
")",
":",
"PCap",
".",
"use_select",
"=",
"True"
] |
force pcap to use select for its capture loop .
|
train
| false
|
4,018
|
def libvlc_get_version():
f = (_Cfunctions.get('libvlc_get_version', None) or _Cfunction('libvlc_get_version', (), None, ctypes.c_char_p))
return f()
|
[
"def",
"libvlc_get_version",
"(",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_get_version'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_get_version'",
",",
"(",
")",
",",
"None",
",",
"ctypes",
".",
"c_char_p",
")",
")",
"return",
"f",
"(",
")"
] |
retrieve libvlc version .
|
train
| false
|
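A hedged usage sketch; the function returns a bytes version string whose exact value depends on the installed libvlc.

version = libvlc_get_version()
print(version.decode('utf-8', errors='replace'))  # e.g. '3.0.x ...'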
4,019
|
def ghcommit_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
app = inliner.document.settings.env.app
try:
base = app.config.github_project_url
if (not base):
raise AttributeError
if (not base.endswith('/')):
base += '/'
except AttributeError as err:
raise ValueError(('github_project_url configuration value is not set (%s)' % str(err)))
ref = (base + text)
node = nodes.reference(rawtext, text[:6], refuri=ref, **options)
return ([node], [])
|
[
"def",
"ghcommit_role",
"(",
"name",
",",
"rawtext",
",",
"text",
",",
"lineno",
",",
"inliner",
",",
"options",
"=",
"{",
"}",
",",
"content",
"=",
"[",
"]",
")",
":",
"app",
"=",
"inliner",
".",
"document",
".",
"settings",
".",
"env",
".",
"app",
"try",
":",
"base",
"=",
"app",
".",
"config",
".",
"github_project_url",
"if",
"(",
"not",
"base",
")",
":",
"raise",
"AttributeError",
"if",
"(",
"not",
"base",
".",
"endswith",
"(",
"'/'",
")",
")",
":",
"base",
"+=",
"'/'",
"except",
"AttributeError",
"as",
"err",
":",
"raise",
"ValueError",
"(",
"(",
"'github_project_url configuration value is not set (%s)'",
"%",
"str",
"(",
"err",
")",
")",
")",
"ref",
"=",
"(",
"base",
"+",
"text",
")",
"node",
"=",
"nodes",
".",
"reference",
"(",
"rawtext",
",",
"text",
"[",
":",
"6",
"]",
",",
"refuri",
"=",
"ref",
",",
"**",
"options",
")",
"return",
"(",
"[",
"node",
"]",
",",
"[",
"]",
")"
] |
link to a github commit .
|
train
| true
|
4,020
|
@pytest.fixture(scope=u'session')
def use_celery_app_trap():
return False
|
[
"@",
"pytest",
".",
"fixture",
"(",
"scope",
"=",
"u'session'",
")",
"def",
"use_celery_app_trap",
"(",
")",
":",
"return",
"False"
] |
you can override this fixture to enable the app trap .
|
train
| false
|
4,021
|
@task
def code_install():
assert env.host_string, 'no hosts specified'
assert (not is_old_code()), 'Active code is using the old style (directory instead of symlink). Manual intervention required'
rev = code_prep()
if code_verify(rev):
return
fprint(('Installing code (rev %s)' % rev))
filename = ('viewfinder.%s.tar.gz' % rev)
dirname = ('viewfinder.%s' % rev)
put(filename, ('~/%s' % filename))
run(('mkdir -p ~/%s' % dirname))
with cd(('~/%s' % dirname)):
run(('tar xzvf ../%s' % filename))
run(('mkdir -p ~/%s/pythonpath' % dirname))
with cd(('~/%s/pythonpath' % dirname)):
run(('ln -f -s ~/%s viewfinder' % dirname))
run(('rm -f ~/%s' % filename))
code_cleanup()
|
[
"@",
"task",
"def",
"code_install",
"(",
")",
":",
"assert",
"env",
".",
"host_string",
",",
"'no hosts specified'",
"assert",
"(",
"not",
"is_old_code",
"(",
")",
")",
",",
"'Active code is using the old style (directory instead of symlink). Manual intervention required'",
"rev",
"=",
"code_prep",
"(",
")",
"if",
"code_verify",
"(",
"rev",
")",
":",
"return",
"fprint",
"(",
"(",
"'Installing code (rev %s)'",
"%",
"rev",
")",
")",
"filename",
"=",
"(",
"'viewfinder.%s.tar.gz'",
"%",
"rev",
")",
"dirname",
"=",
"(",
"'viewfinder.%s'",
"%",
"rev",
")",
"put",
"(",
"filename",
",",
"(",
"'~/%s'",
"%",
"filename",
")",
")",
"run",
"(",
"(",
"'mkdir -p ~/%s'",
"%",
"dirname",
")",
")",
"with",
"cd",
"(",
"(",
"'~/%s'",
"%",
"dirname",
")",
")",
":",
"run",
"(",
"(",
"'tar xzvf ../%s'",
"%",
"filename",
")",
")",
"run",
"(",
"(",
"'mkdir -p ~/%s/pythonpath'",
"%",
"dirname",
")",
")",
"with",
"cd",
"(",
"(",
"'~/%s/pythonpath'",
"%",
"dirname",
")",
")",
":",
"run",
"(",
"(",
"'ln -f -s ~/%s viewfinder'",
"%",
"dirname",
")",
")",
"run",
"(",
"(",
"'rm -f ~/%s'",
"%",
"filename",
")",
")",
"code_cleanup",
"(",
")"
] |
install latest code from local directory .
|
train
| false
|
4,022
|
def create_release_branch(version, base_branch):
try:
base_branch.checkout(b=('release/flocker-' + version))
except GitCommandError:
raise BranchExists()
|
[
"def",
"create_release_branch",
"(",
"version",
",",
"base_branch",
")",
":",
"try",
":",
"base_branch",
".",
"checkout",
"(",
"b",
"=",
"(",
"'release/flocker-'",
"+",
"version",
")",
")",
"except",
"GitCommandError",
":",
"raise",
"BranchExists",
"(",
")"
] |
checkout a new git branch to make changes on and later tag as a release .
|
train
| false
|
4,024
|
def newDerObjectId(dottedstring):
der = DerObjectId(dottedstring)
return der
|
[
"def",
"newDerObjectId",
"(",
"dottedstring",
")",
":",
"der",
"=",
"DerObjectId",
"(",
"dottedstring",
")",
"return",
"der"
] |
create a derobjectid object .
|
train
| false
|
4,025
|
@flake8ext
def check_no_basestring(logical_line):
if re.search('\\bbasestring\\b', logical_line):
msg = 'N326: basestring is not Python3-compatible, use six.string_types instead.'
(yield (0, msg))
|
[
"@",
"flake8ext",
"def",
"check_no_basestring",
"(",
"logical_line",
")",
":",
"if",
"re",
".",
"search",
"(",
"'\\\\bbasestring\\\\b'",
",",
"logical_line",
")",
":",
"msg",
"=",
"'N326: basestring is not Python3-compatible, use six.string_types instead.'",
"(",
"yield",
"(",
"0",
",",
"msg",
")",
")"
] |
n326 - dont use basestring .
|
train
| false
|
4,028
|
def remove_items(headers, condition):
removed = {}
keys = filter(condition, headers)
removed.update(((key, headers.pop(key)) for key in keys))
return removed
|
[
"def",
"remove_items",
"(",
"headers",
",",
"condition",
")",
":",
"removed",
"=",
"{",
"}",
"keys",
"=",
"filter",
"(",
"condition",
",",
"headers",
")",
"removed",
".",
"update",
"(",
"(",
"(",
"key",
",",
"headers",
".",
"pop",
"(",
"key",
")",
")",
"for",
"key",
"in",
"keys",
")",
")",
"return",
"removed"
] |
remove headers matching the given condition, returning the removed items .
|
train
| false
|
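A hedged usage sketch; the header names are made up. One caveat: on Python 3 filter is lazy, so the generator passed to update pops keys while filter is still iterating the dict, which raises a RuntimeError; materializing the keys first (keys = list(filter(condition, headers))) avoids it.

headers = {'X-Trace': '1', 'X-Debug': 'on', 'Content-Type': 'text/plain'}
removed = remove_items(headers, lambda key: key.startswith('X-'))
# removed == {'X-Trace': '1', 'X-Debug': 'on'}; headers keeps Content-Type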
4,029
|
def _flip_vectors(jn_matrix, m_matrix):
m_matrix_trans = m_matrix.transpose()
jn_matrix_trans = jn_matrix.transpose()
new_matrix = zeros(jn_matrix_trans.shape, float)
for (i, m_vector) in enumerate(m_matrix_trans):
jn_vector = jn_matrix_trans[i]
disT = list((m_vector - jn_vector))
disT = sum(map(abs, disT))
jn_flip = (jn_vector * [(-1)])
disF = list((m_vector - jn_flip))
disF = sum(map(abs, disF))
if (disT > disF):
new_matrix[i] = jn_flip
else:
new_matrix[i] = jn_vector
return new_matrix.transpose()
|
[
"def",
"_flip_vectors",
"(",
"jn_matrix",
",",
"m_matrix",
")",
":",
"m_matrix_trans",
"=",
"m_matrix",
".",
"transpose",
"(",
")",
"jn_matrix_trans",
"=",
"jn_matrix",
".",
"transpose",
"(",
")",
"new_matrix",
"=",
"zeros",
"(",
"jn_matrix_trans",
".",
"shape",
",",
"float",
")",
"for",
"(",
"i",
",",
"m_vector",
")",
"in",
"enumerate",
"(",
"m_matrix_trans",
")",
":",
"jn_vector",
"=",
"jn_matrix_trans",
"[",
"i",
"]",
"disT",
"=",
"list",
"(",
"(",
"m_vector",
"-",
"jn_vector",
")",
")",
"disT",
"=",
"sum",
"(",
"map",
"(",
"abs",
",",
"disT",
")",
")",
"jn_flip",
"=",
"(",
"jn_vector",
"*",
"[",
"(",
"-",
"1",
")",
"]",
")",
"disF",
"=",
"list",
"(",
"(",
"m_vector",
"-",
"jn_flip",
")",
")",
"disF",
"=",
"sum",
"(",
"map",
"(",
"abs",
",",
"disF",
")",
")",
"if",
"(",
"disT",
">",
"disF",
")",
":",
"new_matrix",
"[",
"i",
"]",
"=",
"jn_flip",
"else",
":",
"new_matrix",
"[",
"i",
"]",
"=",
"jn_vector",
"return",
"new_matrix",
".",
"transpose",
"(",
")"
] |
transforms pca vectors so that signs are correct .
|
train
| false
|
4,030
|
@pytest.mark.skipif('not HAS_BEAUTIFUL_SOUP')
def test_multicolumn_read():
table = Table.read('t/html2.html', format='ascii.html')
str_type = np.dtype((np.str, 21))
expected = Table(np.array([(['1', '2.5000000000000000001'], 3), (['1a', '1'], 3.5)], dtype=[('A', str_type, (2,)), ('B', '<f8')]))
assert np.all((table == expected))
|
[
"@",
"pytest",
".",
"mark",
".",
"skipif",
"(",
"'not HAS_BEAUTIFUL_SOUP'",
")",
"def",
"test_multicolumn_read",
"(",
")",
":",
"table",
"=",
"Table",
".",
"read",
"(",
"'t/html2.html'",
",",
"format",
"=",
"'ascii.html'",
")",
"str_type",
"=",
"np",
".",
"dtype",
"(",
"(",
"np",
".",
"str",
",",
"21",
")",
")",
"expected",
"=",
"Table",
"(",
"np",
".",
"array",
"(",
"[",
"(",
"[",
"'1'",
",",
"'2.5000000000000000001'",
"]",
",",
"3",
")",
",",
"(",
"[",
"'1a'",
",",
"'1'",
"]",
",",
"3.5",
")",
"]",
",",
"dtype",
"=",
"[",
"(",
"'A'",
",",
"str_type",
",",
"(",
"2",
",",
")",
")",
",",
"(",
"'B'",
",",
"'<f8'",
")",
"]",
")",
")",
"assert",
"np",
".",
"all",
"(",
"(",
"table",
"==",
"expected",
")",
")"
] |
test to make sure that the html reader inputs multidimensional columns using the colspan attribute of <th> .
|
train
| false
|
4,031
|
def unhold(name=None, pkgs=None, sources=None, **kwargs):
_check_versionlock()
if ((not name) and (not pkgs) and (not sources)):
raise SaltInvocationError('One of name, pkgs, or sources must be specified.')
if (pkgs and sources):
raise SaltInvocationError('Only one of pkgs or sources can be specified.')
targets = []
if pkgs:
for pkg in salt.utils.repack_dictlist(pkgs):
targets.append(pkg)
elif sources:
for source in sources:
targets.append(next(iter(source)))
else:
targets.append(name)
current_locks = list_holds(full=(_yum() == 'yum'))
ret = {}
for target in targets:
if isinstance(target, dict):
target = next(six.iterkeys(target))
ret[target] = {'name': target, 'changes': {}, 'result': False, 'comment': ''}
if (_yum() == 'dnf'):
search_locks = [x for x in current_locks if (x == target)]
else:
search_locks = [x for x in current_locks if (fnmatch.fnmatch(x, '*{0}*'.format(target)) and (target == _get_hold(x, full=False)))]
if search_locks:
if __opts__['test']:
ret[target].update(result=None)
ret[target]['comment'] = 'Package {0} is set to be unheld.'.format(target)
else:
out = __salt__['cmd.run_all'](([_yum(), 'versionlock', 'delete'] + search_locks), python_shell=False)
if (out['retcode'] == 0):
ret[target].update(result=True)
ret[target]['comment'] = 'Package {0} is no longer held.'.format(target)
ret[target]['changes']['new'] = ''
ret[target]['changes']['old'] = 'hold'
else:
ret[target]['comment'] = 'Package {0} was unable to be unheld.'.format(target)
else:
ret[target].update(result=True)
ret[target]['comment'] = 'Package {0} is not being held.'.format(target)
return ret
|
[
"def",
"unhold",
"(",
"name",
"=",
"None",
",",
"pkgs",
"=",
"None",
",",
"sources",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"_check_versionlock",
"(",
")",
"if",
"(",
"(",
"not",
"name",
")",
"and",
"(",
"not",
"pkgs",
")",
"and",
"(",
"not",
"sources",
")",
")",
":",
"raise",
"SaltInvocationError",
"(",
"'One of name, pkgs, or sources must be specified.'",
")",
"if",
"(",
"pkgs",
"and",
"sources",
")",
":",
"raise",
"SaltInvocationError",
"(",
"'Only one of pkgs or sources can be specified.'",
")",
"targets",
"=",
"[",
"]",
"if",
"pkgs",
":",
"for",
"pkg",
"in",
"salt",
".",
"utils",
".",
"repack_dictlist",
"(",
"pkgs",
")",
":",
"targets",
".",
"append",
"(",
"pkg",
")",
"elif",
"sources",
":",
"for",
"source",
"in",
"sources",
":",
"targets",
".",
"append",
"(",
"next",
"(",
"iter",
"(",
"source",
")",
")",
")",
"else",
":",
"targets",
".",
"append",
"(",
"name",
")",
"current_locks",
"=",
"list_holds",
"(",
"full",
"=",
"(",
"_yum",
"(",
")",
"==",
"'yum'",
")",
")",
"ret",
"=",
"{",
"}",
"for",
"target",
"in",
"targets",
":",
"if",
"isinstance",
"(",
"target",
",",
"dict",
")",
":",
"target",
"=",
"next",
"(",
"six",
".",
"iterkeys",
"(",
"target",
")",
")",
"ret",
"[",
"target",
"]",
"=",
"{",
"'name'",
":",
"target",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"False",
",",
"'comment'",
":",
"''",
"}",
"if",
"(",
"_yum",
"(",
")",
"==",
"'dnf'",
")",
":",
"search_locks",
"=",
"[",
"x",
"for",
"x",
"in",
"current_locks",
"if",
"(",
"x",
"==",
"target",
")",
"]",
"else",
":",
"search_locks",
"=",
"[",
"x",
"for",
"x",
"in",
"current_locks",
"if",
"(",
"fnmatch",
".",
"fnmatch",
"(",
"x",
",",
"'*{0}*'",
".",
"format",
"(",
"target",
")",
")",
"and",
"(",
"target",
"==",
"_get_hold",
"(",
"x",
",",
"full",
"=",
"False",
")",
")",
")",
"]",
"if",
"search_locks",
":",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"target",
"]",
".",
"update",
"(",
"result",
"=",
"None",
")",
"ret",
"[",
"target",
"]",
"[",
"'comment'",
"]",
"=",
"'Package {0} is set to be unheld.'",
".",
"format",
"(",
"target",
")",
"else",
":",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"(",
"[",
"_yum",
"(",
")",
",",
"'versionlock'",
",",
"'delete'",
"]",
"+",
"search_locks",
")",
",",
"python_shell",
"=",
"False",
")",
"if",
"(",
"out",
"[",
"'retcode'",
"]",
"==",
"0",
")",
":",
"ret",
"[",
"target",
"]",
".",
"update",
"(",
"result",
"=",
"True",
")",
"ret",
"[",
"target",
"]",
"[",
"'comment'",
"]",
"=",
"'Package {0} is no longer held.'",
".",
"format",
"(",
"target",
")",
"ret",
"[",
"target",
"]",
"[",
"'changes'",
"]",
"[",
"'new'",
"]",
"=",
"''",
"ret",
"[",
"target",
"]",
"[",
"'changes'",
"]",
"[",
"'old'",
"]",
"=",
"'hold'",
"else",
":",
"ret",
"[",
"target",
"]",
"[",
"'comment'",
"]",
"=",
"'Package {0} was unable to be unheld.'",
".",
"format",
"(",
"target",
")",
"else",
":",
"ret",
"[",
"target",
"]",
".",
"update",
"(",
"result",
"=",
"True",
")",
"ret",
"[",
"target",
"]",
"[",
"'comment'",
"]",
"=",
"'Package {0} is not being held.'",
".",
"format",
"(",
"target",
")",
"return",
"ret"
] |
remove version locks from the named package(s) so they can be updated again .
|
train
| true
|
4,032
|
def scriptsafe_dumps(obj, **kwargs):
text = _force_unicode(json.dumps(obj, **kwargs))
return _Unsafe(text.translate(_json_escapes))
|
[
"def",
"scriptsafe_dumps",
"(",
"obj",
",",
"**",
"kwargs",
")",
":",
"text",
"=",
"_force_unicode",
"(",
"json",
".",
"dumps",
"(",
"obj",
",",
"**",
"kwargs",
")",
")",
"return",
"_Unsafe",
"(",
"text",
".",
"translate",
"(",
"_json_escapes",
")",
")"
] |
like json.dumps, but escapes the output so it is safe to embed inside a script tag .
|
train
| false
|
4,033
|
def _read_filter(fid):
f = dict()
f['freq'] = _read_double(fid)[0]
f['class'] = _read_int(fid)
f['type'] = _read_int(fid)
f['npar'] = _read_int2(fid)
f['pars'] = _read_double(fid, f['npar'])
return f
|
[
"def",
"_read_filter",
"(",
"fid",
")",
":",
"f",
"=",
"dict",
"(",
")",
"f",
"[",
"'freq'",
"]",
"=",
"_read_double",
"(",
"fid",
")",
"[",
"0",
"]",
"f",
"[",
"'class'",
"]",
"=",
"_read_int",
"(",
"fid",
")",
"f",
"[",
"'type'",
"]",
"=",
"_read_int",
"(",
"fid",
")",
"f",
"[",
"'npar'",
"]",
"=",
"_read_int2",
"(",
"fid",
")",
"f",
"[",
"'pars'",
"]",
"=",
"_read_double",
"(",
"fid",
",",
"f",
"[",
"'npar'",
"]",
")",
"return",
"f"
] |
read filter information .
|
train
| false
|
4,034
|
def parse_geometry(geometry, ratio=None):
m = geometry_pat.match(geometry)
def syntax_error():
return ThumbnailParseError(('Geometry does not have the correct syntax: %s' % geometry))
if (not m):
raise syntax_error()
x = m.group('x')
y = m.group('y')
if ((x is None) and (y is None)):
raise syntax_error()
if (x is not None):
x = int(x)
if (y is not None):
y = int(y)
if (ratio is not None):
ratio = float(ratio)
if (x is None):
x = toint((y * ratio))
elif (y is None):
y = toint((x / ratio))
return (x, y)
|
[
"def",
"parse_geometry",
"(",
"geometry",
",",
"ratio",
"=",
"None",
")",
":",
"m",
"=",
"geometry_pat",
".",
"match",
"(",
"geometry",
")",
"def",
"syntax_error",
"(",
")",
":",
"return",
"ThumbnailParseError",
"(",
"(",
"'Geometry does not have the correct syntax: %s'",
"%",
"geometry",
")",
")",
"if",
"(",
"not",
"m",
")",
":",
"raise",
"syntax_error",
"(",
")",
"x",
"=",
"m",
".",
"group",
"(",
"'x'",
")",
"y",
"=",
"m",
".",
"group",
"(",
"'y'",
")",
"if",
"(",
"(",
"x",
"is",
"None",
")",
"and",
"(",
"y",
"is",
"None",
")",
")",
":",
"raise",
"syntax_error",
"(",
")",
"if",
"(",
"x",
"is",
"not",
"None",
")",
":",
"x",
"=",
"int",
"(",
"x",
")",
"if",
"(",
"y",
"is",
"not",
"None",
")",
":",
"y",
"=",
"int",
"(",
"y",
")",
"if",
"(",
"ratio",
"is",
"not",
"None",
")",
":",
"ratio",
"=",
"float",
"(",
"ratio",
")",
"if",
"(",
"x",
"is",
"None",
")",
":",
"x",
"=",
"toint",
"(",
"(",
"y",
"*",
"ratio",
")",
")",
"elif",
"(",
"y",
"is",
"None",
")",
":",
"y",
"=",
"toint",
"(",
"(",
"x",
"/",
"ratio",
")",
")",
"return",
"(",
"x",
",",
"y",
")"
] |
parses a geometry string syntax and returns a tuple .
|
train
| true
|
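A hedged usage sketch with sorl-thumbnail-style geometry strings; the values are illustrative.

parse_geometry('100x200')        # -> (100, 200)
parse_geometry('100')            # -> (100, None): no ratio to derive y from
parse_geometry('x200', ratio=2)  # -> (400, 200): x is filled in as y * ratio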
4,035
|
def format_taxa_summary(taxa_summary):
result = (('Taxon DCTB ' + ' DCTB '.join(taxa_summary[0])) + '\n')
for (taxon, row) in zip(taxa_summary[1], taxa_summary[2]):
row = map(str, row)
result += ((('%s DCTB ' % taxon) + ' DCTB '.join(row)) + '\n')
return result
|
[
"def",
"format_taxa_summary",
"(",
"taxa_summary",
")",
":",
"result",
"=",
"(",
"(",
"'Taxon DCTB '",
"+",
"' DCTB '",
".",
"join",
"(",
"taxa_summary",
"[",
"0",
"]",
")",
")",
"+",
"'\\n'",
")",
"for",
"(",
"taxon",
",",
"row",
")",
"in",
"zip",
"(",
"taxa_summary",
"[",
"1",
"]",
",",
"taxa_summary",
"[",
"2",
"]",
")",
":",
"row",
"=",
"map",
"(",
"str",
",",
"row",
")",
"result",
"+=",
"(",
"(",
"(",
"'%s DCTB '",
"%",
"taxon",
")",
"+",
"' DCTB '",
".",
"join",
"(",
"row",
")",
")",
"+",
"'\\n'",
")",
"return",
"result"
] |
formats a taxa summary to be suitable for writing to a file .
|
train
| false
|
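A hedged usage sketch; DCTB in the snippet appears to be this corpus's placeholder for a literal tab character. The taxa summary triple is (sample ids, taxa, data rows).

taxa_summary = (['s1', 's2'], ['Bacteria'], [[0.5, 0.7]])
print(format_taxa_summary(taxa_summary))
# Taxon<TAB>s1<TAB>s2
# Bacteria<TAB>0.5<TAB>0.7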
4,037
|
def org_facility_rheader(r, tabs=[]):
T = current.T
s3db = current.s3db
(tablename, record) = s3_rheader_resource(r)
r.record = record
r.table = s3db[tablename]
tabs = [(T('Details'), None)]
try:
tabs = (tabs + s3db.req_tabs(r))
except:
pass
try:
tabs = (tabs + s3db.inv_tabs(r))
except:
pass
rheader_fields = [['name'], ['location_id']]
rheader = S3ResourceHeader(rheader_fields, tabs)(r)
return rheader
|
[
"def",
"org_facility_rheader",
"(",
"r",
",",
"tabs",
"=",
"[",
"]",
")",
":",
"T",
"=",
"current",
".",
"T",
"s3db",
"=",
"current",
".",
"s3db",
"(",
"tablename",
",",
"record",
")",
"=",
"s3_rheader_resource",
"(",
"r",
")",
"r",
".",
"record",
"=",
"record",
"r",
".",
"table",
"=",
"s3db",
"[",
"tablename",
"]",
"tabs",
"=",
"[",
"(",
"T",
"(",
"'Details'",
")",
",",
"None",
")",
"]",
"try",
":",
"tabs",
"=",
"(",
"tabs",
"+",
"s3db",
".",
"req_tabs",
"(",
"r",
")",
")",
"except",
":",
"pass",
"try",
":",
"tabs",
"=",
"(",
"tabs",
"+",
"s3db",
".",
"inv_tabs",
"(",
"r",
")",
")",
"except",
":",
"pass",
"rheader_fields",
"=",
"[",
"[",
"'name'",
"]",
",",
"[",
"'location_id'",
"]",
"]",
"rheader",
"=",
"S3ResourceHeader",
"(",
"rheader_fields",
",",
"tabs",
")",
"(",
"r",
")",
"return",
"rheader"
] |
rheader for facilities when doing a req_match .
|
train
| false
|
4,038
|
def diagflat(v, k=0):
if isinstance(v, cupy.ndarray):
return cupy.diag(v.ravel(), k)
else:
return cupy.diag(numpy.ndarray(v).ravel(), k)
|
[
"def",
"diagflat",
"(",
"v",
",",
"k",
"=",
"0",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"cupy",
".",
"ndarray",
")",
":",
"return",
"cupy",
".",
"diag",
"(",
"v",
".",
"ravel",
"(",
")",
",",
"k",
")",
"else",
":",
"return",
"cupy",
".",
"diag",
"(",
"numpy",
".",
"ndarray",
"(",
"v",
")",
".",
"ravel",
"(",
")",
",",
"k",
")"
] |
creates a diagonal array from the flattened input .
|
train
| false
|
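A hedged usage sketch with CuPy; note the non-ndarray branch calls numpy.ndarray(v), which looks like it was meant to be numpy.asarray(v).

import cupy

v = cupy.array([1, 2, 3])
diagflat(v)       # 3x3 matrix with 1, 2, 3 on the main diagonal
diagflat(v, k=1)  # 4x4 matrix with 1, 2, 3 on the first super-diagonal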
4,039
|
def _get_dvs_portgroup(dvs, portgroup_name):
for portgroup in dvs.portgroup:
if (portgroup.name == portgroup_name):
return portgroup
return None
|
[
"def",
"_get_dvs_portgroup",
"(",
"dvs",
",",
"portgroup_name",
")",
":",
"for",
"portgroup",
"in",
"dvs",
".",
"portgroup",
":",
"if",
"(",
"portgroup",
".",
"name",
"==",
"portgroup_name",
")",
":",
"return",
"portgroup",
"return",
"None"
] |
return a portgroup object corresponding to the portgroup name on the dvs .
|
train
| true
|
4,040
|
def _avi_screen_size(filename):
try:
if (not filename.endswith(u'.avi')):
with io.open(filename, u'rb') as f:
header = f.read(72)
x = binascii.hexlify(header[68:72])
height = int((((x[6:8] + x[4:6]) + x[2:4]) + x[0:2]), 16)
assert (100 < height < 4320)
x = binascii.hexlify(header[64:68])
width = int((((x[6:8] + x[4:6]) + x[2:4]) + x[0:2]), 16)
assert (100 < width < 7680)
return (width, height)
except Exception:
pass
return (None, None)
|
[
"def",
"_avi_screen_size",
"(",
"filename",
")",
":",
"try",
":",
"if",
"(",
"not",
"filename",
".",
"endswith",
"(",
"u'.avi'",
")",
")",
":",
"with",
"io",
".",
"open",
"(",
"filename",
",",
"u'rb'",
")",
"as",
"f",
":",
"header",
"=",
"f",
".",
"read",
"(",
"72",
")",
"x",
"=",
"binascii",
".",
"hexlify",
"(",
"header",
"[",
"68",
":",
"72",
"]",
")",
"height",
"=",
"int",
"(",
"(",
"(",
"(",
"x",
"[",
"6",
":",
"8",
"]",
"+",
"x",
"[",
"4",
":",
"6",
"]",
")",
"+",
"x",
"[",
"2",
":",
"4",
"]",
")",
"+",
"x",
"[",
"0",
":",
"2",
"]",
")",
",",
"16",
")",
"assert",
"(",
"100",
"<",
"height",
"<",
"4320",
")",
"x",
"=",
"binascii",
".",
"hexlify",
"(",
"header",
"[",
"64",
":",
"68",
"]",
")",
"width",
"=",
"int",
"(",
"(",
"(",
"(",
"x",
"[",
"6",
":",
"8",
"]",
"+",
"x",
"[",
"4",
":",
"6",
"]",
")",
"+",
"x",
"[",
"2",
":",
"4",
"]",
")",
"+",
"x",
"[",
"0",
":",
"2",
"]",
")",
",",
"16",
")",
"assert",
"(",
"100",
"<",
"width",
"<",
"7680",
")",
"return",
"(",
"width",
",",
"height",
")",
"except",
"Exception",
":",
"pass",
"return",
"(",
"None",
",",
"None",
")"
] |
parses avi file header for width and height .
|
train
| false
|
4,041
|
def apns_send_bulk_message(registration_ids, alert, **kwargs):
certfile = kwargs.get('certfile', None)
with closing(_apns_create_socket_to_push(certfile)) as socket:
for (identifier, registration_id) in enumerate(registration_ids):
res = _apns_send(registration_id, alert, identifier=identifier, socket=socket, **kwargs)
_apns_check_errors(socket)
return res
|
[
"def",
"apns_send_bulk_message",
"(",
"registration_ids",
",",
"alert",
",",
"**",
"kwargs",
")",
":",
"certfile",
"=",
"kwargs",
".",
"get",
"(",
"'certfile'",
",",
"None",
")",
"with",
"closing",
"(",
"_apns_create_socket_to_push",
"(",
"certfile",
")",
")",
"as",
"socket",
":",
"for",
"(",
"identifier",
",",
"registration_id",
")",
"in",
"enumerate",
"(",
"registration_ids",
")",
":",
"res",
"=",
"_apns_send",
"(",
"registration_id",
",",
"alert",
",",
"identifier",
"=",
"identifier",
",",
"socket",
"=",
"socket",
",",
"**",
"kwargs",
")",
"_apns_check_errors",
"(",
"socket",
")",
"return",
"res"
] |
sends an apns notification to one or more registration_ids .
|
train
| true
|
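A hedged usage sketch; the device tokens are placeholders, certfile is the one kwarg the snippet reads itself, and any other kwargs (sound, badge, ...) are assumed to be forwarded to _apns_send.

tokens = ['<device-token-1>', '<device-token-2>']
apns_send_bulk_message(tokens, 'Build finished', certfile='apns-cert.pem', sound='default', badge=1)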
4,043
|
def instance_get_all_by_host(context, host, columns_to_join=None):
return IMPL.instance_get_all_by_host(context, host, columns_to_join)
|
[
"def",
"instance_get_all_by_host",
"(",
"context",
",",
"host",
",",
"columns_to_join",
"=",
"None",
")",
":",
"return",
"IMPL",
".",
"instance_get_all_by_host",
"(",
"context",
",",
"host",
",",
"columns_to_join",
")"
] |
get all instances belonging to a host .
|
train
| false
|
4,044
|
def cell_to_rowcol2(cell):
m = _re_cell_ex.match(cell)
if (not m):
raise Exception('Error in cell format')
(col_abs, col, row_abs, row) = m.groups()
row = (int(row) - 1)
col = col_by_name(col.upper())
return (row, col)
|
[
"def",
"cell_to_rowcol2",
"(",
"cell",
")",
":",
"m",
"=",
"_re_cell_ex",
".",
"match",
"(",
"cell",
")",
"if",
"(",
"not",
"m",
")",
":",
"raise",
"Exception",
"(",
"'Error in cell format'",
")",
"(",
"col_abs",
",",
"col",
",",
"row_abs",
",",
"row",
")",
"=",
"m",
".",
"groups",
"(",
")",
"row",
"=",
"(",
"int",
"(",
"row",
")",
"-",
"1",
")",
"col",
"=",
"col_by_name",
"(",
"col",
".",
"upper",
"(",
")",
")",
"return",
"(",
"row",
",",
"col",
")"
] |
convert an excel cell reference string in a1 notation to numeric row/col notation .
|
train
| false
|
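A hedged usage sketch; col_by_name is the xlwt-style helper the snippet assumes, mapping 'A' to 0, 'B' to 1, and so on.

cell_to_rowcol2('B3')    # -> (2, 1): zero-indexed (row, col)
cell_to_rowcol2('$A$1')  # -> (0, 0): '$' absolute markers are parsed but ignored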
4,045
|
def latest_requirements_revision():
return local('hg log -r :. --template "{node|short}\n" scripts/prod-requirements.txt | tail -n 1', capture=True)
|
[
"def",
"latest_requirements_revision",
"(",
")",
":",
"return",
"local",
"(",
"'hg log -r :. --template \"{node|short}\\n\" scripts/prod-requirements.txt | tail -n 1'",
",",
"capture",
"=",
"True",
")"
] |
return the revision id of the last change to the prod-requirements file .
|
train
| false
|