hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f9018d542882a2b4348b7736cc1deaa344619663 | 9,971 | py | Python | hybridbackend/tensorflow/distribute/nccl.py | fuhailin/HybridBackend | 113383c5870b7180fa67c194208a27f76bdbf3f0 | [
"Apache-2.0"
] | 38 | 2021-12-01T06:54:36.000Z | 2022-03-23T11:23:21.000Z | hybridbackend/tensorflow/distribute/nccl.py | fuhailin/HybridBackend | 113383c5870b7180fa67c194208a27f76bdbf3f0 | [
"Apache-2.0"
] | 15 | 2021-12-01T09:15:26.000Z | 2022-03-28T02:49:21.000Z | hybridbackend/tensorflow/distribute/nccl.py | fuhailin/HybridBackend | 113383c5870b7180fa67c194208a27f76bdbf3f0 | [
"Apache-2.0"
] | 8 | 2021-12-02T01:16:14.000Z | 2022-01-28T04:51:16.000Z | # Copyright 2021 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
r'''NCCL based collective commmunication.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import tf_logging as logging
from hybridbackend.tensorflow.distribute.communicator import CollectiveOps
from hybridbackend.tensorflow.distribute.communicator import Communicator
from hybridbackend.tensorflow.distribute.pubsub import PubSub
from hybridbackend.tensorflow.pywrap import _ops
# Communicator bookkeeping ops (id generation, handle creation and state
# queries) produce no differentiable outputs.
ops.NotDifferentiable('GetNcclId')
ops.NotDifferentiable('NcclComm')
ops.NotDifferentiable('CreateNcclComm')
ops.NotDifferentiable('IsNcclCommInitialized')
@ops.RegisterGradient('ReduceWithNcclComm')
def _reduce_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL reduce op.

  The adjoint of a SUM-reduce onto ``root_rank`` is a broadcast of the
  incoming gradient from ``root_rank`` to every rank.

  Args:
    op: The forward ``ReduceWithNcclComm`` op.
    *args: Incoming gradients; ``args[0]`` is the gradient of the result.

  Returns:
    Gradients aligned with ``op.inputs``: ``None`` for the communicator
    handle and the broadcast tensor for the reduced value.
  '''
  comm = op.inputs[0]
  grad_in = args[0]
  reduce_op = op.get_attr('reduce_op')
  root_rank = op.get_attr('root_rank')
  # Only SUM has this simple broadcast adjoint.
  if reduce_op != CollectiveOps.SUM:
    raise NotImplementedError(
      'Only reduce_op=SUM is supported for gradients computation.')
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.broadcast_with_nccl_comm(
        comm, grad_in, root_rank=root_rank)
  return None, grad_out
@ops.RegisterGradient('ReduceScatterWithNcclComm')
def _reduce_scatter_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL reduce scatter op.

  The adjoint of a SUM reduce-scatter is an allgather of the incoming
  gradient shards. Returns ``None`` for the communicator handle input.
  '''
  comm = op.inputs[0]
  grad_in = args[0]
  reduce_op = op.get_attr('reduce_op')
  # Only SUM has this simple allgather adjoint.
  if reduce_op != CollectiveOps.SUM:
    raise NotImplementedError(
      'Only reduce_op=SUM is supported for gradients computation.')
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.allgather_with_nccl_comm(comm, grad_in)
  return None, grad_out
@ops.RegisterGradient('AllreduceWithNcclComm')
def _allreduce_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL allreduce op.

  A SUM allreduce is self-adjoint: the gradient is the allreduce of the
  incoming gradient. Returns ``None`` for the communicator handle input.
  '''
  comm = op.inputs[0]
  grad_in = args[0]
  reduce_op = op.get_attr('reduce_op')
  # Only SUM is self-adjoint in this simple form.
  if reduce_op != CollectiveOps.SUM:
    raise NotImplementedError(
      'Only reduce_op=SUM is supported for gradients computation.')
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.allreduce_with_nccl_comm(
        comm, grad_in, reduce_op=reduce_op)
  return None, grad_out
# No gradients are registered for these one-to-many / many-to-one ops.
ops.NotDifferentiable('BroadcastWithNcclComm')
ops.NotDifferentiable('ScatterWithNcclComm')
ops.NotDifferentiable('GatherWithNcclComm')
ops.NotDifferentiable('GathervWithNcclComm')
@ops.RegisterGradient('AllgatherWithNcclComm')
def _allgather_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL allgather op.

  The adjoint of an allgather is a SUM reduce-scatter of the incoming
  gradient. Returns ``None`` for the communicator handle input.
  '''
  comm = op.inputs[0]
  grad_in = args[0]
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.reduce_scatter_with_nccl_comm(
        comm, grad_in, reduce_op=CollectiveOps.SUM)
  return None, grad_out
# Variable-length allgather has no registered gradient.
ops.NotDifferentiable('AllgathervWithNcclComm')
@ops.RegisterGradient('AlltoallWithNcclComm')
def _alltoall_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL alltoall op.

  Alltoall permutes data across ranks, so its adjoint is the same
  alltoall applied to the incoming gradient. Returns ``None`` for the
  communicator handle input.
  '''
  comm = op.inputs[0]
  grad_in = args[0]
  wire_dtype = op.get_attr('wire_dtype')
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.alltoall_with_nccl_comm(
        comm, grad_in,
        wire_dtype=wire_dtype)
  return None, grad_out
@ops.RegisterGradient('AlltoallvWithNcclComm')
def _nccl_alltoallv_grad(op, *args):
  r'''Gradient for NCCL alltoallv op.

  The backward pass is another alltoallv of the incoming gradient. The
  sizes received by the forward op (``op.outputs[1]``) are exactly the
  sizes to send backward. Returns ``None`` for the communicator handle.
  '''
  comm = op.inputs[0]
  grad_in = list(args)[0]
  # Forward output sizes become the backward send sizes.
  grad_sizes_in = op.outputs[1]
  common_shape = op.get_attr('common_shape')
  wire_dtype = op.get_attr('wire_dtype')
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out, grad_sizes_out = _ops.alltoallv_with_nccl_comm(
        comm, grad_in, grad_sizes_in,
        wire_dtype=wire_dtype,
        common_shape=common_shape)
  return None, grad_out, grad_sizes_out
@ops.RegisterGradient('GroupAlltoallvWithNcclComm')
def _group_alltoallv_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL group_alltoallv op.

  Mirrors the single-tensor alltoallv gradient for ``num_columns``
  tensors at once: the forward op's received sizes
  (``op.outputs[num_columns:]``) are the backward send sizes. No
  gradients flow to the communicator handle or the size inputs.
  '''
  comm = op.inputs[0]
  num_columns = op.get_attr('num_columns')
  wire_dtype = op.get_attr('wire_dtype')
  common_shapes = op.get_attr('common_shapes')
  # The first num_columns incoming gradients pair with the value outputs.
  grad_in = args[:num_columns]
  grad_sizes_in = op.outputs[num_columns:]
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out, _ = _ops.group_alltoallv_with_nccl_comm(
        comm, grad_in, grad_sizes_in,
        wire_dtype=wire_dtype,
        common_shapes=common_shapes)
  # None for the handle, gradients for the values, None per size input.
  return (None, *grad_out, *[None for _ in range(num_columns)])
@ops.RegisterGradient('AlltoallwWithNcclComm')
def _alltoallw_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL alltoallw op.

  The adjoint of alltoallw is alltoallw of the incoming per-rank
  gradient tensors. Returns ``None`` for the communicator handle input.
  '''
  comm = op.inputs[0]
  common_shape = op.get_attr('common_shape')
  wire_dtype = op.get_attr('wire_dtype')
  # One incoming gradient per rank-tensor output.
  grad_in = list(args)
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.alltoallw_with_nccl_comm(
        comm, grad_in,
        wire_dtype=wire_dtype,
        common_shape=common_shape)
  return [None] + grad_out
@ops.RegisterGradient('GroupAlltoallwWithNcclComm')
def _group_alltoallw_with_nccl_comm_grad(op, *args):
  r'''Gradient for NCCL group_alltoallw op.

  The adjoint of a grouped alltoallw is the same grouped alltoallw
  applied to the incoming gradients. Returns ``None`` for the
  communicator handle input.
  '''
  comm = op.inputs[0]
  num_columns = op.get_attr('num_columns')
  wire_dtype = op.get_attr('wire_dtype')
  common_shapes = op.get_attr('common_shapes')
  # One incoming gradient per flattened output tensor.
  grad_in = list(args)
  with ops.device(op.device):
    # Run the backward collective only after the forward outputs exist.
    with ops.control_dependencies(op.outputs):
      grad_out = _ops.group_alltoallw_with_nccl_comm(
        comm, grad_in,
        num_columns=num_columns,
        wire_dtype=wire_dtype,
        common_shapes=common_shapes)
  return [None] + grad_out
class NcclCommunicator(Communicator):
  r'''A communicator using NCCL.

  Maps the abstract `Communicator` interface onto the NCCL collective
  kernels exposed by ``hybridbackend.tensorflow.pywrap``.
  '''
  NAME = 'NCCL'
  @classmethod
  def compute_pool_capacity(cls, capacity=None):
    r'''Return the communicator pool capacity (defaults to 1).

    Warns when a capacity other than 1 is requested, since pooling
    multiple NCCL communicators is unsafe.
    '''
    if capacity is not None and capacity != 1:
      logging.warning('Multiple-communicators pooling is unsafe.')
    return capacity or 1
  def _build_handle(self):
    r'''Create the communicator resource-handle op.'''
    return _ops.nccl_comm_handle_op(shared_name=self.shared_name)
  def _build_create_op(self):
    r'''Create the op that initializes the NCCL communicator.

    The NCCL unique id (128 bytes, carried as 16 int64 values) is
    generated once and shared across `self.devices` via `PubSub` before
    the communicator is created.
    '''
    nccl_id = PubSub(self.devices)(
      _ops.get_nccl_id,
      tensor_shape.TensorShape([16]),  # 128 bytes / 8 bytes per int64
      dtypes.int64,
      name=f'{self.shared_name}/nccl_id')
    return _ops.create_nccl_comm(
      self.handle, nccl_id,
      size=self.size,
      rank=self.rank,
      shared_name=self.shared_name)
  def _build_is_initialized_op(self):
    r'''Create the op reporting whether the communicator is ready.'''
    return _ops.is_nccl_comm_initialized(self.handle)
  def _reduce(self, value, reduce_op, root_rank, name):
    r'''Reduce `value` across ranks onto `root_rank`.'''
    return _ops.reduce_with_nccl_comm(
      self.handle, value,
      reduce_op=reduce_op,
      root_rank=root_rank,
      name=name)
  def _reduce_scatter(self, value, reduce_op, name):
    r'''Reduce `value` across ranks, scattering one shard to each rank.'''
    return _ops.reduce_scatter_with_nccl_comm(
      self.handle, value,
      reduce_op=reduce_op,
      name=name)
  def _allreduce(self, value, reduce_op, name):
    r'''Reduce `value` across ranks, leaving the result on every rank.'''
    return _ops.allreduce_with_nccl_comm(
      self.handle, value,
      reduce_op=reduce_op,
      name=name)
  def _broadcast(self, value, root_rank, name):
    r'''Broadcast `value` from `root_rank` to every rank.'''
    return _ops.broadcast_with_nccl_comm(
      self.handle, value,
      root_rank=root_rank,
      name=name)
  def _scatter(self, value, root_rank, name):
    r'''Scatter is not supported by the NCCL communicator.'''
    raise NotImplementedError
  def _gather(self, value, root_rank, name):
    r'''Gather is not supported by the NCCL communicator.'''
    raise NotImplementedError
  def _gatherv(self, value, root_rank, name):
    r'''Gatherv is not supported by the NCCL communicator.'''
    raise NotImplementedError
  def _allgather(self, value, name):
    r'''Gather `value` from all ranks onto every rank.'''
    return _ops.allgather_with_nccl_comm(self.handle, value, name=name)
  def _allgatherv(self, value, name):
    r'''Allgather of variable-length `value` tensors.'''
    return _ops.allgatherv_with_nccl_comm(self.handle, value, name=name)
  def _alltoall(self, value, wire_dtype, name, **kwargs):
    r'''Exchange equal-size shards of `value` between all ranks.'''
    return _ops.alltoall_with_nccl_comm(
      self.handle, value,
      wire_dtype=wire_dtype,
      name=name)
  def _alltoallv(self, value, sizes, wire_dtype, common_shape, name, **kwargs):
    r'''Exchange variable-size shards of `value` between all ranks.'''
    return _ops.alltoallv_with_nccl_comm(
      self.handle, value, sizes,
      common_shape=common_shape,
      wire_dtype=wire_dtype,
      name=name)
  def _group_alltoallv(
      self, values, sizes, wire_dtype, common_shapes, name, **kwargs):
    r'''Fused alltoallv over a group of tensors.'''
    return _ops.group_alltoallv_with_nccl_comm(
      self.handle, values, sizes,
      common_shapes=common_shapes,
      wire_dtype=wire_dtype,
      name=name)
  def _alltoallw(self, values, wire_dtype, common_shape, name, **kwargs):
    r'''Exchange per-rank tensors of differing shapes between ranks.'''
    return _ops.alltoallw_with_nccl_comm(
      self.handle, values,
      common_shape=common_shape,
      wire_dtype=wire_dtype,
      name=name)
  def _group_alltoallw(
      self, group_values, wire_dtype, common_shapes, name, **kwargs):
    r'''Fused alltoallw over a group of tensor lists.

    `group_values` is a list of column lists of per-rank tensors; the
    flat op result is re-chunked into sublists of `self.size` tensors.
    '''
    # NOTE(review): this method uses self._handle while the others use
    # self.handle -- presumably equivalent; confirm against Communicator.
    flatten_results = _ops.group_alltoallw_with_nccl_comm(
      self._handle,
      sum(group_values, []),
      num_columns=len(group_values),
      wire_dtype=wire_dtype,
      common_shapes=common_shapes)
    # Re-group the flat output: start a new sublist every `self.size`
    # tensors.
    results = []
    for v in flatten_results:
      if results and len(results[-1]) < self.size:
        results[-1].append(v)
      else:
        results.append([v])
    return results
# Make the NCCL implementation discoverable via the Communicator registry.
Communicator.register(NcclCommunicator)
| 31.454259 | 79 | 0.726407 |
3bd87e222b8002bf3226dc2bc73851c0a32da605 | 3,066 | py | Python | train_dynamics_prediction.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | null | null | null | train_dynamics_prediction.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | null | null | null | train_dynamics_prediction.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | 1 | 2021-12-07T13:45:33.000Z | 2021-12-07T13:45:33.000Z | import numpy as np
import gym
import torch
import torch.nn as nn
import argparse
import pickle
from functools import partial
# Load pre-collected (state, label) arrays pickled by the data pipeline.
state_vector = pickle.load(open("./data/state_vector_ccadea_forward.p", "rb"))
state_labels = pickle.load(open("./data/state_labels_ccadea_forward.p", "rb"))
# Train/test split: first 40k samples train, next 10k test. Column 3 of
# the labels is the regression target (presumably a COM component given
# the model filename used below -- TODO confirm).
state_vector_train = state_vector[0:40000]
state_labels_train = state_labels[0:40000][:, 3]
state_vector_test = state_vector[40000:50000]
state_labels_test = state_labels[40000:50000][:, 3]
# Shuffle the training split via an index permutation, then move to torch.
s = np.arange(state_vector_train.shape[0])
np.random.shuffle(s)
state_vector_train = torch.from_numpy(state_vector_train[s])
state_labels_train = torch.from_numpy(state_labels_train[s])
# Shuffle the test split the same way (order does not affect the metrics).
s = np.arange(state_vector_test.shape[0])
np.random.shuffle(s)
state_vector_test = torch.from_numpy(state_vector_test[s])
state_labels_test = torch.from_numpy(state_labels_test[s])
print(state_vector_train.shape)
print(state_labels_train.shape)
# Network dimensions and minibatch size.
input_nodes, hidden_nodes, output_nodes, batch_size = 600, 600, 1, 100
class Net(nn.Module):
    """Three-layer MLP regressor: Linear -> ReLU -> Linear -> Tanh -> Linear.

    Layer sizes were previously hard-coded to the module-level constants
    (600, 600, 1); they are now constructor parameters with those same
    defaults, so ``Net()`` behaves exactly as before while the class can
    be reused for other input/output dimensions.
    """

    def __init__(self, input_nodes=600, hidden_nodes=600, output_nodes=1):
        """Build the network.

        Args:
            input_nodes: Size of the input feature vector.
            hidden_nodes: Width of the two hidden layers.
            output_nodes: Size of the regression output.
        """
        super(Net, self).__init__()
        self.fc1 = nn.Linear(input_nodes, hidden_nodes)
        self.fc2 = nn.Linear(hidden_nodes, hidden_nodes)
        self.fc3 = nn.Linear(hidden_nodes, output_nodes)

    def forward(self, x):
        """Forward pass; returns a ``(batch, output_nodes)`` tensor."""
        x = self.fc1(x)
        x = torch.relu(x)
        x = self.fc2(x)
        x = torch.tanh(x)
        x = self.fc3(x)
        return x
# Train the MLP with Adam on L1 loss over 1000 epochs.
model = Net()
criterion = nn.L1Loss()
optimizer = torch.optim.Adam(model.parameters(), lr=5e-5)
for epoch in range(1000):
    # Fresh sample order every epoch.
    permutation = torch.randperm(state_vector_train.shape[0])
    for data in range(0, state_vector_train.shape[0], batch_size):
        optimizer.zero_grad()
        # Forward Propagation
        indices = permutation[data:data+batch_size]
        batch_x, batch_y = state_vector_train[indices], state_labels_train[indices]
        batch_y = batch_y.unsqueeze(1)
        # print(batch_x.shape, batch_y.shape)
        output = model(batch_x.float()) # Compute and print loss
        print(output.shape, batch_y.float().shape)
        loss = criterion(output, batch_y.float())
        print('epoch: ', epoch, 'data: ', data,' loss: ', loss.item()) # Zero the gradients
        loss.backward()
        optimizer.step()
# Persist the trained model, then reload it for evaluation.
pickle.dump(model, open("./trained_models_sl/COM_Adam_1000epoch_5e-5lr_40000+10000_forward.p", "wb"))
model = pickle.load(open("./trained_models_sl/COM_Adam_1000epoch_5e-5lr_40000+10000_forward.p", "rb"))
model.eval()
with torch.no_grad():
    predict = model(state_vector_test.float())
    # NOTE(review): this prints the bound method object, not a size;
    # probably meant predict.size() or predict.shape.
    print(predict.size)
    loss = criterion(predict, state_labels_test.unsqueeze(1).float())
    print(loss)
    percent = 0
    # Accumulate per-sample absolute percentage error over the test set.
    for data in range(0, 10000):
        percent += torch.abs((predict[data] - state_labels_test[data])/state_labels_test[data] * 100)
        print("Predicted: ", predict[data], "Actual: ", state_labels_test[data], "Percentage: ", (predict[data] - state_labels_test[data])/state_labels_test[data] * 100, "%")
    print("L1Loss: ", loss.item())
    print("AvgPc: ", percent/10000)
    print("forward.p")
| 32.967742 | 170 | 0.701892 |
52b286c51738be5d2a00709ba3092a0153a976a1 | 1,782 | py | Python | ietf/stats/management/commands/fetch_meeting_attendance.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | ietf/stats/management/commands/fetch_meeting_attendance.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | ietf/stats/management/commands/fetch_meeting_attendance.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | # Copyright 2016 IETF Trust
import syslog
from django.core.management.base import BaseCommand, CommandError
import debug # pyflakes:ignore
from ietf.meeting.models import Meeting
from ietf.stats.utils import get_meeting_registration_data
# Tag syslog entries with this module's short name and the process id.
logtag = __name__.split('.')[-1]
logname = "user.log"  # NOTE(review): not referenced elsewhere in this module
syslog.openlog(logtag, syslog.LOG_PID, syslog.LOG_USER)
class Command(BaseCommand):
    help = "Fetch meeting attendee figures from ietf.org/registration/attendees."

    def add_arguments(self, parser):
        # Exactly one of these selectors is expected (enforced in handle()).
        parser.add_argument("--meeting", help="meeting to fetch data for")
        parser.add_argument("--all", action="store_true", help="fetch data for all meetings")
        parser.add_argument("--latest", type=int, help="fetch data for latest N meetings")

    def handle(self, *args, **options):
        """Fetch registration data for the selected IETF meetings.

        Raises CommandError unless one of --meeting/--all/--latest is
        supplied. Progress messages go to stdout when attached to a TTY,
        otherwise to syslog.
        """
        self.verbosity = options['verbosity']
        meetings = Meeting.objects.none()
        if options['meeting']:
            meetings = Meeting.objects.filter(number=options['meeting'], type="ietf")
        elif options['all']:
            meetings = Meeting.objects.filter(type="ietf").order_by("date")
        elif options['latest']:
            meetings = Meeting.objects.filter(type="ietf").order_by("-date")[:options['latest']]
        else:
            raise CommandError("Please use one of --meeting, --all or --latest")
        for meeting in meetings:
            added, processed, total = get_meeting_registration_data(meeting)
            msg = "Fetched data for meeting %3s: %4d processed, %4d added, %4d in table" % (meeting.number, processed, added, total)
            if self.stdout.isatty():
                self.stdout.write(msg+'\n') # make debugging a bit easier
            else:
                syslog.syslog(msg)
| 39.6 | 132 | 0.647587 |
d83b2e21d87473bd59a44ed13c3d5b10d184d269 | 2,513 | py | Python | model_runner/validator/_tests/test_validator_utils.py | kevinyamauchi/model-runner | 77261bcd293de87533776291f44a09bf28a3c845 | [
"BSD-3-Clause"
] | null | null | null | model_runner/validator/_tests/test_validator_utils.py | kevinyamauchi/model-runner | 77261bcd293de87533776291f44a09bf28a3c845 | [
"BSD-3-Clause"
] | 5 | 2021-09-27T14:43:39.000Z | 2022-02-23T14:18:00.000Z | model_runner/validator/_tests/test_validator_utils.py | kevinyamauchi/model-runner | 77261bcd293de87533776291f44a09bf28a3c845 | [
"BSD-3-Clause"
] | null | null | null | import copy
import os
import pytest
from pydantic import ValidationError
from model_runner.validator.validator_utils import ConfigModel
def test_good_config(base_config):
    """ConfigModel accepts a valid GPU config and a CPU variant without GPU keys."""
    # test gpu config
    config = ConfigModel(**base_config)
    # test some specific parameters
    assert (
        type(config.dict()["job_prefix"]) == str
    ), '"job_prefix" should be of type str.'
    assert (
        type(config.dict()["job_parameters"]["run_time"]) == int
    ), '"run_time" should be of type int.'
    assert config.dict()["job_parameters"]["logfile_dir"].endswith(
        os.path.sep
    ), f'"logfile_dir" does not end with "{os.path.sep}".'
    assert os.path.isdir(
        config.dict()["output_base_dir"]
    ), '"output_base_dir" is not a directory'
    # test cpu config: removing the GPU keys must validate, not error
    cpu_config = copy.deepcopy(base_config)
    cpu_config["job_parameters"].pop("gpu_type")
    cpu_config["job_parameters"].pop("ngpus")
    config = ConfigModel(**cpu_config)
    # test some specific parameters of cpu config
    assert (
        config.dict()["job_parameters"]["gpu_type"] is None
    ), '"gpu_type" should be None.'
    assert config.dict()["job_parameters"]["ngpus"] is None, '"ngpus" should be None.'
def test_bad_configs(base_config):
    """Each individually broken variant of base_config must raise ValidationError."""
    # test gpu_type: unknown GPU names are rejected
    bad_config = copy.deepcopy(base_config)
    bad_config["job_parameters"].pop("gpu_type")
    bad_config["job_parameters"]["gpu_type"] = "Titanium"
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
    # test omitting gpu_type and ngpus: dropping only ngpus is invalid
    bad_config = copy.deepcopy(base_config)
    bad_config["job_parameters"].pop("ngpus")
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
    # test run_time: malformed time string
    bad_config = copy.deepcopy(base_config)
    bad_config["job_parameters"].pop("run_time")
    bad_config["job_parameters"]["run_time"] = "3:120"
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
    # test data: missing entirely, then pointing at a nonexistent path
    bad_config = copy.deepcopy(base_config)
    bad_config["runner_parameters"].pop("data")
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
    bad_config["runner_parameters"]["data"] = ["/some/bad/path"]
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
    # test runner: replaced with a nonexistent file path
    bad_config = copy.deepcopy(base_config)
    bad_config.pop("runner")
    bad_config["runner"] = "/non/existant/file"
    with pytest.raises(ValidationError):
        _ = ConfigModel(**bad_config)
| 29.564706 | 86 | 0.675686 |
158ed1bc20e2db4a2fb193cec6eee5f41e903de1 | 921 | py | Python | setup.py | xdusongwei/rdict | ab275a9db079b18cd4da17110441d3407bdb106a | [
"MIT"
] | null | null | null | setup.py | xdusongwei/rdict | ab275a9db079b18cd4da17110441d3407bdb106a | [
"MIT"
] | null | null | null | setup.py | xdusongwei/rdict | ab275a9db079b18cd4da17110441d3407bdb106a | [
"MIT"
] | null | null | null | from setuptools import setup
# Packaging metadata for the rdict distribution (runtime dependency: redis).
setup(
    name='rdict',
    version='0.1.1',
    author='songwei',
    author_email='songwei@songwei.io',
    description='A Pythonic tool is used for operating redis commands.',
    long_description='',
    url='https://github.com/xdusongwei/rdict',
    python_requires='>=3',
    packages=['rdict'],
    ext_modules=[],  # pure-Python package; no C extensions are built
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Operating System :: MacOS',
    ],
    install_requires=["redis"],
)
| 30.7 | 72 | 0.59392 |
bf410a88d2495134f82303f4978fb96616ea723f | 323 | py | Python | dcbase/decorator/profileModel.py | tctimmeh/dc-django-base | 08e444387c7ce4896343ac7e61444bbd7f98b4a6 | [
"MIT"
] | null | null | null | dcbase/decorator/profileModel.py | tctimmeh/dc-django-base | 08e444387c7ce4896343ac7e61444bbd7f98b4a6 | [
"MIT"
] | 13 | 2015-02-16T17:13:34.000Z | 2015-03-07T04:59:28.000Z | dcbase/decorator/profileModel.py | tctimmeh/dc-django-base | 08e444387c7ce4896343ac7e61444bbd7f98b4a6 | [
"MIT"
] | null | null | null | from dcbase.lib.profileEditor import ProfileEditor
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse_lazy
def profile_model():
    """Class decorator factory: registers the decorated model with ProfileEditor.

    Usage::

        @profile_model()
        class MyProfile(models.Model):
            ...

    The decorated class is returned unchanged after registration.
    """
    def _register(cls):
        ProfileEditor.register_profile_model(cls)
        return cls
    return _register
| 26.916667 | 59 | 0.804954 |
ec53b8808748eae28af8a4692972d70adf01df26 | 8,011 | py | Python | tests/validation/tests/v3_api/test_rke2_airgap.py | rand4711/rancher | e6c4492dfb2fab813ca9f6443df393b89f1e3431 | [
"Apache-2.0"
] | 18,697 | 2015-07-09T17:56:35.000Z | 2022-03-31T21:10:25.000Z | tests/validation/tests/v3_api/test_rke2_airgap.py | rand4711/rancher | e6c4492dfb2fab813ca9f6443df393b89f1e3431 | [
"Apache-2.0"
] | 31,132 | 2015-07-09T17:28:32.000Z | 2022-03-31T23:31:26.000Z | tests/validation/tests/v3_api/test_rke2_airgap.py | rand4711/rancher | e6c4492dfb2fab813ca9f6443df393b89f1e3431 | [
"Apache-2.0"
] | 2,979 | 2015-07-12T02:48:07.000Z | 2022-03-31T13:35:39.000Z | import os
from lib.aws import AWS_USER
from .common import AmazonWebServices
from .test_airgap import (AG_HOST_NAME, NUMBER_OF_INSTANCES, TARBALL_TYPE,
get_bastion_node, prepare_registries_mirror_on_node,
run_command_on_airgap_node, deploy_airgap_cluster,
prepare_private_registry, copy_certs_to_node,
trust_certs_on_node, add_tarball_to_node,
optionally_add_cluster_to_rancher)
RANCHER_RKE2_VERSION = os.environ.get("RANCHER_RKE2_VERSION", "")
RKE2_SERVER_OPTIONS = os.environ.get("RKE2_SERVER_OPTIONS", "")
RKE2_AGENT_OPTIONS = os.environ.get("RKE2_AGENT_OPTIONS", "")
def test_deploy_airgap_rke2_all():
    """Air-gapped RKE2 combining tarball images, a registry mirror and the
    system-default registry, then optionally import the cluster to Rancher."""
    reg_bastion_node = get_bastion_node(auth=True)
    noauth_bastion_node = get_bastion_node(auth=False)
    add_rke2_tarball_to_bastion(noauth_bastion_node, RANCHER_RKE2_VERSION)
    prepare_private_registry(noauth_bastion_node, RANCHER_RKE2_VERSION)
    prepare_private_registry(reg_bastion_node, RANCHER_RKE2_VERSION)
    ag_nodes = prepare_airgap_rke2(noauth_bastion_node, NUMBER_OF_INSTANCES,
                                   'tarball+mirror+system-default')
    # Point every airgap node's registry mirror at the authenticated bastion.
    for ag_node in ag_nodes:
        copy_certs_to_node(reg_bastion_node, ag_node)
        prepare_registries_mirror_on_node(reg_bastion_node, ag_node, 'rke2')
    server_ops = RKE2_SERVER_OPTIONS + " --system-default-registry={}".format(
        noauth_bastion_node.host_name)
    agent_ops = RKE2_AGENT_OPTIONS + " --system-default-registry={}".format(
        noauth_bastion_node.host_name)
    deploy_airgap_cluster(noauth_bastion_node, ag_nodes, "rke2",
                          server_ops, agent_ops)
    optionally_add_cluster_to_rancher(noauth_bastion_node, ag_nodes)
def test_deploy_airgap_rke2_system_default_registry():
    """Air-gapped RKE2 pulling all images via --system-default-registry
    from an unauthenticated bastion registry."""
    bastion_node = get_bastion_node(auth=False)
    prepare_private_registry(bastion_node, RANCHER_RKE2_VERSION)
    ag_nodes = prepare_airgap_rke2(bastion_node, NUMBER_OF_INSTANCES,
                                   'system_default_registry')
    server_ops = RKE2_SERVER_OPTIONS + " --system-default-registry={}".format(
        bastion_node.host_name)
    agent_ops = RKE2_AGENT_OPTIONS + " --system-default-registry={}".format(
        bastion_node.host_name)
    deploy_airgap_cluster(bastion_node, ag_nodes, "rke2",
                          server_ops, agent_ops)
def test_deploy_airgap_rke2_private_registry():
    """Air-gapped RKE2 pulling images through an authenticated private
    registry mirror; optionally import the cluster to Rancher."""
    bastion_node = get_bastion_node(auth=True)
    prepare_private_registry(bastion_node, RANCHER_RKE2_VERSION)
    ag_nodes = prepare_airgap_rke2(bastion_node, NUMBER_OF_INSTANCES,
                                   'private_registry')
    deploy_airgap_cluster(bastion_node, ag_nodes, "rke2",
                          RKE2_SERVER_OPTIONS, RKE2_AGENT_OPTIONS)
    optionally_add_cluster_to_rancher(bastion_node, ag_nodes)
def test_deploy_airgap_rke2_tarball():
    """Air-gapped RKE2 with images preloaded from tarballs only."""
    bastion_node = get_bastion_node()
    add_rke2_tarball_to_bastion(bastion_node, RANCHER_RKE2_VERSION)
    ag_nodes = prepare_airgap_rke2(
        bastion_node, NUMBER_OF_INSTANCES, 'tarball')
    deploy_airgap_cluster(bastion_node, ag_nodes, "rke2",
                          RKE2_SERVER_OPTIONS, RKE2_AGENT_OPTIONS)
def add_rke2_tarball_to_bastion(bastion_node, rke2_version):
    """Download the RKE2 binary and airgap image tarballs onto the bastion.

    Extra CNI image tarballs (calico, cilium, multus) are fetched only
    when the corresponding flags appear in RKE2_SERVER_OPTIONS.
    """
    get_tarball_command = \
        'wget -O rke2-airgap-images.{1} https://github.com/rancher/rke2/' \
        'releases/download/{0}/rke2-images.linux-amd64.{1} && ' \
        'wget -O rke2 https://github.com/rancher/rke2/releases/' \
        'download/{0}/rke2.linux-amd64'.format(rke2_version, TARBALL_TYPE)
    bastion_node.execute_command(get_tarball_command)
    # calico and cilium are mutually exclusive CNIs; multus may stack on top.
    if '--cni=calico' in RKE2_SERVER_OPTIONS:
        get_calico_tarball_command = \
            'wget -O rke2-airgap-images-calico.{1} ' \
            'https://github.com/rancher/rke2/releases/download/{0}/' \
            'rke2-images-calico.linux-amd64.{1}'.format(rke2_version,
                                                        TARBALL_TYPE)
        bastion_node.execute_command(get_calico_tarball_command)
    elif '--cni=cilium' in RKE2_SERVER_OPTIONS:
        get_cilium_tarball_command = \
            'wget -O rke2-airgap-images-cilium.{1} ' \
            'https://github.com/rancher/rke2/releases/download/{0}/' \
            'rke2-images-cilium.linux-amd64.{1}'.format(rke2_version,
                                                        TARBALL_TYPE)
        bastion_node.execute_command(get_cilium_tarball_command)
    if 'multus' in RKE2_SERVER_OPTIONS:
        get_multus_tarball_command = \
            'wget -O rke2-airgap-images-multus.{1} ' \
            'https://github.com/rancher/rke2/releases/download/{0}/' \
            'rke2-images-multus.linux-amd64.{1}'.format(rke2_version,
                                                        TARBALL_TYPE)
        bastion_node.execute_command(get_multus_tarball_command)
def prepare_airgap_rke2(bastion_node, number_of_nodes, method):
    """Create air-gapped EC2 nodes and stage RKE2 on them per `method`.

    `method` selects the image supply mechanism: 'private_registry',
    'system_default_registry', 'tarball+mirror', or any string containing
    'tarball' (tarballs are loaded in addition to the chosen registry
    setup). Returns the created node objects.
    """
    node_name = AG_HOST_NAME + "-rke2-airgap"
    # Create Airgap Node in AWS
    ag_nodes = AmazonWebServices().create_multiple_nodes(
        number_of_nodes, node_name, public_ip=False)
    for num, ag_node in enumerate(ag_nodes):
        # Copy relevant rke2 files to airgapped node
        ag_node_copy_files = \
            'scp -i "{0}.pem" -o StrictHostKeyChecking=no ./rke2 ' \
            '{1}@{2}:~/rke2'.format(bastion_node.ssh_key_name, AWS_USER,
                                    ag_node.private_ip_address)
        bastion_node.execute_command(ag_node_copy_files)
        ag_node_make_executable = \
            'sudo mv ./rke2 /usr/local/bin/rke2 && ' \
            'sudo chmod +x /usr/local/bin/rke2'
        run_command_on_airgap_node(bastion_node, ag_node,
                                   ag_node_make_executable)
        # Registry/cert staging depends on the requested method; any
        # unrecognized method falls through to cert-trust only.
        if method == 'private_registry':
            copy_certs_to_node(bastion_node, ag_node)
            prepare_registries_mirror_on_node(bastion_node, ag_node, 'rke2')
        elif method == 'system_default_registry':
            copy_certs_to_node(bastion_node, ag_node)
            trust_certs_on_node(bastion_node, ag_node)
        elif method == 'tarball+mirror':
            copy_certs_to_node(bastion_node, ag_node)
            prepare_registries_mirror_on_node(bastion_node, ag_node, 'rke2')
        else:
            copy_certs_to_node(bastion_node, ag_node)
            trust_certs_on_node(bastion_node, ag_node)
        if 'tarball' in method:
            add_tarball_to_node(bastion_node, ag_node,
                                'rke2-airgap-images.{}'.format(TARBALL_TYPE),
                                'rke2')
            # Optional CNI image tarballs, mirroring the bastion downloads.
            if '--cni=calico' in RKE2_SERVER_OPTIONS:
                add_tarball_to_node(
                    bastion_node, ag_node,
                    'rke2-airgap-images-calico.{}'.format(TARBALL_TYPE),
                    'rke2')
            elif '--cni=cilium' in RKE2_SERVER_OPTIONS:
                add_tarball_to_node(
                    bastion_node, ag_node,
                    'rke2-airgap-images-cilium.{}'.format(TARBALL_TYPE),
                    'rke2')
            if 'multus' in RKE2_SERVER_OPTIONS:
                add_tarball_to_node(
                    bastion_node, ag_node,
                    'rke2-airgap-images-multus.{}'.format(TARBALL_TYPE),
                    'rke2')
        print("Airgapped RKE2 Instance Details:\nNAME: {}-{}\nPRIVATE IP: {}\n"
              "".format(node_name, num, ag_node.private_ip_address))
    assert len(ag_nodes) == NUMBER_OF_INSTANCES
    print(
        '{} airgapped rke2 instance(s) created.\n'
        'Connect to these and run commands by connecting to bastion node, '
        'then connecting to these:\n'
        'ssh -i {}.pem {}@NODE_PRIVATE_IP'.format(
            NUMBER_OF_INSTANCES, bastion_node.ssh_key_name, AWS_USER))
    for ag_node in ag_nodes:
        assert ag_node.private_ip_address is not None
        assert ag_node.public_ip_address is None
    return ag_nodes
| 48.259036 | 79 | 0.65073 |
1ab94fb2beeaec990377af5a4232c5418546845b | 1,571 | py | Python | lib/score_util.py | alisure-ml/Semantic-Segmentation-clockwork-fcn | 2a870feb5339068a283d106ff2d6b4c5a20ab966 | [
"BSD-4-Clause-UC"
] | 2 | 2021-01-15T19:13:19.000Z | 2021-11-17T11:37:56.000Z | lib/score_util.py | alisure-ml/Semantic-Segmentation-clockwork-fcn | 2a870feb5339068a283d106ff2d6b4c5a20ab966 | [
"BSD-4-Clause-UC"
] | null | null | null | lib/score_util.py | alisure-ml/Semantic-Segmentation-clockwork-fcn | 2a870feb5339068a283d106ff2d6b4c5a20ab966 | [
"BSD-4-Clause-UC"
] | 1 | 2021-01-15T19:14:13.000Z | 2021-01-15T19:14:13.000Z | from __future__ import division
import numpy as np
def fast_hist(a, b, n):
    """Build an n x n confusion matrix from label arrays `a` (rows) and `b` (cols).

    Entries of `a` outside ``[0, n)`` are ignored, along with the paired
    entries of `b`.
    """
    valid = (a >= 0) & (a < n)
    joint = n * a[valid].astype(int) + b[valid]
    return np.bincount(joint, minlength=n ** 2).reshape(n, n)
def report_scores(meta, hist):
    """Print and return segmentation metrics from confusion matrix `hist`.

    Prints overall accuracy, mean per-class accuracy, mean IU and
    frequency-weighted IU, prefixed with the `meta` label.

    Returns:
        (overall_acc, mean_class_acc, mean_iu, fwavacc).

    NOTE(review): the printed 'mean IU' is np.nanmean(iu), but the value
    returned is np.nanmean(iu[iu > 0]) (zero-IU classes excluded), which
    also differs from get_scores(); confirm which is intended.
    """
    print('>>>', meta)
    # overall accuracy
    acc = np.diag(hist).sum() / hist.sum()
    print('overall accuracy', acc)
    # per-class accuracy
    cl_acc = np.nanmean(np.diag(hist) / hist.sum(1))
    print('mean accuracy', cl_acc)
    # per-class IU
    iu = np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist))
    print('mean IU', np.nanmean(iu))
    freq = hist.sum(1) / hist.sum()
    fw_iu = (freq[freq > 0] * iu[freq > 0]).sum()
    print('fwavacc', fw_iu)
    return acc, cl_acc, np.nanmean(iu[iu > 0]), fw_iu
def get_scores(hist):
    """Compute segmentation metrics from an n x n confusion matrix.

    Returns:
        (overall accuracy, mean per-class accuracy, mean IU,
        frequency-weighted IU).
    """
    diag = np.diag(hist)
    row_sums = hist.sum(1)
    col_sums = hist.sum(0)
    overall_acc = diag.sum() / hist.sum()
    mean_cl_acc = np.nanmean(diag / row_sums)
    iu = diag / (row_sums + col_sums - diag)
    class_freq = row_sums / hist.sum()
    present = class_freq > 0
    fw_iu = (class_freq[present] * iu[present]).sum()
    return overall_acc, mean_cl_acc, np.nanmean(iu), fw_iu
def score_out_gt(out, gt, n_cl=None):
    '''
    Confusion matrix of output `out` against groundtruth `gt` for `n_cl`
    classes. `n_cl` is required.
    '''
    assert (n_cl is not None)
    return fast_hist(gt.flatten(), out.flatten(), n_cl)
def score_out_gt_bdry(out, gt, bdry, n_cl=None):
    '''
    Confusion matrix restricted to boundary locations indexed by `bdry`;
    groundtruth is taken from the first channel of `gt`. `n_cl` is required.
    '''
    assert (n_cl is not None)
    return fast_hist(gt[0, bdry], out[bdry], n_cl)
| 27.086207 | 83 | 0.598345 |
ba115f68731f83789c5de04edf5ced708d6f38f8 | 361 | py | Python | retrievalaccuracy/__init__.py | joshjchayes/RetrievalAccuracy | 85c7cb0c90ddf7512dcf3aec7c5fc1a9d9f9d9c7 | [
"MIT"
] | null | null | null | retrievalaccuracy/__init__.py | joshjchayes/RetrievalAccuracy | 85c7cb0c90ddf7512dcf3aec7c5fc1a9d9f9d9c7 | [
"MIT"
] | null | null | null | retrievalaccuracy/__init__.py | joshjchayes/RetrievalAccuracy | 85c7cb0c90ddf7512dcf3aec7c5fc1a9d9f9d9c7 | [
"MIT"
] | null | null | null | '''
RetrievalAccuracy is a Python 3 package designed to provide a consistent method
for assigning accuracy metrics to different retrieval results when the true
result is known. This can be used for testing and comparing retrieval
algorithms.
'''
from .metric_calculator import MetricCalculator
#from .metric_plotter import MetricPlotter
__version__='0.2.0'
| 25.785714 | 79 | 0.814404 |
fa08daca8a4122cfd5e82cf3feffb67564e7aadc | 2,400 | py | Python | doc/test_directive3.py | tmarktaylor/phmdoctest | 6974c95f8019aeea7e99a0cd7cafb06846291a35 | [
"MIT"
] | 16 | 2020-06-01T21:50:04.000Z | 2022-03-10T00:47:47.000Z | doc/test_directive3.py | Borda/phmdoctest | 36b657fae364b5a6dcf59f8b4d92e5fb6dd713bd | [
"MIT"
] | 18 | 2020-06-01T22:08:59.000Z | 2022-03-30T15:26:10.000Z | doc/test_directive3.py | Borda/phmdoctest | 36b657fae364b5a6dcf59f8b4d92e5fb6dd713bd | [
"MIT"
] | 3 | 2020-06-23T02:04:25.000Z | 2022-02-10T21:32:19.000Z | """pytest file built from doc/directive3.md"""
import pytest
from phmdoctest.fixture import managenamespace
from phmdoctest.functions import _phm_compare_exact
def test_code_13_output_17(capsys):
    """Run the first (unshared) example block and compare captured stdout."""
    greeting = "Hello World!"
    print(greeting)

    _phm_expected_str = """\
Hello World!
"""
    _phm_compare_exact(a=_phm_expected_str, b=capsys.readouterr().out)
def test_not_visible():
    """The name bound in the previous, unshared block must not leak here."""
    saw_name_error = False
    try:
        print(not_shared)
    except NameError:
        saw_name_error = True
    assert saw_name_error, "did not get expected NameError"
def test_directive_share_names(managenamespace):
    """Seed the namespace shared with the later generated tests.

    Every local bound here (string, x, y, z, incrementer, grades) is
    published module-wide through the managenamespace fixture's
    ``locals()`` capture below -- that is why later tests can reference
    these names without defining them.
    """
    import string
    x, y, z = 77, 88, 99

    def incrementer(x):
        return x + 1

    grades = ["A", "B", "C"]
    # Caution- no assertions.
    managenamespace(operation="update", additions=locals())
def test_code_53_output_60(capsys):
    # Uses names (string, incrementer, grades, x, y, z) injected into the
    # module namespace by test_directive_share_names via managenamespace.
    print("string.digits=", string.digits)
    print(incrementer(10))
    print(grades)
    print(x, y, z)

    _phm_expected_str = """\
string.digits= 0123456789
11
['A', 'B', 'C']
77 88 99
"""
    _phm_compare_exact(a=_phm_expected_str, b=capsys.readouterr().out)
def test_code_70():
    # Mutates the shared `grades` list in place; the next generated test
    # asserts on the appended value.
    grades.append("D")
    # Caution- no assertions.
def test_code_75_output_79(capsys):
    """Verify the shared `grades` list now includes the "D" appended above."""
    expected_grades = ["A", "B", "C", "D"]
    print(grades == expected_grades)

    _phm_expected_str = """\
True
"""
    _phm_compare_exact(a=_phm_expected_str, b=capsys.readouterr().out)
def test_code_85_output_93(capsys, managenamespace):
    # `string` comes from the shared namespace seeded earlier.
    hex_digits = string.hexdigits
    print(hex_digits)

    _phm_expected_str = """\
0123456789abcdefABCDEF
"""
    _phm_compare_exact(a=_phm_expected_str, b=capsys.readouterr().out)
    # Publish `hex_digits` (and all other locals) for the next test.
    managenamespace(operation="update", additions=locals())
def test_code_108_output_114(capsys, managenamespace):
    # Reads the shared names one last time, then wipes the shared namespace
    # so the following test can assert the names are gone.
    print("Names are cleared after the code runs.")
    print(grades == ["A", "B", "C", "D"])
    print(hex_digits)

    _phm_expected_str = """\
Names are cleared after the code runs.
True
0123456789abcdefABCDEF
"""
    _phm_compare_exact(a=_phm_expected_str, b=capsys.readouterr().out)
    managenamespace(operation="clear")
def test_code_121():
    """After the namespace clear, neither shared name may resolve."""
    grades_gone = False
    try:
        print(grades)
    except NameError:
        grades_gone = True
    assert grades_gone, "expected NameError for grades"

    hex_digits_gone = False
    try:
        print(hex_digits)
    except NameError:
        hex_digits_gone = True
    assert hex_digits_gone, "expected NameError for hex_digits"
| 21.238938 | 70 | 0.676667 |
b3bb758d3270602debdda62f12e6279f9c48d534 | 2,594 | py | Python | test/core/sampler/pymc3_test.py | deepchatterjeeligo/bilby | 6532c63d0d970ad33478ae0b1c28d4c570b74047 | [
"MIT"
] | null | null | null | test/core/sampler/pymc3_test.py | deepchatterjeeligo/bilby | 6532c63d0d970ad33478ae0b1c28d4c570b74047 | [
"MIT"
] | null | null | null | test/core/sampler/pymc3_test.py | deepchatterjeeligo/bilby | 6532c63d0d970ad33478ae0b1c28d4c570b74047 | [
"MIT"
] | null | null | null | import unittest
from unittest.mock import MagicMock
import pytest
import bilby
@pytest.mark.xfail(
    raises=AttributeError,
    reason="Dependency issue with pymc3 causes attribute error on import",
)
class TestPyMC3(unittest.TestCase):
    """Unit tests for the bilby Pymc3 sampler wrapper."""

    def setUp(self):
        """Build a Pymc3 sampler around a mocked likelihood and trivial priors."""
        self.likelihood = MagicMock()
        self.priors = bilby.core.prior.PriorDict(
            dict(a=bilby.core.prior.Uniform(0, 1), b=bilby.core.prior.Uniform(0, 1))
        )
        self.sampler = bilby.core.sampler.Pymc3(
            self.likelihood,
            self.priors,
            outdir="outdir",
            label="label",
            use_ratio=False,
            plot=False,
            skip_import_verification=True,
        )

    def tearDown(self):
        del self.likelihood
        del self.priors
        del self.sampler

    def _expected_kwargs(self):
        """Default kwargs the sampler should expose.

        This 17-entry dict was previously duplicated verbatim in both test
        methods below; it is built in one place now so the two tests cannot
        drift apart.
        """
        expected = dict(
            draws=500,
            step=None,
            init="auto",
            n_init=200000,
            start=None,
            trace=None,
            chain_idx=0,
            chains=2,
            cores=1,
            tune=500,
            progressbar=True,
            model=None,
            nuts_kwargs=None,
            step_kwargs=None,
            random_seed=None,
            discard_tuned_samples=True,
            compute_convergence_checks=True,
        )
        expected.update(self.sampler.default_nuts_kwargs)
        expected.update(self.sampler.default_step_kwargs)
        return expected

    def test_default_kwargs(self):
        """A freshly constructed sampler exposes exactly the default kwargs."""
        self.assertDictEqual(self._expected_kwargs(), self.sampler.kwargs)

    def test_translate_kwargs(self):
        """Every npoints-equivalent keyword is translated back to `draws`."""
        expected = self._expected_kwargs()
        self.sampler.kwargs["draws"] = 123
        for equiv in bilby.core.sampler.base_sampler.NestedSampler.npoints_equiv_kwargs:
            new_kwargs = self.sampler.kwargs.copy()
            del new_kwargs["draws"]
            new_kwargs[equiv] = 500
            self.sampler.kwargs = new_kwargs
            self.assertDictEqual(expected, self.sampler.kwargs)
if __name__ == "__main__":
    # Allow running this test module directly (outside the pytest runner).
    unittest.main()
| 28.505495 | 88 | 0.569391 |
281469fa0208e12441ccfe297f5a7d2d661ae868 | 1,836 | py | Python | pluto/setup/directory_setup.py | chalant/zipline-lib | 836473160f369165132f009bfe620b5d5ca5c946 | [
"Apache-2.0"
] | null | null | null | pluto/setup/directory_setup.py | chalant/zipline-lib | 836473160f369165132f009bfe620b5d5ca5c946 | [
"Apache-2.0"
] | null | null | null | pluto/setup/directory_setup.py | chalant/zipline-lib | 836473160f369165132f009bfe620b5d5ca5c946 | [
"Apache-2.0"
] | null | null | null | from os import path
from sqlalchemy import create_engine
from directory import directory as dr
from directory import utils as du
# root_dir = dr.create_directory(path.expanduser('~./pluto'), 'root')
# root_dir.add_file('setup', 'yaml')
#
# setup_dir = root_dir.add_directory('setup')
# setup_dir.add_file('run_setup', 'yaml')
# setup_dir.add_file('metadata', 'sqlite')
#
# universes_dir = root_dir.add_directory('universes')
# universes_dir.add_file('metadata', 'sqlite')
#
# # universe_engine = create_engine('sqlite://' + universes_dir.get_file('metadata', 'sqlite').path)
#
# strategies_dir = root_dir.add_directory('strategies')
# strategy_states_dir = strategies_dir.add_directory('states')
# strategy_performance_dir = strategies_dir.add_directory('performance')
#
# root_dir.add_file('metadata', 'sqlite')
#
# strategies_db_engine = create_engine(
# 'sqlite://' + strategies_dir.get_file('metadata', 'sqlite').path)
def create_directory(root_path):
    """Create the pluto on-disk directory/file layout rooted at *root_path*.

    Layout created (via the `directory` helper objects):
        root/
            setup.yaml, metadata.sqlite
            setup/       run_setup.yaml, metadata.sqlite
            universes/   metadata.sqlite
            strategies/  states/, performance/, metadata.sqlite

    Returns the root directory object so callers can navigate the tree.
    """
    root_dir = dr.create_directory(root_path, 'root')
    root_dir.add_file('setup', 'yaml')

    setup_dir = root_dir.add_directory('setup')
    setup_dir.add_file('run_setup', 'yaml')
    setup_dir.add_file('metadata', 'sqlite')

    universes_dir = root_dir.add_directory('universes')
    universes_dir.add_file('metadata', 'sqlite')

    strategies_dir = root_dir.add_directory('strategies')
    # Called for their side effect only; the handles were previously bound
    # to unused locals (strategy_states_dir, strategy_performance_dir).
    strategies_dir.add_directory('states')
    strategies_dir.add_directory('performance')
    strategies_dir.add_file('metadata', 'sqlite')

    root_dir.add_file('metadata', 'sqlite')

    # Dropped the dead `strategies_db_engine = create_engine('sqlite://' + path)`
    # call: SQLAlchemy engines are lazy, the object was never used or returned,
    # and the URL was missing a slash anyway ('sqlite:///<path>' is required
    # for file-backed databases).
    return root_dir
| 30.6 | 102 | 0.736928 |
369ea0fd139adee70540bb8c6183630bdb73ad50 | 680 | py | Python | apps/mappings/migrations/0008_auto_20210908_0617.py | fylein/fyle-qbo-api | 627339ddbab37cafc8aa419a757d826759d86778 | [
"MIT"
] | null | null | null | apps/mappings/migrations/0008_auto_20210908_0617.py | fylein/fyle-qbo-api | 627339ddbab37cafc8aa419a757d826759d86778 | [
"MIT"
] | 22 | 2020-01-03T11:00:00.000Z | 2022-03-22T11:12:35.000Z | apps/mappings/migrations/0008_auto_20210908_0617.py | fylein/fyle-qbo-api | 627339ddbab37cafc8aa419a757d826759d86778 | [
"MIT"
] | 3 | 2020-03-09T12:56:31.000Z | 2020-06-10T07:55:47.000Z | # Generated by Django 3.0.3 on 2021-09-08 06:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the QBO default tax code
    # columns (id + display name) to the GeneralMapping model.
    # Do not edit generated migrations by hand.

    dependencies = [
        ('mappings', '0007_auto_20210722_1446'),
    ]

    operations = [
        migrations.AddField(
            model_name='generalmapping',
            name='default_tax_code_id',
            field=models.CharField(help_text='QBO default Tax Code ID', max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='generalmapping',
            name='default_tax_code_name',
            field=models.CharField(help_text='QBO default Tax Code name', max_length=255, null=True),
        ),
    ]
| 28.333333 | 101 | 0.626471 |
e0f9bbef3355f18ea62be91313835da5ed7ecd0b | 717 | py | Python | examples/run_dfwa.py | rhododendrom/NiaPy | 873037e4337474bb75714f1c2be273c97de3eded | [
"MIT"
] | 1 | 2020-03-16T11:15:43.000Z | 2020-03-16T11:15:43.000Z | examples/run_dfwa.py | rhododendrom/NiaPy | 873037e4337474bb75714f1c2be273c97de3eded | [
"MIT"
] | null | null | null | examples/run_dfwa.py | rhododendrom/NiaPy | 873037e4337474bb75714f1c2be273c97de3eded | [
"MIT"
] | null | null | null | # encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix
from NiaPy.algorithms.basic import DynamicFireworksAlgorithm
from NiaPy.util import StoppingTask
from NiaPy.task.task import OptimizationType
from NiaPy.benchmarks import Sphere
# we will run Fireworks Algorithm for 5 independent runs
for i in range(5):
	# 10-dimensional Sphere benchmark; the optimizer stops after 80 generations.
	task = StoppingTask(D=10, nGEN=80, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
	algo = DynamicFireworksAlgorithm(N=70, Ainit=0.1, Afinal=0.9)
	best = algo.run(task=task)
	# best is (solution, fitness); runs differ because no RNG seed is fixed.
	print('%s -> %s' % (best[0], best[1]))
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| 34.142857 | 94 | 0.767085 |
35be45e9594b061820cb74b528e17fbea090c7c8 | 2,139 | py | Python | Controllers/MatchupDisplay.py | fefelson/MLBProjections | 444a1c071cb7b1e21eedc49cf63ae91e80d37978 | [
"MIT"
] | null | null | null | Controllers/MatchupDisplay.py | fefelson/MLBProjections | 444a1c071cb7b1e21eedc49cf63ae91e80d37978 | [
"MIT"
] | null | null | null | Controllers/MatchupDisplay.py | fefelson/MLBProjections | 444a1c071cb7b1e21eedc49cf63ae91e80d37978 | [
"MIT"
] | null | null | null | import wx
from MLBProjections.MLBProjections.Views.MatchupDisplay import MatchupDisplay, MatchupPanel
from MLBProjections.MLBProjections.LineupSetter import LineupSetter
from MLBProjections.MLBProjections.SimulationSetter import SimulationSetter
################################################################################
################################################################################
################################################################################
################################################################################
class MatchupDisplayController:
    """Wires the matchup display view into the main window and hooks up the
    per-matchup "lineup" / "sim" buttons to their dialogs."""

    def __init__(self, groundControl):
        self.groundControl = groundControl
        self.displayView = MatchupDisplay(groundControl.getMainPanel())
        button = wx.Button(groundControl.getMainPanel(), label="Main")
        groundControl.addToMainSizer(self.displayView)
        groundControl.addToButtonSizer(button)

        for key, matchup in groundControl.getMatchups().items():
            print(key)  # NOTE(review): leftover debug trace -- consider removing
            newPanel = MatchupPanel(self.displayView.getMainPanel())
            # The wx widget name carries the game id so the button handlers
            # below can recover which matchup was clicked.
            newPanel.buttons["lineup"].SetName("{}".format(key))
            newPanel.buttons["lineup"].Bind(wx.EVT_BUTTON, self.setLineups)
            newPanel.buttons["sim"].SetName("{}".format(key))
            newPanel.buttons["sim"].Bind(wx.EVT_BUTTON, self.runSim)
            self.displayView.addMatchupPanel(newPanel)
            matchup.registerChangeObserver(newPanel)
            matchup.registerOnSetObserver(newPanel)

    def setLineups(self, event):
        """Open the lineup-setting dialog for the matchup whose button fired."""
        gameId = event.GetEventObject().GetName()
        lineupSetter = LineupSetter(self.groundControl)
        matchup = self.groundControl.getMatchups(gameId)
        lineupSetter.setView(matchup.getInfo())
        lineupSetter.show()

    def runSim(self, event):
        """Open the simulation-setup dialog for the clicked matchup."""
        # Renamed local from `object`, which shadowed the builtin.
        eventSource = event.GetEventObject()
        simSetter = SimulationSetter(self.groundControl, eventSource.GetName())
        simSetter.show()
################################################################################
################################################################################
| 38.196429 | 91 | 0.542777 |
dd643434701dcd41a6b997bdb173b8e66a4492c4 | 565 | py | Python | Python Tkinter How To Use HTML In Your Tkinter App/HowToUseHTMLInYourTkinterApp.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 20 | 2020-08-19T23:27:01.000Z | 2022-02-03T12:02:17.000Z | Python Tkinter How To Use HTML In Your Tkinter App/HowToUseHTMLInYourTkinterApp.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 1 | 2021-04-10T18:06:05.000Z | 2021-04-10T18:06:05.000Z | Python Tkinter How To Use HTML In Your Tkinter App/HowToUseHTMLInYourTkinterApp.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 2 | 2020-12-03T19:35:36.000Z | 2021-11-10T14:58:39.000Z | from tkinter import *
from tkhtmlview import HTMLLabel
# Main application window.
root = Tk()
root.title("Python Tkinter How To Use HTML In Your Tkinter App")
root.geometry("500x600")

# HTMLLabel (tkhtmlview) renders a small subset of HTML inside Tkinter.
# NOTE: the trailing backslashes are escaped newlines *inside* one string
# literal, so the whole html= argument is a single line of HTML.
my_label = HTMLLabel(root, html="\
<h1>\
<a href='https://www.python.org/'>Download Python</a>\
</h1>\
<ul>\
<li>One </li>\
<li>Two </li>\
<li>three </li>\
</ul>\
<img src='https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTr9vl91PpaNrX7_EiOaWpTyftYzOFEiQixBA&usqp=CAU'> ")

# Fill the whole window and keep growing with it.
my_label.pack(pady=20,
              padx=20,
              fill = "both",
              expand=True)
root.mainloop() | 20.925926 | 118 | 0.638938 |
dd47f0427921e5f9cf6263f043f88af4a089d5b2 | 1,751 | py | Python | misc/doc/sources/interception/sc_custom_message_id.py | 2naive/jasmin | 7609a50ded4ebf5873b607cb4a500be4b1be6be1 | [
"Apache-2.0"
] | null | null | null | misc/doc/sources/interception/sc_custom_message_id.py | 2naive/jasmin | 7609a50ded4ebf5873b607cb4a500be4b1be6be1 | [
"Apache-2.0"
] | null | null | null | misc/doc/sources/interception/sc_custom_message_id.py | 2naive/jasmin | 7609a50ded4ebf5873b607cb4a500be4b1be6be1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"External URL controlled routing and custom message_id"
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import logging
import requests
import urllib
import json
# Set logger
logger = logging.getLogger('logging-example')
if len(logger.handlers) != 1:
hdlr = logging.FileHandler('/var/log/jasmin/interception-mt.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
#logger.info('v4 PDU: %s' % routable.pdu)
destination_addr = str( routable.pdu.params['destination_addr'] )
source_addr = str( routable.pdu.params['source_addr'] )
short_message = routable.pdu.params['short_message'].decode('utf-16-be').encode('utf-8')
if 'message_payload' in routable.pdu.params:
short_message = routable.pdu.params['message_payload'].decode('utf-16-be').encode('utf-8')
routable.pdu.params['short_message'] = routable.pdu.params['message_payload']
if 'button_text' in routable.pdu.params:
routable.pdu.params['button_text'] = routable.pdu.params['button_text'].encode('cp1251')
del routable.pdu.params['message_payload']
# Register request and get message id
headers = {'User-Agent': 'Interceptor-MT'}
link = ("http://api.greensms.ru/?user="+routable.user.uid+"&to="+destination_addr+"&from="+urllib.quote_plus(source_addr)+"&txt="+urllib.quote_plus(short_message) )
response = requests.get(link, headers=headers)
result_json = json.loads(response.content)
uuid = str(result_json['request_id'])
smsc = str(result_json['smsc'])
# Setting message_id for later DLR identifying
routable.pdu.params['message_id'] = uuid
# Add routing tag base on external service
routable.addTag(smsc) | 38.065217 | 164 | 0.741862 |
2f701918514c565a5de504d86f6d6520039298ed | 1,204 | py | Python | ch04/q4-05.py | iamnicoj/ctci | f71f995cb3d3257d3d58f1f167fcab8eaf84d457 | [
"MIT"
] | null | null | null | ch04/q4-05.py | iamnicoj/ctci | f71f995cb3d3257d3d58f1f167fcab8eaf84d457 | [
"MIT"
] | 3 | 2021-03-19T14:35:27.000Z | 2021-03-20T16:12:34.000Z | ch04/q4-05.py | iamnicoj/ctci | f71f995cb3d3257d3d58f1f167fcab8eaf84d457 | [
"MIT"
] | null | null | null | from avl_tree import avl_tree
from random import randint
def validate_bst(tree):
    """Return True when *tree* satisfies the binary-search-tree invariant.

    Bug fix: the previous version compared every node only to its direct
    parent, which accepts invalid trees -- e.g. a node in the left subtree
    of the root that is larger than the root.  The fix threads the (lo, hi)
    bounds inherited from *all* ancestors down the recursion.
    """
    if tree.head is None:
        return True
    return _recursive_validate(tree.head, None, None)


def _recursive_validate(node, lo, hi):
    # Every node must lie within the interval imposed by its ancestors;
    # None means "unbounded" on that side.  Equal keys are tolerated,
    # matching the strict >/< comparisons of the original code.
    if node is None:
        return True
    if lo is not None and node.data < lo:
        return False
    if hi is not None and node.data > hi:
        return False
    return (_recursive_validate(node.left, lo, node.data)
            and _recursive_validate(node.right, node.data, hi))
def validate_bst_2(tree):
    """BST check via explicit (lo, hi) bounds carried down the recursion."""
    return _inner_v_2(tree.head)


def _inner_v_2(node, lo=None, hi=None):
    # Bug fix: the bounds used to be tested with `min and ...` /
    # `max and ...`, so a bound of 0 (falsy) silently disabled the check;
    # `is not None` keeps 0 as a real bound.  The old parameter names also
    # shadowed the builtins min/max.
    if not node:
        return True
    if (lo is not None and node.data < lo) or (hi is not None and node.data > hi):
        return False
    return _inner_v_2(node.left, lo, node.data) and _inner_v_2(node.right, node.data, hi)
#########################
# Build a random AVL tree: 100 inserts drawn from [0, 100] (duplicates
# likely).  No RNG seed is set, so each run produces a different tree.
my_tree = avl_tree()
for _ in range(0, 100):
    my_tree.add(randint(0,100))
#########################
my_tree.print_avl_tree()
print(validate_bst_2(my_tree))  # expected True for a freshly built tree
# Corrupt one node in place to exercise the validator; assumes the random
# tree is deep enough that head.right.left exists -- TODO confirm.
my_tree.head.right.left.data = 4
print(validate_bst_2(my_tree))  # likely False after the corruption
| 24.08 | 133 | 0.668605 |
c0020a5c48f2c2c3f5b6f0d2ae2d3b3f3f939485 | 10,005 | py | Python | test/test_export.py | GillesBouissac/blender-amf | 3a729b60dec7b7d0ab08ce82cadafd3c7f370e31 | [
"MIT"
] | 3 | 2021-06-16T07:54:50.000Z | 2022-03-31T09:44:30.000Z | test/test_export.py | GillesBouissac/blender-amf | 3a729b60dec7b7d0ab08ce82cadafd3c7f370e31 | [
"MIT"
] | null | null | null | test/test_export.py | GillesBouissac/blender-amf | 3a729b60dec7b7d0ab08ce82cadafd3c7f370e31 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2020 Gilles Bouissac
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# <pep8 compliant>
import os
import sys
import fakebpy
import tempfile
import pytest
import xmlschema
import traceback
import bpy
from unittest.mock import Mock
from zipfile import ZipFile
from io_mesh_amf import ExportAMF
from io_mesh_amf.amf_util import AMFExport, Group, flatten
def with_obj_names(objs):
    """Recursively map objects to their ``name`` attribute.

    A ``Group`` becomes the list of its members' names, a ``list`` is mapped
    element-wise, and anything else is treated as a single object whose
    ``name`` is returned.
    """
    if type(objs) not in (list, Group):
        return objs.name
    members = objs.objects if type(objs) is Group else objs
    return [with_obj_names(member) for member in members]
class Test_build_groups():
    """ Verifications of build_groups method """

    def test_none(self, export, empty_012):
        """'none' strategy: every object becomes its own group."""
        # Prepare
        export.group_strategy = 'none'
        bpy.data.objects = flatten(empty_012)
        # Test
        groups = export.build_groups(flatten(empty_012))
        # Check
        self._check_groups(groups, [
            ['cube_0'], ['cube_1'], ['cube_2'],
            ['empty_0'], ['empty_1'], ['empty_2'],
            ['empty_12'], ['empty_012']
        ])

    def test_all(self, export, empty_012):
        """'all' strategy: a single group holding every object."""
        # Prepare
        export.group_strategy = 'all'
        bpy.data.objects = flatten(empty_012)
        # Test
        groups = export.build_groups(flatten(empty_012))
        # Check
        self._check_groups(groups, [[
            'cube_0', 'cube_1', 'cube_2',
            'empty_0', 'empty_1', 'empty_2',
            'empty_12', 'empty_012'
        ]])

    def test_parents_selected(self, export, empty_0, empty_12, empty_012):
        """Objects group under their closest *selected* parent."""
        # Prepare
        export.group_strategy = 'parents_selected'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_0)
        objs.extend(flatten(empty_12))
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            ['cube_0', 'empty_0'],
            ['cube_1', 'cube_2', 'empty_1', 'empty_2', 'empty_12']
        ])

    def test_parents_visible(self, export, empty_0, empty_2, empty_012):
        """Objects group under their closest *visible* parent."""
        # Prepare
        export.group_strategy = 'parents_visible'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_0)
        objs.extend(flatten(empty_2))
        print(f"objs = {with_obj_names(objs)}")
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            # In the tree from empty_012 only those are in the selection
            ['empty_0', 'cube_0', 'empty_2', 'cube_2']
        ])

    def test_parents_viewable(self, export, empty_0, empty_1, empty_012):
        """Objects group under their closest *viewable* parent."""
        # Prepare
        export.group_strategy = 'parents_viewable'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_0)
        objs.extend(flatten(empty_1))
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            # The first viewable parent is empty_1 making a group
            ['cube_1', 'empty_1'],
            ['cube_0'],
            ['empty_0']
        ])

    def test_parents_renderable(self, export, empty_0, empty_2, empty_012):
        """Objects group under their closest *renderable* parent."""
        # Prepare
        export.group_strategy = 'parents_renderable'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_0)
        objs.extend(flatten(empty_2))
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            ['cube_0', 'empty_0'],
            ['cube_2', 'empty_2']
        ])

    def test_parents_any(self, export, empty_0, empty_2, empty_012):
        """'parents_any': all selected subtrees merge into one group."""
        # Prepare
        export.group_strategy = 'parents_any'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_0)
        objs.extend(flatten(empty_2))
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            ['cube_0', 'empty_0', 'cube_2', 'empty_2']
        ])

    def test_no_group_from_parent(self, export, empty_2, empty_012):
        """A parent outside the selection never forms a group by itself."""
        # Prepare
        export.group_strategy = 'parents_viewable'
        bpy.data.objects = flatten(empty_012)
        # Test
        objs = flatten(empty_2)
        groups = export.build_groups(objs)
        # Check
        self._check_groups(groups, [
            ['cube_2'],
            ['empty_2']
        ])

    def _group_equals(self, group, groupref):
        """Return True when *group* and *groupref* hold the same names.

        Order-insensitive.  *groupref* is consumed (emptied) on success,
        which is what lets _check_groups detect leftover expectations.
        """
        for obj in group:
            found = False
            for objref in groupref:
                if obj == objref:
                    groupref.remove(objref)
                    found = True
                    break
            if not found:
                return False
        return len(groupref) == 0

    def _check_groups(self, groups, groupsref):
        """Assert that generated *groups* match *groupsref* exactly, any order."""
        groupscheck = with_obj_names(groups)
        print(f"Groups generated: {groupscheck}")
        print(f"Groups expected: {groupsref}")
        for group in groupscheck:
            if type(group) is not list:
                group = [group]
            found = False
            for groupref in groupsref:
                if self._group_equals(group, groupref):
                    groupsref.remove(groupref)
                    found = True
                    break
            if not found:
                # Bug fix: `group` already holds names; the previous code
                # re-applied with_obj_names() here, which raised
                # AttributeError on plain strings instead of failing cleanly.
                pytest.fail(f"group {group} not found in {groupsref}")
        if len(groupsref) != 0:
            # Bug fix: the old message referenced the undefined name
            # `grpsstr`, turning this failure path into a NameError.
            pytest.fail(f"ref groups {groupsref} not found in {groupscheck}")
class Test_execute():
    """ Verifications of full execution """

    def check_archive(self, filepath):
        """ Check filepath is a valid amf archive """
        prefix = "test_export_check"
        with tempfile.TemporaryDirectory(prefix=prefix) as extractDir:
            with ZipFile(filepath, 'r') as amffile:
                # An AMF archive must contain exactly one entry: the XML doc.
                ziplist = amffile.infolist()
                assert len(ziplist) == 1
                zipxml = ziplist[0]
                assert zipxml.is_dir() is False
                xmlFile = amffile.extract(zipxml.filename, extractDir)
                assert os.path.isfile(xmlFile) is True
                try:
                    # Validate against the AMF schema; assumes the test runs
                    # from the project root so "test/amf.xsd" resolves --
                    # TODO confirm.
                    amf_schema = xmlschema.XMLSchema("test/amf.xsd")
                    amf_schema.validate(xmlFile)
                except Exception:
                    # Print the validation error plus traceback, then fail
                    # the test explicitly with the error text.
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    print(f"Error exporting AMF file: {exc_value}")
                    traceback.print_tb(exc_traceback, file=sys.stdout)
                    pytest.fail(str(exc_value))

    def test_execute_exception(self, context_cube):
        # execute() without a filepath configured must cancel, not raise.
        # Test, don't define export.filepath
        export = ExportAMF()
        ret = export.execute(context_cube)
        # Check
        assert ret == {'CANCELLED'}

    def test_execute_no_file(self, context_cube, export):
        # An empty filepath is rejected with CANCELLED.
        # Test
        export.filepath = ""
        ret = export.execute(context_cube)
        # Check
        assert ret == {'CANCELLED'}

    def test_execute_single_object(self, export, context, empty_012):
        # Export the selection with unit conversion, then check the archive.
        # Prepare
        export.export_strategy = "selection"
        export.group_strategy = 'parents_selected'
        context.selected_objects = flatten(empty_012)
        print(f"selected_objects = {flatten(empty_012)}")
        export.target_unit = 'inch'
        # Test
        ret = export.execute(context)
        # Check
        assert ret == {'FINISHED'}
        context.selected_objects[0].to_mesh.assert_called_once_with()
        self.check_archive(export.filepath)

    def test_execute_all_selected_native(self, export, context, empty_012):
        # 'native' format: only mesh-bearing objects (cubes) get triangulated.
        # Prepare
        export.export_strategy = "selection"
        export.group_strategy = 'all'
        export.export_format = 'native'
        context.selected_objects = flatten(empty_012)
        # Test
        ret = export.execute(context)
        # Check
        assert ret == {'FINISHED'}
        for obj in context.selected_objects:
            obj.to_mesh.assert_called()
            if "cube" in obj.name:
                obj.mesh_mock.calc_loop_triangles.assert_called()
            else:
                obj.mesh_mock.calc_loop_triangles.assert_not_called()
        self.check_archive(export.filepath)

    def test_execute_all_selected_slicer(self, export, context, empty_012):
        # 'slic3r' format variant of the previous test.
        # Prepare
        export.export_strategy = "selection"
        export.group_strategy = 'all'
        export.export_format = 'slic3r'
        context.selected_objects = flatten(empty_012)
        # Test
        ret = export.execute(context)
        # Check
        assert ret == {'FINISHED'}
        for obj in context.selected_objects:
            obj.to_mesh.assert_called()
            if "cube" in obj.name:
                obj.mesh_mock.calc_loop_triangles.assert_called()
            else:
                obj.mesh_mock.calc_loop_triangles.assert_not_called()
        # we had to cheat with schema
        self.check_archive(export.filepath)
| 34.860627 | 79 | 0.605097 |
2794dafc4e3855f316b0ef087b4a3d850b3fc805 | 2,650 | py | Python | nuitka/optimizations/BytecodeDemotion.py | dmikushin/Nuitka | 1e7e05257b9b39ec74be807bfdb0dec11a68e1ed | [
"Apache-2.0"
] | null | null | null | nuitka/optimizations/BytecodeDemotion.py | dmikushin/Nuitka | 1e7e05257b9b39ec74be807bfdb0dec11a68e1ed | [
"Apache-2.0"
] | null | null | null | nuitka/optimizations/BytecodeDemotion.py | dmikushin/Nuitka | 1e7e05257b9b39ec74be807bfdb0dec11a68e1ed | [
"Apache-2.0"
] | null | null | null | # Copyright 2019, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Demotion of compiled modules to bytecode modules.
"""
import marshal
from logging import info
from nuitka import Options
from nuitka.importing.ImportCache import isImportedModuleByName, replaceImportedModule
from nuitka.ModuleRegistry import replaceRootModule
from nuitka.nodes.ModuleNodes import makeUncompiledPythonModule
from nuitka.plugins.Plugins import Plugins
def demoteCompiledModuleToBytecode(module):
    """ Demote a compiled module to uncompiled (bytecode).

    Compiles the module's source to a code object (letting plugins hook
    both the source and the resulting bytecode), wraps it in an uncompiled
    module node, and swaps it for the compiled module in every registry.
    """
    full_name = module.getFullName()
    filename = module.getCompileTimeFilename()

    if Options.isShowProgress() or Options.isDebug():
        info("Demoting module '%s' to bytecode from '%s'." % (full_name, filename))

    source_code = module.getSourceCode()

    # Second chance for plugins to modify source code just before turning it
    # to bytecode.
    source_code = Plugins.onFrozenModuleSourceCode(
        module_name=full_name, is_package=False, source_code=source_code
    )

    bytecode = compile(source_code, filename, "exec", dont_inherit=True)

    # Plugins may also post-process the compiled code object itself.
    bytecode = Plugins.onFrozenModuleBytecode(
        module_name=full_name, is_package=False, bytecode=bytecode
    )

    # The code object is marshalled so the uncompiled module carries a
    # serialized form (the same representation ".pyc" files use).
    uncompiled_module = makeUncompiledPythonModule(
        module_name=full_name,
        filename=filename,
        bytecode=marshal.dumps(bytecode),
        is_package=module.isCompiledPythonPackage(),
        user_provided=True,
        technical=False,
    )

    uncompiled_module.setUsedModules(module.getUsedModules())
    module.finalize()

    # Swap the compiled module for the bytecode one everywhere it is
    # registered: import cache, root module registry, plugin triggers.
    if isImportedModuleByName(full_name):
        replaceImportedModule(old=module, new=uncompiled_module)
    replaceRootModule(old=module, new=uncompiled_module)

    # Deferred, function-local import -- presumably to avoid an import
    # cycle with the plugins package; confirm before moving to the top.
    from nuitka.plugins.PluginBase import isTriggerModule, replaceTriggerModule

    if isTriggerModule(module):
        replaceTriggerModule(old=module, new=uncompiled_module)
| 34.415584 | 86 | 0.739623 |
5ed880ff5f7c17a8e6b0538cfb7dff3c57e6f1a8 | 7,646 | py | Python | common/clr_keras_utils.py | CBIIT/NCI-DOE-Collab-Pilot1-Unified-Drug-Response-Predictor-Multi-tasking | 93ac7706741a22ef091fae3bf555d7271fe27bee | [
"MIT"
] | 51 | 2017-01-24T20:57:27.000Z | 2022-02-15T00:33:45.000Z | common/clr_keras_utils.py | CBIIT/NCI-DOE-Collab-Pilot1-Unified-Drug-Response-Predictor-Multi-tasking | 93ac7706741a22ef091fae3bf555d7271fe27bee | [
"MIT"
] | 59 | 2017-08-21T22:19:44.000Z | 2021-11-01T16:05:35.000Z | common/clr_keras_utils.py | CBIIT/NCI-DOE-Collab-Pilot1-Unified-Drug-Response-Predictor-Multi-tasking | 93ac7706741a22ef091fae3bf555d7271fe27bee | [
"MIT"
] | 90 | 2016-11-22T03:57:07.000Z | 2022-01-11T04:43:23.000Z | from tensorflow.keras.callbacks import Callback
from tensorflow.keras import backend as K
import numpy as np
def clr_check_args(args):
    """Return True when *args* contains every key required for CLR setup."""
    required = ('clr_mode', 'clr_base_lr', 'clr_max_lr', 'clr_gamma')
    return all(key in args for key in required)
def clr_set_args(args):
    """Translate run parameters into kwargs for clr_callback().

    When all required CLR keys are present (and clr_mode is set), any
    conflicting flags in *args* ('warmup_lr', 'reduce_lr') are switched off
    in place; otherwise a disabled-CLR default dict is returned.
    """
    required = ('clr_mode', 'clr_base_lr', 'clr_max_lr', 'clr_gamma')
    exclusive = ('warmup_lr', 'reduce_lr')
    if all(key in args for key in required) and args['clr_mode'] is not None:
        kwargs = {'mode': args['clr_mode'], 'base_lr': args['clr_base_lr'],
                  'max_lr': args['clr_max_lr'], 'gamma': args['clr_gamma']}
        for flag in exclusive:
            if flag in args and args[flag] is True:
                print("Key ", flag, " conflicts, setting to False")
                args[flag] = False
    else:
        print("Incomplete CLR specification: will run without")
        kwargs = {'mode': None, 'base_lr': 0.1,
                  'max_lr': 0.1, 'gamma': 0.1}
    return kwargs
def clr_callback(mode=None, base_lr=1e-4, max_lr=1e-3, gamma=0.999994):
    """ Creates keras callback for cyclical learning rate.

    mode: 'trng1' -> triangular, 'trng2' -> triangular2, 'exp' -> exp_range.
    base_lr / max_lr: lower / upper learning-rate boundary of the cycle.
    gamma: decay factor for 'exp' mode (e.g. 0.99994, 0.999994, 0.99999994).

    Raises ValueError for an unknown (or None) mode.
    """
    if mode == 'trng1':
        return CyclicLR(base_lr=base_lr, max_lr=max_lr, mode='triangular')
    if mode == 'trng2':
        return CyclicLR(base_lr=base_lr, max_lr=max_lr, mode='triangular2')
    if mode == 'exp':
        return CyclicLR(base_lr=base_lr, max_lr=max_lr, mode='exp_range', gamma=gamma)
    # Bug fix: an unknown/None mode previously fell through to `return clr`
    # and died with UnboundLocalError; fail with an explicit message instead.
    raise ValueError("clr mode must be one of 'trng1', 'trng2' or 'exp', got %r" % (mode,))
class CyclicLR(Callback):
"""This callback implements a cyclical learning rate policy (CLR).
The method cycles the learning rate between two boundaries with
some constant frequency.
# Arguments
base_lr: initial learning rate which is the
lower boundary in the cycle.
max_lr: upper boundary in the cycle. Functionally,
it defines the cycle amplitude (max_lr - base_lr).
The lr at any cycle is the sum of base_lr
and some scaling of the amplitude; therefore
max_lr may not actually be reached depending on
scaling function.
step_size: number of training iterations per
half cycle. Authors suggest setting step_size
2-8 x training iterations in epoch.
mode: one of {triangular, triangular2, exp_range}.
Default 'triangular'.
Values correspond to policies detailed above.
If scale_fn is not None, this argument is ignored.
gamma: constant in 'exp_range' scaling function:
gamma**(cycle iterations)
scale_fn: Custom scaling policy defined by a single
argument lambda function, where
0 <= scale_fn(x) <= 1 for all x >= 0.
            mode parameter is ignored
scale_mode: {'cycle', 'iterations'}.
Defines whether scale_fn is evaluated on
cycle number or cycle iterations (training
iterations since start of cycle). Default is 'cycle'.
The amplitude of the cycle can be scaled on a per-iteration or
per-cycle basis.
This class has three built-in policies, as put forth in the paper.
"triangular":
A basic triangular cycle w/ no amplitude scaling.
"triangular2":
A basic triangular cycle that scales initial amplitude by half each cycle.
"exp_range":
A cycle that scales initial amplitude by gamma**(cycle iterations) at each
cycle iteration.
For more detail, please see paper.
# Example for CIFAR-10 w/ batch size 100:
```python
clr = CyclicLR(base_lr=0.001, max_lr=0.006,
step_size=2000., mode='triangular')
model.fit(X_train, Y_train, callbacks=[clr])
```
Class also supports custom scaling functions:
```python
clr_fn = lambda x: 0.5*(1+np.sin(x*np.pi/2.))
clr = CyclicLR(base_lr=0.001, max_lr=0.006,
step_size=2000., scale_fn=clr_fn,
scale_mode='cycle')
model.fit(X_train, Y_train, callbacks=[clr])
```
# References
- [Cyclical Learning Rates for Training Neural Networks](
https://arxiv.org/abs/1506.01186)
"""
def __init__(
        self,
        base_lr=0.001,
        max_lr=0.006,
        step_size=2000.,
        mode='triangular',
        gamma=1.,
        scale_fn=None,
        scale_mode='cycle'):
    """Configure the cyclical learning-rate schedule.

    Validates ``mode``, stores the cycle parameters, installs the
    matching built-in scaling policy when no custom ``scale_fn`` is
    supplied, and zeroes the iteration counters.
    """
    super(CyclicLR, self).__init__()
    if mode not in ('triangular', 'triangular2', 'exp_range'):
        raise KeyError("mode must be one of 'triangular', "
                       "'triangular2', or 'exp_range'")
    self.base_lr = base_lr
    self.max_lr = max_lr
    self.step_size = step_size
    self.mode = mode
    self.gamma = gamma
    if scale_fn is not None:
        # A caller-supplied policy overrides the built-in modes.
        self.scale_fn = scale_fn
        self.scale_mode = scale_mode
    elif self.mode == 'triangular':
        self.scale_fn = lambda x: 1.
        self.scale_mode = 'cycle'
    elif self.mode == 'triangular2':
        self.scale_fn = lambda x: 1 / (2.**(x - 1))
        self.scale_mode = 'cycle'
    else:  # 'exp_range' is the only remaining valid mode
        self.scale_fn = lambda x: gamma ** x
        self.scale_mode = 'iterations'
    self.clr_iterations = 0.
    self.trn_iterations = 0.
    self.history = {}
    self._reset()
def _reset(self, new_base_lr=None, new_max_lr=None,
new_step_size=None):
"""Resets cycle iterations.
Optional boundary/step size adjustment.
"""
if new_base_lr is not None:
self.base_lr = new_base_lr
if new_max_lr is not None:
self.max_lr = new_max_lr
if new_step_size is not None:
self.step_size = new_step_size
self.clr_iterations = 0.
def clr(self):
    """Return the learning rate for the current ``clr_iterations``."""
    cycle = np.floor(1 + self.clr_iterations / (2 * self.step_size))
    # x goes 0 -> 1 -> 0 across each half cycle (triangle wave).
    x = np.abs(self.clr_iterations / self.step_size - 2 * cycle + 1)
    # Scale by the cycle index or by the raw iteration count.
    scale_arg = cycle if self.scale_mode == 'cycle' else self.clr_iterations
    amplitude = (self.max_lr - self.base_lr) * np.maximum(0, (1 - x))
    return self.base_lr + amplitude * self.scale_fn(scale_arg)
def on_train_begin(self, logs=None):
    """Set the optimizer LR when training starts.

    Uses ``base_lr`` for a fresh run, or the schedule's current value
    when resuming mid-cycle.

    Args:
        logs: optional dict passed by Keras; unused here.
    """
    # FIX: the original signature used a mutable default (``logs={}``);
    # ``None`` avoids the shared-mutable-default pitfall and matches the
    # other callback hooks in this class. Behavior is unchanged because
    # the body normalizes falsy values to a dict anyway.
    logs = logs or {}
    if self.clr_iterations == 0:
        K.set_value(self.model.optimizer.lr, self.base_lr)
    else:
        K.set_value(self.model.optimizer.lr, self.clr())
def on_batch_end(self, epoch, logs=None):
    """Advance the schedule one step and record history after each batch."""
    logs = logs or {}
    self.trn_iterations += 1
    self.clr_iterations += 1
    # Push the freshly computed rate into the optimizer.
    K.set_value(self.model.optimizer.lr, self.clr())
    history = self.history
    history.setdefault('lr', []).append(
        K.get_value(self.model.optimizer.lr))
    history.setdefault('iterations', []).append(self.trn_iterations)
    # Mirror whatever metrics Keras handed us into the history as well.
    for key, value in logs.items():
        history.setdefault(key, []).append(value)
def on_epoch_end(self, epoch, logs=None):
    """Expose the current optimizer LR to Keras via the ``logs`` dict."""
    logs = logs or {}
    # Mutating ``logs`` is intentional: Keras reads it back after the hook.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
| 37.665025 | 118 | 0.582396 |
3f4795dbe44394c551da78077c21d2a78916c96d | 1,369 | py | Python | examples/BEMIO/CAPYTAINE/run_cases.py | srflp/WEC-Sim | 2820d31fd6d534f60d3b97ad9a0d4ba270f9a8e2 | [
"Apache-2.0"
] | 89 | 2015-01-09T01:45:25.000Z | 2022-01-20T15:45:11.000Z | examples/BEMIO/CAPYTAINE/run_cases.py | srflp/WEC-Sim | 2820d31fd6d534f60d3b97ad9a0d4ba270f9a8e2 | [
"Apache-2.0"
] | 731 | 2015-03-31T04:59:36.000Z | 2022-03-31T19:04:29.000Z | examples/BEMIO/CAPYTAINE/run_cases.py | srflp/WEC-Sim | 2820d31fd6d534f60d3b97ad9a0d4ba270f9a8e2 | [
"Apache-2.0"
] | 160 | 2015-01-06T21:36:53.000Z | 2022-03-28T09:10:28.000Z | # '-*- coding: utf-8 -*-
"""
Created on Wed Nov 18 11:32:31 2020
@author: akeeste
This script runs all of the Capytaine cases needed for the WEC-Sim bemio examples.
This script runs Capytaine, not BEMIO.
"""
import os
# num of problems in each case is nFrequencies*(nHeadings + nDof*nBodies)
# total: ~47,000 problems, ~10 cases/minute, 81 hours run time
# Single body cases --------------------------------------------------------#
# Each entry: (banner, directory to chdir into, Capytaine driver script).
# Problem counts / approximate runtimes are noted per case.
_CASES = [
    # Single body cases
    ('Run sphere case', './sphere', 'sphere.py'),            # 2940 problems, ~30 min (no b2b)
    ('Run Ellipsoid case', '../ellipsoid', 'ellipsoid.py'),  # 2156 problems, ~30 min (no b2b)
    ('Run Cylinder case', '../cylinder', 'cylinder.py'),     # 3675 problems, ~30 min (no b2b)
    ('Run Coer_Comp case', '../coer_comp', 'coer_comp.py'),  # 12800 problems, ~7 hours (no b2b)
    # Multiple body cases
    ('Run Cubes case', '../cubes', 'cubes.py'),              # 11000 problems, 8.5 hours (new b2b)
    ('Run RM3 case', '../rm3', 'rm3.py'),                    # 3380 problems, 21 hours (new b2b)
    ('Run OSWEC case', '../oswec', 'oswec.py'),              # 11000 problems, 43 hours (new b2b)
]

for banner, case_dir, script in _CASES:
    print(banner)
    # Directories are relative: the first hop is from the repo root, the
    # rest hop sideways out of the previous case's directory.
    os.chdir(case_dir)
    os.system('python ' + script)

print('All simulations complete.')
43247d091613c6fbfb5dcccc4b4ea0d1cf312658 | 1,091 | py | Python | runtests.py | gjost/django-linkpile | a3b12f3cfc13b20fe74aa38ae1a669b91ecf303d | [
"BSD-3-Clause"
] | 4 | 2017-10-12T16:01:45.000Z | 2021-05-23T22:38:13.000Z | runtests.py | gjost/django-linkpile | a3b12f3cfc13b20fe74aa38ae1a669b91ecf303d | [
"BSD-3-Clause"
] | 2 | 2018-06-25T17:09:26.000Z | 2020-01-21T20:59:33.000Z | runtests.py | gjost/django-linkpile | a3b12f3cfc13b20fe74aa38ae1a669b91ecf303d | [
"BSD-3-Clause"
] | null | null | null | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="linkpile.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"linkpile",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
    """Run the given test labels with django-nose, defaulting to 'tests'.

    Exits the process with the failure count when any test fails.
    """
    labels = test_args or ['tests']
    runner = NoseTestSuiteRunner(verbosity=1)
    failures = runner.run_tests(labels)
    if failures:
        sys.exit(failures)
# Allow "python runtests.py [labels...]" from the command line.
if __name__ == '__main__':
    run_tests(*sys.argv[1:])
af5ce1c211d6cd05d3f1f641ed4a340e032648ba | 475 | py | Python | examples/src/autodockerization/main.py | aniketmaurya/Chitra | e040311c25ccf2e101df5596662450ae532bee08 | [
"Apache-2.0"
] | 34 | 2022-01-01T08:18:42.000Z | 2022-03-27T16:02:22.000Z | examples/src/autodockerization/main.py | aniketmaurya/Chitra | e040311c25ccf2e101df5596662450ae532bee08 | [
"Apache-2.0"
] | 20 | 2021-12-31T14:52:05.000Z | 2022-03-26T20:57:52.000Z | examples/src/autodockerization/main.py | aniketmaurya/Chitra | e040311c25ccf2e101df5596662450ae532bee08 | [
"Apache-2.0"
] | 6 | 2022-01-12T23:38:19.000Z | 2022-03-10T16:03:15.000Z | from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline
from chitra.serve import create_api

# Build a HuggingFace text-classification pipeline from a small distilled
# model (downloads weights on first run).
tokenizer = AutoTokenizer.from_pretrained("microsoft/xtremedistil-l6-h256-uncased")
model = AutoModelForSequenceClassification.from_pretrained("microsoft/xtremedistil-l6-h256-uncased")
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)

# Wrap the classifier in a chitra API without starting the server
# (run=False); expose the ASGI app for an external runner (e.g. uvicorn).
api = create_api(classifier, run=False, api_type="text-classification")
app = api.app
edce05eae7cbe4eac22f510dd0d35cc5d4667545 | 257 | py | Python | horast/__init__.py | renesugar/horast | 1f3cca6f03cbe82303655cd8e2f35fa96a820f6a | [
"Apache-2.0"
] | 12 | 2018-12-09T10:45:21.000Z | 2022-01-08T03:32:26.000Z | horast/__init__.py | renesugar/horast | 1f3cca6f03cbe82303655cd8e2f35fa96a820f6a | [
"Apache-2.0"
] | 6 | 2019-07-23T07:25:11.000Z | 2020-10-26T07:52:07.000Z | horast/__init__.py | renesugar/horast | 1f3cca6f03cbe82303655cd8e2f35fa96a820f6a | [
"Apache-2.0"
] | 5 | 2019-02-26T05:59:58.000Z | 2020-04-24T15:22:23.000Z | """horast: Human-oriented abstract syntax tree parser and unparser."""
from static_typing import dump
from .parser import parse
from .unparser import unparse
from .ast_validator import AstValidator

# Public API of the package: re-exported names only.
__all__ = ['dump', 'parse', 'unparse', 'AstValidator']
cc1a7116372c7715b03953fddd43dee12131c66c | 1,512 | py | Python | aMLpy/plot_crossmatch.py | dirac-institute/SPSAS2019 | 34781162bb088b17c1b9afb69eee2284dce6bdec | [
"MIT"
] | 15 | 2019-07-31T07:18:49.000Z | 2021-06-03T23:41:59.000Z | aMLpy/plot_crossmatch.py | dirac-institute/SPSAS2019 | 34781162bb088b17c1b9afb69eee2284dce6bdec | [
"MIT"
] | null | null | null | aMLpy/plot_crossmatch.py | dirac-institute/SPSAS2019 | 34781162bb088b17c1b9afb69eee2284dce6bdec | [
"MIT"
] | 6 | 2019-08-01T19:33:16.000Z | 2020-08-27T13:01:15.000Z | """
Catalog cross-matching
----------------------
This plots the cross-matched samples between the SDSS imaging data and
the SDSS Stripe 82 standard stars.
"""
# Author: Jake VanderPlas <vanderplas@astro.washington.edu>
# License: BSD
# The figure is an example from astroML: see http://astroML.github.com
import os
import sys
from time import time
import numpy as np
from matplotlib import pyplot as plt
from astroML.datasets import fetch_imaging_sample, fetch_sdss_S82standards
from astroML.crossmatch import crossmatch
from astroML.plotting import hist
# Assemble (ra, dec) coordinate arrays for both catalogs.
image_data = fetch_imaging_sample()
imX = np.column_stack((image_data['ra'],
                       image_data['dec'])).astype(np.float64)

standards_data = fetch_sdss_S82standards()
stX = np.column_stack((standards_data['RA'],
                       standards_data['DEC'])).astype(np.float64)

# Cross-match within a 1-arcsecond radius (coordinates are in degrees).
max_radius = 1. / 3600
dist, ind = crossmatch(imX, stX, max_radius)

# Unmatched entries come back as +inf; keep matches, convert to arcsec.
match = ~np.isinf(dist)
dist_match = dist[match] * 3600

# Histogram of match separations with Knuth's rule for the bin width.
ax = plt.axes()
hist(dist_match, bins='knuth', ax=ax,
     histtype='stepfilled', ec='k', fc='#AAAAAA')
ax.set_xlabel('radius of match (arcsec)')
ax.set_ylabel('N(r, r+dr)')
ax.text(0.95, 0.95,
        "Total objects: %i\nNumber with match: %i" % (imX.shape[0],
                                                      np.sum(match)),
        ha='right', va='top', transform=ax.transAxes)
ax.set_xlim(0, 0.2)
plt.show()
| 28.528302 | 74 | 0.683201 |
e1e2c6326933eadce337864f1b35664f36257bd0 | 920 | py | Python | django_cropper_image/config.py | aneesh2usman/django_cropper_image | eb36e31979954009a47a77c701b58e842acaa137 | [
"MIT"
] | 19 | 2021-06-13T07:32:57.000Z | 2021-08-01T10:12:23.000Z | django_cropper_image/config.py | aneesh2usman/django_cropper_image | eb36e31979954009a47a77c701b58e842acaa137 | [
"MIT"
] | null | null | null | django_cropper_image/config.py | aneesh2usman/django_cropper_image | eb36e31979954009a47a77c701b58e842acaa137 | [
"MIT"
] | 3 | 2021-06-22T06:04:11.000Z | 2021-07-08T13:07:17.000Z |
from django.conf import settings
class CropperSettings(object):
    """Resolves django_cropper_image settings.

    Values come from the ``DJANGO_CROPPER_IMAGE_SETTINGS`` dict in the
    Django settings when present, falling back to the built-in defaults.
    """

    @property
    def default(self):
        """Built-in default settings (a fresh dict on every access)."""
        return {
            'EXCLUDE_CROPPERJS': False,
            'CROPPERJS_STATIC_JS': 'django_cropper_image/js/cropper.min.js',
            'CROPPERJS_STATIC_CSS': 'django_cropper_image/css/cropper.min.css',
            'CUSTOM_STATIC_JS': 'django_cropper_image/js/image_cropper.min.js',
            'CUSTOM_STATIC_CSS': 'django_cropper_image/css/image_cropper.css',
            'TEMPLATES': 'django_cropper_image/image_cropper_input.html',
        }

    def get(self, name):
        """Return the setting ``name``; user settings beat the defaults.

        Raises:
            ValueError: if the lookup fails (e.g. the settings override
                is not a mapping).
        """
        try:
            if hasattr(settings, 'DJANGO_CROPPER_IMAGE_SETTINGS'):
                return settings.DJANGO_CROPPER_IMAGE_SETTINGS.get(
                    name, self.default.get(name))
            return self.default.get(name)
        except Exception as e:
            # BUG FIX: the original constructed this ValueError without
            # raising it, so lookup failures were silently swallowed and
            # ``None`` returned instead.
            raise ValueError(f'{name} not found') from e

    def __call__(self, name):
        """Allow the instance itself to be called like ``get``."""
        return self.get(name)
| 30.666667 | 94 | 0.768478 |
aafd8c10a1269183922100d79e501520f625c902 | 1,583 | py | Python | Software/Python/grove_i2c_color_sensor/color_sensor_example.py | benmcclelland/GrovePi | 1e48137b6007ffd4ce430e821d2aa744349e362a | [
"MIT"
] | 482 | 2015-01-09T03:06:14.000Z | 2022-03-24T10:05:07.000Z | Software/Python/grove_i2c_color_sensor/color_sensor_example.py | benmcclelland/GrovePi | 1e48137b6007ffd4ce430e821d2aa744349e362a | [
"MIT"
] | 257 | 2015-01-13T14:08:17.000Z | 2022-01-20T08:43:50.000Z | Software/Python/grove_i2c_color_sensor/color_sensor_example.py | benmcclelland/GrovePi | 1e48137b6007ffd4ce430e821d2aa744349e362a | [
"MIT"
] | 510 | 2015-01-27T17:15:44.000Z | 2022-03-29T01:27:13.000Z | import time
import grove_i2c_color_sensor
def _print_readings(sensor):
    """Read and print RGBC, chromaticity and nearest-color-name values."""
    color = sensor.read_rgbc()
    print("RGB: {},{},{} - Clear {}".format(color[0], color[1], color[2], color[3]))
    color = sensor.read_xy()
    print("xy: {},{}".format(color[0], color[1]))
    color = sensor.read_color_name()
    print("Closest color match: {}".format(color))


# Open connection to sensor
color_sensor = grove_i2c_color_sensor.GroveI2CColorSensor()

# --- Continuous integration: predefined 100 ms duration, 16x gain ---
color_sensor.use_continuous_integration(100)
color_sensor.set_gain_and_prescaler(16)
color_sensor.start_integration()
time.sleep(.1)
if color_sensor.is_integration_complete():
    print("Continuous integration complete. Read color:")
    # FIX: the read/print sequence was duplicated verbatim for both
    # integration modes; it now lives in _print_readings().
    _print_readings(color_sensor)
else:
    print("Continuous integration incomplete")

# Stop integration before changing settings
color_sensor.stop_integration()

# --- Manual integration: 200 ms, 4x gain ---
color_sensor.use_manual_integration()
color_sensor.set_gain_and_prescaler(4)
color_sensor.start_integration()
time.sleep(0.2)
color_sensor.stop_integration()
if color_sensor.is_integration_complete():
    print("Manual integration complete. Read color:")
    _print_readings(color_sensor)
else:
    print("Manual integration incomplete")
| 32.979167 | 84 | 0.738471 |
bb83136c6388644fd41221d3158faa33acdbd63c | 2,320 | py | Python | tests/nemo_text_processing/test_time.py | agemagician/NeMo | 5839aee402f314aa413b28e9042b1e1cac10a114 | [
"Apache-2.0"
] | null | null | null | tests/nemo_text_processing/test_time.py | agemagician/NeMo | 5839aee402f314aa413b28e9042b1e1cac10a114 | [
"Apache-2.0"
] | null | null | null | tests/nemo_text_processing/test_time.py | agemagician/NeMo | 5839aee402f314aa413b28e9042b1e1cac10a114 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nemo_text_processing.inverse_text_normalization.inverse_normalize import InverseNormalizer
from nemo_text_processing.text_normalization.normalize import Normalizer
from nemo_text_processing.text_normalization.normalize_with_audio import NormalizerWithAudio
from parameterized import parameterized
from utils import PYNINI_AVAILABLE, parse_test_case_file
class TestTime:
    """Round-trip tests for the English time (inverse) normalization grammars."""

    inverse_normalizer = InverseNormalizer() if PYNINI_AVAILABLE else None

    @parameterized.expand(parse_test_case_file('data_inverse_text_normalization/test_cases_time.txt'))
    @pytest.mark.skipif(
        not PYNINI_AVAILABLE, reason="`pynini` not installed, please install via nemo_text_processing/setup.sh"
    )
    @pytest.mark.run_only_on('CPU')
    @pytest.mark.unit
    def test_denorm(self, test_input, expected):
        # Inverse normalization: spoken form -> written form.
        assert self.inverse_normalizer.inverse_normalize(test_input, verbose=False) == expected

    normalizer = Normalizer(input_case='lower_cased') if PYNINI_AVAILABLE else None
    normalizer_with_audio = NormalizerWithAudio(input_case='cased') if PYNINI_AVAILABLE else None

    @parameterized.expand(parse_test_case_file('data_text_normalization/test_cases_time.txt'))
    @pytest.mark.skipif(
        not PYNINI_AVAILABLE, reason="`pynini` not installed, please install via nemo_text_processing/setup.sh"
    )
    @pytest.mark.run_only_on('CPU')
    @pytest.mark.unit
    def test_norm(self, test_input, expected):
        # Deterministic normalization must match the reference exactly.
        assert self.normalizer.normalize(test_input, verbose=False) == expected
        # The audio-based normalizer must include the reference among its
        # top-100 tagged candidates.
        pred_non_deterministic = self.normalizer_with_audio.normalize(test_input, n_tagged=100)
        assert expected in pred_non_deterministic
| 46.4 | 111 | 0.781034 |
bd5591d570af9ebfde3000d80aee1796d7c48b5c | 953 | py | Python | seleniumcrawler/sites/reddit/reddit.py | Goku0858756/selenium-crawler | e49df4c1f40a330af19d90b7c59d49ac1acee86c | [
"MIT"
] | 83 | 2015-10-07T06:34:45.000Z | 2022-03-09T06:26:54.000Z | seleniumcrawler/sites/reddit/reddit.py | corywalker/selenium-crawler | e49df4c1f40a330af19d90b7c59d49ac1acee86c | [
"MIT"
] | null | null | null | seleniumcrawler/sites/reddit/reddit.py | corywalker/selenium-crawler | e49df4c1f40a330af19d90b7c59d49ac1acee86c | [
"MIT"
] | 29 | 2015-12-04T13:08:16.000Z | 2021-07-15T23:15:36.000Z | from selenium import webdriver
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
def get_profile():
    """Build a FirefoxProfile with CSS, images and Flash disabled.

    Blocking these speeds up page loads considerably while crawling.
    """
    profile = FirefoxProfile()
    # 2 == "block" for these content permissions.
    profile.set_preference('permissions.default.stylesheet', 2)
    profile.set_preference('permissions.default.image', 2)
    profile.set_preference('dom.ipc.plugins.enabled.libflashplayer.so',
                           'false')
    return profile
def handle_link(link):
    """Open ``link`` in Firefox, follow the first post title link, and
    return a dict with the landing page's ``url`` and ``source`` HTML."""
    # Use the stripped-down profile so pages load quickly.
    driver = webdriver.Firefox(get_profile())
    driver.implicitly_wait(30)
    driver.get(link)
    driver.find_element_by_xpath("//p[@class='title']/a").click()
    page = {
        'url': driver.current_url,
        'source': driver.page_source,
    }
    driver.quit()
    return page
| 28.029412 | 78 | 0.691501 |
69a8fa57d12bfc7eeadde5db251a861075641f69 | 3,503 | py | Python | redkyn-grader/grader/test/test_new.py | redkyn/grader | 51f152bb5c5d9db156e5fcf7d548bc51d40cd7d1 | [
"MIT"
] | 13 | 2016-08-02T20:43:03.000Z | 2021-09-03T04:09:34.000Z | redkyn-grader/grader/test/test_new.py | redkyn/grader | 51f152bb5c5d9db156e5fcf7d548bc51d40cd7d1 | [
"MIT"
] | 29 | 2016-03-06T23:32:50.000Z | 2019-01-27T00:52:47.000Z | redkyn-grader/grader/test/test_new.py | redkyn/grader | 51f152bb5c5d9db156e5fcf7d548bc51d40cd7d1 | [
"MIT"
] | 2 | 2017-01-27T19:17:09.000Z | 2017-02-02T23:25:00.000Z | import os
import pytest
import yaml
from grader.models.gradesheet import GradeSheetError
from grader.models.config import ConfigValidationError
def test_new_without_repo(parse_and_run):
    """A plain ``new`` creates the assignment skeleton and default config."""
    course_path = parse_and_run(["init", "cpl"])
    parse_and_run(["new", "assignment1"])

    assignment_dir = os.path.join(course_path, "assignments", "assignment1")
    gradesheet_dir = os.path.join(assignment_dir, "gradesheet")

    # Assignment tree
    assert os.path.exists(assignment_dir)
    assert os.path.exists(gradesheet_dir)
    assert os.path.exists(os.path.join(assignment_dir, "submissions"))
    assert os.path.exists(os.path.join(assignment_dir, "results"))

    # Gradesheet contents
    assert os.path.exists(os.path.join(gradesheet_dir, "assignment.yml"))
    assert os.path.exists(os.path.join(gradesheet_dir, "Dockerfile"))

    with open(os.path.join(gradesheet_dir, "assignment.yml")) as f:
        config = yaml.safe_load(f)
    assert config['assignment-name'] == 'assignment1'
def test_new_with_repo(parse_and_run):
    """``new`` with a repo URL clones the gradesheet and uses its config."""
    course_path = parse_and_run(["init", "cpl"])
    parse_and_run(["new", "assignment1",
                   "https://github.com/michaelwisely/python-gradesheet.git"])

    assignment_dir = os.path.join(course_path, "assignments", "assignment1")
    gradesheet_dir = os.path.join(assignment_dir, "gradesheet")

    # Assignment tree
    assert os.path.exists(assignment_dir)
    assert os.path.exists(gradesheet_dir)
    assert os.path.exists(os.path.join(assignment_dir, "submissions"))
    assert os.path.exists(os.path.join(assignment_dir, "results"))

    # Gradesheet contents
    assert os.path.exists(os.path.join(gradesheet_dir, "assignment.yml"))
    assert os.path.exists(os.path.join(gradesheet_dir, "Dockerfile"))

    # The assignment name must come from the cloned repo, not the default.
    with open(os.path.join(gradesheet_dir, "assignment.yml")) as f:
        config = yaml.safe_load(f)
    assert config['assignment-name'] == 'new-python-assignment'
def test_new_existing_assignment(parse_and_run):
    """Creating an assignment twice must fail rather than overwrite it."""
    parse_and_run(["init", "cpl"])
    parse_and_run(["new", "assignment1"])
    with pytest.raises(FileExistsError):
        parse_and_run(["new", "assignment1"])
def test_new_failed_clone(parse_and_run):
    """A bogus gradesheet repository aborts without leaving a directory."""
    course_path = parse_and_run(["init", "cpl"])
    with pytest.raises(GradeSheetError):
        parse_and_run(["new", "assignment1",
                       "git://github.com/michaelwisely/nope.git"])
    assert not os.path.exists(
        os.path.join(course_path, "assignments", "assignment1"))
def test_new_bad_assignment_name(parse_and_run):
    """An invalid assignment name is rejected before anything is created."""
    course_path = parse_and_run(["init", "cpl"])
    with pytest.raises(ConfigValidationError):
        parse_and_run(["new", "NOOO%%%OPE!"])
    assert not os.path.exists(
        os.path.join(course_path, "assignments", "assignment1"))
def test_new_without_init(parse_and_run):
    """``new`` exits when the course-wide grader.yml is missing."""
    course_path = parse_and_run(["init", "cpl"])
    os.remove(os.path.join(course_path, "grader.yml"))
    assert os.listdir(course_path) == []
    with pytest.raises(SystemExit):
        parse_and_run(["new", "a1"])
    assert not os.path.exists(
        os.path.join(course_path, "assignments", "assignment1"))
| 31 | 77 | 0.682843 |
1a392f31a6552f4631d386610b4dfe296ebb217c | 26,798 | py | Python | rtox/fabric/network.py | pycontribs/rtox | d60706f49ce12984a8bfd9689acb2410e6ff6299 | [
"Apache-2.0"
] | 6 | 2018-02-18T14:46:29.000Z | 2021-04-08T11:26:52.000Z | rtox/fabric/network.py | pycontribs/rtox | d60706f49ce12984a8bfd9689acb2410e6ff6299 | [
"Apache-2.0"
] | 15 | 2018-02-18T14:47:07.000Z | 2021-06-25T15:15:08.000Z | rtox/fabric/network.py | pycontribs/rtox | d60706f49ce12984a8bfd9689acb2410e6ff6299 | [
"Apache-2.0"
] | 2 | 2018-07-13T11:48:27.000Z | 2020-06-24T20:49:47.000Z | """
Classes and subroutines dealing with network connections and related topics.
"""
from functools import wraps
import getpass
import os
import re
import time
import six
import socket
import sys
from rtox.fabric.auth import get_password, set_password
from rtox.fabric.utils import handle_prompt_abort, warn
from rtox.fabric.exceptions import NetworkError
try:
    import warnings
    # Paramiko emits DeprecationWarnings on some platforms; silence them.
    warnings.simplefilter('ignore', DeprecationWarning)
    import paramiko as ssh
except ImportError:
    # Surface the underlying import failure, then bail out cleanly.
    import traceback
    traceback.print_exc()
    msg = """
There was a problem importing our SSH library (see traceback above).
Please make sure all dependencies are installed and importable.
""".rstrip()
    sys.stderr.write(msg + '\n')
    sys.exit(1)
# Splits an IPv6-style host string into named groups: "host" (hex digits and
# colons, with an optional %scope suffix) and an optional "port"; square
# brackets around the host are tolerated for host/port disambiguation.
ipv6_regex = re.compile(
    r'^\[?(?P<host>[0-9A-Fa-f:]+(?:%[a-z]+\d+)?)\]?(:(?P<port>\d+))?$')
def direct_tcpip(client, host, port):
    """Open a ``direct-tcpip`` channel through ``client`` to ``host:port``.

    Used to tunnel a final-host connection through an SSH gateway.
    """
    transport = client.get_transport()
    return transport.open_channel(
        'direct-tcpip',
        (host, int(port)),
        ('', 0),
    )
def is_key_load_error(e):
    """True when ``e`` is paramiko's "Unable to parse key file" SSHException."""
    # Exact class match on purpose: subclasses signal different failures.
    if e.__class__ is not ssh.SSHException:
        return False
    return 'Unable to parse key file' in str(e)
def _tried_enough(tries):
    """Whether ``tries`` has reached ``env.connection_attempts``."""
    from rtox.fabric.state import env
    max_attempts = env.connection_attempts
    return tries >= max_attempts
def get_gateway(host, port, cache, replace=False):
    """Create and return a gateway socket, if one is needed.

    Checks ``env`` for gateway / proxy-command settings and returns the
    socket-like object the final host connection should ride over.

    :param host: hostname of the target server.
    :param port: port to connect to on the target server.
    :param cache:
        ``HostConnectionCache`` in which gateway ``SSHClient`` objects are
        retrieved/stored.
    :param replace: whether to forcibly replace a cached gateway client.
    :returns: a ``socket.socket``-like object, or ``None`` if none is needed.
    """
    from rtox.fabric.state import env, output
    # Look up any ProxyCommand first (also primes the ssh_config cache);
    # an explicit env.gateway takes precedence over it.
    proxy_command = ssh_config().get('proxycommand', None)
    if env.gateway:
        gateway = normalize_to_string(env.gateway)
        # Ensure an initial connection to the gateway host itself.
        if replace or gateway not in cache:
            if output.debug:
                print("Creating new gateway connection to %r" % gateway)
            cache[gateway] = connect(*normalize(gateway) + (cache, False))
        # Ask the (now open) gateway for a direct-tcpip channel to the real
        # target. Bypass the cache's auto-connecting __getitem__ to avoid
        # recursion -- this function is usually called from within it.
        gateway_client = dict.__getitem__(cache, gateway)
        return direct_tcpip(gateway_client, host, port)
    if proxy_command:
        return ssh.ProxyCommand(proxy_command)
    return None
class HostConnectionCache(dict):
    """Dict subclass that lazily creates and caches SSH client connections.

    Keys are Fabric host strings (``[user@]host[:port]``); missing user and
    port parts are filled in from ``env`` (``env.user`` and port 22 by
    default). Requesting a key that is not yet cached triggers a new
    connection, including any gateway / ProxyCommand socket setup. Distinct
    explicit users or ports for the same hostname yield distinct cached
    connections (``user1@example.com`` vs ``user2@example.com``, or
    ``example.com`` vs ``example.com:2222``).
    """

    def connect(self, key):
        """Force a new connection for ``key`` and store it in the cache."""
        from rtox.fabric.state import env
        user, host, port = normalize(key)
        key = normalize_to_string(key)
        # Don't try to route the gateway host through itself.
        seek_gateway = True
        if env.gateway:
            seek_gateway = normalize_to_string(env.gateway) != key
        self[key] = connect(
            user, host, port, cache=self, seek_gateway=seek_gateway)

    def __getitem__(self, key):
        """Auto-connect on first access, then return the cached client."""
        key = normalize_to_string(key)
        if key not in self:
            self.connect(key)
        return dict.__getitem__(self, key)

    # The remaining dict overrides simply normalize keys before delegating.

    def __setitem__(self, key, value):
        return dict.__setitem__(self, normalize_to_string(key), value)

    def __delitem__(self, key):
        return dict.__delitem__(self, normalize_to_string(key))

    def __contains__(self, key):
        return dict.__contains__(self, normalize_to_string(key))
def ssh_config(host_string=None):
    """Return the SSH config dict for ``host_string`` (default: current host).

    Memoizes the parsed config file on ``env._ssh_config`` (per-host lookups
    are not memoized). Returns an empty dict when SSH-config use is disabled
    or the configured file cannot be read.
    """
    from rtox.fabric.state import env
    if not env.use_ssh_config:
        return {}
    if '_ssh_config' not in env:
        path = os.path.expanduser(env.ssh_config_path)
        try:
            parsed = ssh.SSHConfig()
            with open(path) as fd:
                parsed.parse(fd)
        except IOError:
            warn("Unable to load SSH config file '%s'" % path)
            return {}
        env._ssh_config = parsed
    host = parse_host_string(host_string or env.host_string)['host']
    return env._ssh_config.lookup(host)
def key_filenames():
    """Return SSH key filenames for the current host, ``~``-expanded.

    Combines ``env.key_filename`` (a string, ``None``, or a list) with any
    IdentityFile entries from the host's ssh_config.
    """
    from rtox.fabric.state import env
    candidates = env.key_filename
    # Coerce a stringish/None value into a list for uniform handling.
    if candidates is None or isinstance(candidates, six.string_types):
        candidates = [candidates]
    # Strip empty entries (such as the default value).
    candidates = [k for k in candidates if k]
    # Honor SSH config; Paramiko 1.10+ always returns a list here.
    candidates.extend(ssh_config().get('identityfile', []))
    return [os.path.expanduser(k) for k in candidates]
def key_from_env(passphrase=None):
    """Build a paramiko key object from the in-memory ``env.key`` string.

    Tries each supported key class in turn and returns the first that
    parses; returns ``None`` when no key is set or none can parse it.
    Re-raises when the key is valid but passphrase-protected so the
    connection loop can prompt and retry.
    """
    from rtox.fabric.state import env, output
    if 'key' not in env:
        return None
    if output.debug:
        # NOTE: echoing the key may not be the most secure thing; on the
        # other hand anybody running the process already has access to the
        # value, so the real risk is only in logged output.
        sys.stderr.write("Trying to honor in-memory key %r\n" % env.key)
    for pkey_class in (ssh.rsakey.RSAKey, ssh.dsskey.DSSKey):
        if output.debug:
            sys.stderr.write("Trying to load it as %s\n" % pkey_class)
        try:
            return pkey_class.from_private_key(six.StringIO(env.key), passphrase)
        except Exception as e:
            # A valid-but-encrypted key: propagate so the caller can prompt
            # for a passphrase and retry.
            if 'Private key file is encrypted' in str(e):
                raise
            # Otherwise it probably just isn't this key type; try the next.
    return None
def parse_host_string(host_string):
    """Split ``[user@]host[:port]`` into a ``{'user', 'host', 'port'}`` dict.

    Missing pieces come back as ``None``. IPv6 literals (more than one
    colon) may be wrapped in square brackets to disambiguate the port.
    """
    # Optional user prefix (split on the rightmost '@').
    before_at, _, hostport = host_string.rpartition('@')
    user = before_at or None
    if hostport.count(':') > 1:
        # IPv6 literal: delegate host/port separation to the regex.
        groups = ipv6_regex.match(hostport).groupdict()
        host = groups['host'] or None
        port = groups['port'] or None
    else:
        # Hostname or IPv4: at most one colon separates host and port.
        host, _, port = hostport.partition(':')
        host = host or None
        port = port or None
    return {'user': user, 'host': host, 'port': port}
def normalize(host_string, omit_port=False):
    """Resolve ``host_string`` into explicit ``(user, host[, port])``.

    Fills in missing pieces from ssh_config (when enabled) and from ``env``,
    swapping in hostname aliases where the SSH config defines them.

    Port precedence: explicit in the host string, then ssh_config, then
    ``env.port``, then ``env.default_port``. With ``omit_port=True`` only
    ``(user, host)`` is returned. An empty ``host_string`` yields empty
    strings.
    """
    from rtox.fabric.state import env
    # Gracefully handle "empty" input by returning empty output.
    if not host_string:
        return ('', '') if omit_port else ('', '', '')
    # Parse early: host-specific ssh_config lookups need the hostname.
    parts = parse_host_string(host_string)
    host = parts['host']
    # Env defaults (env.user falls back to the local username).
    user = env.user or env.local_user
    conf = ssh_config(host_string)
    # ssh_config values only apply while env still holds its pristine
    # defaults; values the user has tweaked take precedence.
    if user == env.local_user and 'user' in conf:
        user = conf['user']
    if 'hostname' in conf:
        host = conf['hostname']
    # An explicit user inside the host string beats everything else.
    user = parts['user'] or user
    if omit_port:
        return user, host
    # Port priority order (as in the docstring).
    conf_port = conf.get('port', None) if env.use_ssh_config else None
    port = parts['port'] or conf_port or env.port or env.default_port
    return user, host, port
def to_dict(host_string):
    """Explode ``host_string`` into its normalized parts plus itself."""
    user, host, port = normalize(host_string)
    return {
        'user': user,
        'host': host,
        'port': port,
        'host_string': host_string,
    }
def from_dict(arg):
    """Reassemble a host-string dict (as built by ``to_dict``) into a string."""
    user = arg['user']
    host = arg['host']
    port = arg['port']
    return join_host_strings(user, host, port)
def denormalize(host_string):
    """
    Strip default values out of the given host string.

    The user part is dropped when it matches the default user, and the port
    part is dropped when it is the standard SSH port 22.
    """
    from rtox.fabric.state import env
    parsed = parse_host_string(host_string)
    # Keep the user prefix only when it differs from the configured default.
    prefix = ''
    if parsed['user'] is not None and parsed['user'] != env.user:
        prefix = parsed['user'] + '@'
    # Keep the port suffix only when it is non-default (i.e. not 22).
    suffix = ''
    if parsed['port'] is not None and parsed['port'] != '22':
        suffix = ':' + parsed['port']
    hostname = parsed['host']
    # IPv6-looking hosts need square brackets when a port suffix is present.
    if suffix and hostname.count(':') > 1:
        hostname = '[%s]' % hostname
    return prefix + hostname + suffix
def join_host_strings(user, host, port=None):
    """
    Combine ``user``/``host``/``port`` into a ``user@host:port`` string.

    Filling in missing user/port values is not done here; that is the job
    of ``normalize``. Hosts that look like IPv6 literals (more than one
    colon) are wrapped in square brackets so the port separator stays
    unambiguous. When ``port`` is omitted the result is simply
    ``user@host``.
    """
    if not port:
        return "%s@%s" % (user, host)
    if host.count(':') > 1:
        # More than one colon implies an IPv6 literal; bracket it.
        return "%s@[%s]:%s" % (user, host, port)
    return "%s@%s:%s" % (user, host, port)
def normalize_to_string(host_string):
    """
    Run ``normalize`` and re-join the parts into another valid host string.
    """
    user, host, port = normalize(host_string)
    return join_host_strings(user, host, port)
def connect(user, host, port, cache, seek_gateway=True):
    """
    Create and return a new SSHClient instance connected to given host.

    :param user: Username to connect as.

    :param host: Network hostname.

    :param port: SSH daemon port.

    :param cache:
        A ``HostConnectionCache`` instance used to cache/store gateway hosts
        when gatewaying is enabled.

    :param seek_gateway:
        Whether to try setting up a gateway socket for this connection. Used so
        the actual gateway connection can prevent recursion.

    Loops until a connection succeeds, re-prompting for passwords on auth
    failures and retrying transient banner/timeout errors up to
    ``env.connection_attempts`` times; raises ``NetworkError`` on
    unrecoverable failures.
    """
    from rtox.fabric.state import env, output
    #
    # Initialization
    #
    # Init client
    client = ssh.SSHClient()
    # Load system hosts file (e.g. /etc/ssh/ssh_known_hosts)
    known_hosts = env.get('system_known_hosts')
    if known_hosts:
        client.load_system_host_keys(known_hosts)
    # Load known host keys (e.g. ~/.ssh/known_hosts) unless user says not to.
    if not env.disable_known_hosts:
        client.load_system_host_keys()
    # Unless user specified not to, accept/add new, unknown host keys
    if not env.reject_unknown_hosts:
        client.set_missing_host_key_policy(ssh.AutoAddPolicy())
    #
    # Connection attempt loop
    #
    # Initialize loop variables
    connected = False
    password = get_password(user, host, port, login_only=True)
    tries = 0
    sock = None
    # Loop until successful connect (keep prompting for new password)
    while not connected:
        # Attempt connection
        try:
            tries += 1
            # (Re)connect gateway socket, if needed.
            # Nuke cached client object if not on initial try.
            if seek_gateway:
                sock = get_gateway(host, port, cache, replace=tries > 0)
            # Set up kwargs (this lets us skip GSS-API kwargs unless explicitly
            # set; otherwise older Paramiko versions will be cranky.)
            kwargs = dict(
                hostname=host,
                port=int(port),
                username=user,
                password=password,
                pkey=key_from_env(password),
                key_filename=key_filenames(),
                timeout=env.timeout,
                allow_agent=not env.no_agent,
                look_for_keys=not env.no_keys,
                sock=sock,
            )
            # Forward GSS-API settings only when explicitly configured.
            for suffix in ('auth', 'deleg_creds', 'kex'):
                name = "gss_" + suffix
                val = env.get(name, None)
                if val is not None:
                    kwargs[name] = val
            # Ready to connect
            client.connect(**kwargs)
            connected = True
            # set a keepalive if desired
            if env.keepalive:
                client.get_transport().set_keepalive(env.keepalive)
            return client
        # BadHostKeyException corresponds to key mismatch, i.e. what on the
        # command line results in the big banner error about man-in-the-middle
        # attacks.
        except ssh.BadHostKeyException as e:
            raise NetworkError("Host key for %s did not match pre-existing key! Server's key was changed recently, or possible man-in-the-middle attack." % host, e)
        # Prompt for new password to try on auth failure
        except (
            ssh.AuthenticationException,
            ssh.PasswordRequiredException,
            ssh.SSHException
        ) as e:
            msg = str(e)
            # If we get SSHExceptionError and the exception message indicates
            # SSH protocol banner read failures, assume it's caused by the
            # server load and try again.
            #
            # If we are using a gateway, we will get a ChannelException if
            # connection to the downstream host fails. We should retry.
            if (e.__class__ is ssh.SSHException \
                and msg == 'Error reading SSH protocol banner') \
                or e.__class__ is ssh.ChannelException:
                if _tried_enough(tries):
                    raise NetworkError(msg, e)
                continue
            # For whatever reason, empty password + no ssh key or agent
            # results in an SSHException instead of an
            # AuthenticationException. Since it's difficult to do
            # otherwise, we must assume empty password + SSHException ==
            # auth exception.
            #
            # Conversely: if we get SSHException and there
            # *was* a password -- it is probably something non auth
            # related, and should be sent upwards. (This is not true if the
            # exception message does indicate key parse problems.)
            #
            # This also holds true for rejected/unknown host keys: we have to
            # guess based on other heuristics.
            if (
                e.__class__ is ssh.SSHException
                and (
                    password
                    or msg.startswith('Unknown server')
                    or "not found in known_hosts" in msg
                )
                and not is_key_load_error(e)
            ):
                raise NetworkError(msg, e)
            # Otherwise, assume an auth exception, and prompt for new/better
            # password.
            # Paramiko doesn't handle prompting for locked private
            # keys (i.e. keys with a passphrase and not loaded into an agent)
            # so we have to detect this and tweak our prompt slightly.
            # (Otherwise, however, the logic flow is the same, because
            # ssh's connect() method overrides the password argument to be
            # either the login password OR the private key passphrase. Meh.)
            #
            # NOTE: This will come up if you normally use a
            # passphrase-protected private key with ssh-agent, and enter an
            # incorrect remote username, because ssh.connect:
            # * Tries the agent first, which will fail as you gave the wrong
            # username, so obviously any loaded keys aren't gonna work for a
            # nonexistent remote account;
            # * Then tries the on-disk key file, which is passphrased;
            # * Realizes there's no password to try unlocking that key with,
            # because you didn't enter a password, because you're using
            # ssh-agent;
            # * In this condition (trying a key file, password is None)
            # ssh raises PasswordRequiredException.
            text = None
            if e.__class__ is ssh.PasswordRequiredException \
                or is_key_load_error(e):
                # NOTE: we can't easily say WHICH key's passphrase is needed,
                # because ssh doesn't provide us with that info, and
                # env.key_filename may be a list of keys, so we can't know
                # which one raised the exception. Best not to try.
                prompt = "[%s] Passphrase for private key"
                text = prompt % env.host_string
            password = prompt_for_password(text)
            # Update env.password, env.passwords if empty
            set_password(user, host, port, password)
        # Ctrl-D / Ctrl-C for exit
        # TODO: this may no longer actually serve its original purpose and may
        # also hide TypeErrors from paramiko. Double check in v2.
        except (EOFError, TypeError):
            # Print a newline (in case user was sitting at prompt)
            print('')
            sys.exit(0)
        # Handle DNS error / name lookup failure
        except socket.gaierror as e:
            raise NetworkError('Name lookup failed for %s' % host, e)
        # Handle timeouts and retries, including generic errors
        # NOTE: In 2.6, socket.error subclasses IOError
        except socket.error as e:
            not_timeout = type(e) is not socket.timeout
            giving_up = _tried_enough(tries)
            # Baseline error msg for when debug is off
            msg = "Timed out trying to connect to %s" % host
            # Expanded for debug on. The parenthetical is deliberately left
            # open in the format string so ", giving up" can be appended
            # inside it; the closing paren is added unconditionally below.
            # (Previously the format string also contained a ")", which
            # produced unbalanced output like "(attempt 1 of 3))".)
            err = msg + " (attempt %s of %s" % (tries, env.connection_attempts)
            if giving_up:
                err += ", giving up"
            err += ")"
            # Debuggin'
            if output.debug:
                sys.stderr.write(err + '\n')
            # Having said our piece, try again
            if not giving_up:
                # Sleep if it wasn't a timeout, so we still get timeout-like
                # behavior
                if not_timeout:
                    time.sleep(env.timeout)
                continue
            # Override error msg if we were retrying other errors
            if not_timeout:
                # socket.error may carry a single argument; fall back to the
                # stringified exception instead of crashing with IndexError.
                detail = e.args[1] if len(e.args) > 1 else str(e)
                msg = "Low level socket error connecting to host %s on port %s: %s" % (
                    host, port, detail
                )
            # Here, all attempts failed. Tweak error msg to show # tries.
            # TODO: find good humanization module, jeez
            s = "s" if env.connection_attempts > 1 else ""
            msg += " (tried %s time%s)" % (env.connection_attempts, s)
            raise NetworkError(msg, e)
        # Ensure that if we terminated without connecting and we were given an
        # explicit socket, close it out.
        finally:
            if not connected and sock is not None:
                sock.close()
def _password_prompt(prompt, stream):
    """Thin getpass wrapper shared by the password-prompting helpers."""
    # getpass on some platforms (Windows, at least) chokes on Unicode
    # prompts under Python 2, so force the prompt down to ASCII there.
    if not six.PY3:
        prompt = prompt.encode('ascii', 'ignore')
    return getpass.getpass(prompt, stream)
def prompt_for_password(prompt=None, no_colon=False, stream=None):
    """
    Prompts for and returns a new password if required; otherwise, returns
    None.

    A trailing colon is appended unless ``no_colon`` is True.

    If the user supplies an empty password, the user will be re-prompted until
    they enter a non-empty password.

    ``prompt_for_password`` autogenerates the user prompt based on the current
    host being connected to. To override this, specify a string value for
    ``prompt``.

    ``stream`` is the stream the prompt will be printed to; if not given,
    defaults to ``sys.stderr``.
    """
    from rtox.fabric.state import env
    handle_prompt_abort("a connection or sudo password")
    if stream is None:
        stream = sys.stderr
    # Build the prompt text, defaulting to one based on the current host.
    default = "[%s] Login password for '%s'" % (env.host_string, env.user)
    password_prompt = default if prompt is None else prompt
    if not no_colon:
        password_prompt += ": "
    # Keep prompting until a non-empty password is supplied (prevents
    # returning the empty string and wasting a network round-trip).
    new_password = _password_prompt(password_prompt, stream)
    while not new_password:
        print("Sorry, you can't enter an empty password. Please try again.")
        new_password = _password_prompt(password_prompt, stream)
    return new_password
def needs_host(func):
    """
    Prompt user for value of ``env.host_string`` when ``env.host_string`` is
    empty.

    This decorator is basically a safety net for silly users who forgot to
    specify the host/host list in one way or another. It should be used to wrap
    operations which require a network connection.

    Due to how we execute commands per-host in ``main()``, it's not possible to
    specify multiple hosts at this point in time, so only a single host will be
    prompted for.

    Because this decorator sets ``env.host_string``, it will prompt once (and
    only once) per command. As ``main()`` clears ``env.host_string`` between
    commands, this decorator will also end up prompting the user once per
    command (in the case where multiple commands have no hosts set, of course.)
    """
    from rtox.fabric.state import env

    @wraps(func)
    def host_prompting_wrapper(*args, **kwargs):
        # Keep asking until a host string has been stored in the env.
        while not env.get('host_string', False):
            handle_prompt_abort("the target host connection string")
            prompt = ("No hosts found. Please specify (single) "
                      "host string for connection: ")
            # WARNING: do not use six.moves.input, because test cases to not
            # overwrite that method with a faked method from Fudge
            if six.PY3:
                host_string = input(prompt)
            else:
                host_string = raw_input(prompt)
            env.update(to_dict(host_string))
        return func(*args, **kwargs)

    # Expose the undecorated callable for introspection/testing.
    host_prompting_wrapper.undecorated = func
    return host_prompting_wrapper
def disconnect_all():
    """
    Disconnect from all currently connected servers.

    Used at the end of ``fab``'s main loop, and also intended for use by
    library users.
    """
    from rtox.fabric.state import connections, output
    # Snapshot the keys first so entries can be deleted while iterating.
    for host_key in list(connections.keys()):
        if output.status:
            # sys.stdout.write instead of print(..., end=" ") keeps
            # Python 2.5 backwards compatibility.
            sys.stdout.write("Disconnecting from %s... " % denormalize(host_key))
        connections[host_key].close()
        del connections[host_key]
        if output.status:
            sys.stdout.write("done.\n")
| 37.47972 | 164 | 0.629748 |
5994bdac247aaa6e424214c085111430865d364e | 1,406 | py | Python | plugin_manager.py | Wildream/simple-slack-bot-python | 4ac1832091c5823eb40a2054a6f3ae44a9db8ff1 | [
"MIT"
] | null | null | null | plugin_manager.py | Wildream/simple-slack-bot-python | 4ac1832091c5823eb40a2054a6f3ae44a9db8ff1 | [
"MIT"
] | null | null | null | plugin_manager.py | Wildream/simple-slack-bot-python | 4ac1832091c5823eb40a2054a6f3ae44a9db8ff1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import print_function
import glob
import imp
import re
class PluginManager(object):
    """Discover, load and dispatch bot plugins from the plugins directory."""
    def __init__(self):
        # Directory scanned for plugin modules, relative to the working dir.
        self.plugins_dir = 'plugins'
        # Loaded plugins, keyed by module name; each value is the module's
        # instantiated Plugin object.
        self.plugins = {}
    def init_plugins(self):
        """Scan ``self.plugins_dir`` for ``*.py`` modules (skipping names
        starting with an underscore), load each one, and store its
        ``Plugin()`` instance in ``self.plugins``."""
        print('Searching for plugins...')
        # [!_]*.py excludes dunder/private modules such as __init__.py.
        module_paths = glob.glob("{0}/[!_]*.py".format(self.plugins_dir))
        print('Found {0} plugins.'.format(len(module_paths)))
        print('Loading plugins...')
        for module_path in module_paths:
            # Extract the bare module name from "plugins/<name>.py".
            # NOTE(review): the forward slash in this regex assumes POSIX
            # paths; glob returns backslashes on Windows — confirm target OS.
            module_name = re.match(
                r'{0}\/(.*)\.py'.format(self.plugins_dir),
                module_path
            ).group(1)
            # NOTE(review): ``imp`` is deprecated (removed in Python 3.12);
            # importlib.util would be the modern equivalent — confirm the
            # supported Python versions before migrating.
            self.plugins[module_name] = imp.load_source(
                'slackbot_plugin_{0}'.format(module_name),
                module_path
            ).Plugin()
            print(' * Plugin "{0}" loaded.'.format(module_name))
        print('-' * 64)
        return None
    def plugin_query(self, input_msg, user, channel):
        """Yield responses from every plugin command whose regex matches
        ``input_msg``; handlers returning None are skipped."""
        for plugin_name in self.plugins:
            for rule in self.plugins[plugin_name].commands:
                if not re.match(rule, input_msg):
                    continue
                # Each command handler receives the raw message plus the
                # originating user and channel.
                response = self.plugins[plugin_name].commands[rule](
                    input_msg,
                    user,
                    channel
                )
                if response is not None:
                    yield response
| 29.914894 | 73 | 0.532006 |
e6a26d17f55a7dd8b4f4dd3f4706a2621ca1b83b | 24,601 | py | Python | ucloud/services/ucloudstack/schemas/models.py | wangrzneu/ucloud-sdk-python3 | 5115bdb754da8d277f6c1513d0ef9cf9a5fe184d | [
"Apache-2.0"
] | 37 | 2019-06-19T09:41:34.000Z | 2022-02-18T08:06:00.000Z | ucloud/services/ucloudstack/schemas/models.py | wangrzneu/ucloud-sdk-python3 | 5115bdb754da8d277f6c1513d0ef9cf9a5fe184d | [
"Apache-2.0"
] | 90 | 2019-08-09T09:27:33.000Z | 2022-03-30T15:54:55.000Z | ucloud/services/ucloudstack/schemas/models.py | wangrzneu/ucloud-sdk-python3 | 5115bdb754da8d277f6c1513d0ef9cf9a5fe184d | [
"Apache-2.0"
] | 19 | 2019-06-13T02:46:01.000Z | 2021-11-01T07:22:18.000Z | """ Code is generated by ucloud-model, DO NOT EDIT IT. """
from ucloud.core.typesystem import schema, fields
class BindVSInfoSchema(schema.ResponseSchema):
    """BindVSInfo - virtual server (VS) info that a certificate is bound to"""
    fields = {
        "LBID": fields.Str(required=False, load_from="LBID"),
        "LBName": fields.Str(required=False, load_from="LBName"),
        "Port": fields.Int(required=False, load_from="Port"),
        "Protocol": fields.Str(required=False, load_from="Protocol"),
        "VSID": fields.Str(required=False, load_from="VSID"),
    }
class CertificateInfoSchema(schema.ResponseSchema):
    """CertificateInfo - certificate information"""
    fields = {
        "CertificateContent": fields.Str(
            required=False, load_from="CertificateContent"
        ),
        "CertificateID": fields.Str(required=False, load_from="CertificateID"),
        "CertificateType": fields.Str(
            required=False, load_from="CertificateType"
        ),
        "CommonName": fields.Str(required=False, load_from="CommonName"),
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "ExpireTime": fields.Int(required=False, load_from="ExpireTime"),
        "Fingerprint": fields.Str(required=False, load_from="Fingerprint"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Privatekey": fields.Str(required=False, load_from="Privatekey"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "SubjectAlternativeNames": fields.List(fields.Str()),
        "VSInfos": fields.List(BindVSInfoSchema()),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class DiskInfoSchema(schema.ResponseSchema):
    """DiskInfo - disk information"""
    fields = {
        "AttachResourceID": fields.Str(
            required=True, load_from="AttachResourceID"
        ),
        "ChargeType": fields.Str(required=True, load_from="ChargeType"),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "DiskID": fields.Str(required=True, load_from="DiskID"),
        "DiskStatus": fields.Str(required=True, load_from="DiskStatus"),
        "DiskType": fields.Str(required=True, load_from="DiskType"),
        "ExpireTime": fields.Int(required=True, load_from="ExpireTime"),
        "Name": fields.Str(required=True, load_from="Name"),
        "Region": fields.Str(required=True, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "SetType": fields.Str(required=True, load_from="SetType"),
        "Size": fields.Int(required=True, load_from="Size"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class EIPInfoSchema(schema.ResponseSchema):
    """EIPInfo - external (public) IP information"""
    fields = {
        "Bandwidth": fields.Int(required=False, load_from="Bandwidth"),
        "BindResourceID": fields.Str(
            required=False, load_from="BindResourceID"
        ),
        "BindResourceType": fields.Str(
            required=False, load_from="BindResourceType"
        ),
        "CanDefaultGW": fields.Int(required=False, load_from="CanDefaultGW"),
        "ChargeType": fields.Str(required=False, load_from="ChargeType"),
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "EIPID": fields.Str(required=False, load_from="EIPID"),
        "ExpireTime": fields.Int(required=False, load_from="ExpireTime"),
        "IP": fields.Str(required=False, load_from="IP"),
        "IPVersion": fields.Str(required=False, load_from="IPVersion"),
        "ISDefaultGW": fields.Int(required=False, load_from="ISDefaultGW"),
        "Name": fields.Str(required=False, load_from="Name"),
        "OperatorName": fields.Str(required=False, load_from="OperatorName"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "Status": fields.Str(required=False, load_from="Status"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class ImageInfoSchema(schema.ResponseSchema):
    """ImageInfo - image information"""
    fields = {
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "ImageID": fields.Str(required=True, load_from="ImageID"),
        "ImageStatus": fields.Str(required=True, load_from="ImageStatus"),
        "ImageType": fields.Str(required=True, load_from="ImageType"),
        "Name": fields.Str(required=True, load_from="Name"),
        "OSDistribution": fields.Str(required=True, load_from="OSDistribution"),
        "OSName": fields.Str(required=True, load_from="OSName"),
        "OSType": fields.Str(required=True, load_from="OSType"),
        "Region": fields.Str(required=True, load_from="Region"),
        "SetArch": fields.Str(required=True, load_from="SetArch"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class LBInfoSchema(schema.ResponseSchema):
    """LBInfo - load balancer information"""
    fields = {
        "AlarmTemplateID": fields.Str(
            required=True, load_from="AlarmTemplateID"
        ),
        "ChargeType": fields.Str(required=True, load_from="ChargeType"),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "ExpireTime": fields.Int(required=True, load_from="ExpireTime"),
        "LBID": fields.Str(required=True, load_from="LBID"),
        "LBStatus": fields.Str(required=True, load_from="LBStatus"),
        "LBType": fields.Str(required=True, load_from="LBType"),
        "Name": fields.Str(required=True, load_from="Name"),
        "PrivateIP": fields.Str(required=False, load_from="PrivateIP"),
        "PublicIP": fields.Str(required=False, load_from="PublicIP"),
        "Region": fields.Str(required=True, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "SGID": fields.Str(required=False, load_from="SGID"),
        "SubnetID": fields.Str(required=True, load_from="SubnetID"),
        "VPCID": fields.Str(required=True, load_from="VPCID"),
        "VSCount": fields.Int(required=True, load_from="VSCount"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class MetricSetSchema(schema.ResponseSchema):
    """MetricSet - a single monitoring data point (timestamp/value)"""
    fields = {
        "Timestamp": fields.Int(required=False, load_from="Timestamp"),
        "Value": fields.Float(required=False, load_from="Value"),
    }
class MetricInfoSchema(schema.ResponseSchema):
    """MetricInfo - monitoring information (a named series of data points)"""
    fields = {
        "Infos": fields.List(MetricSetSchema()),
        "MetricName": fields.Str(required=False, load_from="MetricName"),
    }
class NATGWInfoSchema(schema.ResponseSchema):
    """NATGWInfo - NAT gateway information"""
    fields = {
        "AlarmTemplateID": fields.Str(
            required=True, load_from="AlarmTemplateID"
        ),
        "ChargeType": fields.Str(required=True, load_from="ChargeType"),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "EIP": fields.Str(required=True, load_from="EIP"),
        "ExpireTime": fields.Int(required=True, load_from="ExpireTime"),
        "NATGWID": fields.Str(required=True, load_from="NATGWID"),
        "NATGWStatus": fields.Str(required=True, load_from="NATGWStatus"),
        "Name": fields.Str(required=True, load_from="Name"),
        "Region": fields.Str(required=True, load_from="Region"),
        "Remark": fields.Str(required=True, load_from="Remark"),
        "SGID": fields.Str(required=True, load_from="SGID"),
        "SubnetID": fields.Str(required=True, load_from="SubnetID"),
        "VPCID": fields.Str(required=True, load_from="VPCID"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class NATGWRuleInfoSchema(schema.ResponseSchema):
    """NATGWRuleInfo - whitelist resource info associated with a NAT gateway"""
    fields = {
        "BindResourceID": fields.Str(required=True, load_from="BindResourceID"),
        "BindResourceType": fields.Str(
            required=True, load_from="BindResourceType"
        ),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "IP": fields.Str(required=True, load_from="IP"),
        "NATGWID": fields.Str(required=True, load_from="NATGWID"),
        "NATGWType": fields.Str(required=True, load_from="NATGWType"),
        "Name": fields.Str(required=True, load_from="Name"),
        "RuleID": fields.Str(required=True, load_from="RuleID"),
        "RuleStatus": fields.Str(required=True, load_from="RuleStatus"),
    }
class NICInfoSchema(schema.ResponseSchema):
    """NICInfo - network interface (NIC) information"""
    fields = {
        "BindResourceID": fields.Str(required=True, load_from="BindResourceID"),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "IP": fields.Str(required=True, load_from="IP"),
        "MAC": fields.Str(required=True, load_from="MAC"),
        "NICID": fields.Str(required=True, load_from="NICID"),
        "NICStatus": fields.Str(required=True, load_from="NICStatus"),
        "Name": fields.Str(required=True, load_from="Name"),
        "Region": fields.Str(required=True, load_from="Region"),
        "Remark": fields.Str(required=True, load_from="Remark"),
        "SGID": fields.Str(required=True, load_from="SGID"),
        "SubnetID": fields.Str(required=True, load_from="SubnetID"),
        "VPCID": fields.Str(required=True, load_from="VPCID"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class OPLogInfoSchema(schema.ResponseSchema):
    """OPLogInfo - operation log entry"""
    fields = {
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "IsSuccess": fields.Str(required=False, load_from="IsSuccess"),
        "OPLogsID": fields.Str(required=False, load_from="OPLogsID"),
        "OPName": fields.Str(required=False, load_from="OPName"),
        "OPTime": fields.Int(required=False, load_from="OPTime"),
        "OpMessage": fields.Str(required=False, load_from="OpMessage"),
        "Region": fields.Str(required=False, load_from="Region"),
        "ResourceID": fields.Str(required=False, load_from="ResourceID"),
        "ResourceType": fields.Int(required=False, load_from="ResourceType"),
        "RetCode": fields.Int(required=False, load_from="RetCode"),
        "UserEmail": fields.Str(required=False, load_from="UserEmail"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class PhysicalIPInfoSchema(schema.ResponseSchema):
    """PhysicalIPInfo - physical IP information"""
    fields = {
        "BindResourceID": fields.Str(required=True, load_from="BindResourceID"),
        "BindResourceType": fields.Str(
            required=True, load_from="BindResourceType"
        ),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "IP": fields.Str(required=True, load_from="IP"),
        "Name": fields.Str(required=True, load_from="Name"),
        "OperatorName": fields.Str(required=True, load_from="OperatorName"),
        "PhysicalIPID": fields.Str(required=True, load_from="PhysicalIPID"),
        "Region": fields.Str(required=True, load_from="Region"),
        "Remark": fields.Str(required=True, load_from="Remark"),
        "Status": fields.Str(required=True, load_from="Status"),
        "UpdateTime": fields.Int(required=True, load_from="UpdateTime"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class RSInfoSchema(schema.ResponseSchema):
    """RSInfo - real server (service node) info associated with a forwarding rule"""
    fields = {
        "BindResourceID": fields.Str(required=True, load_from="BindResourceID"),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "IP": fields.Str(required=True, load_from="IP"),
        "LBID": fields.Str(required=True, load_from="LBID"),
        "Name": fields.Str(required=True, load_from="Name"),
        "Port": fields.Int(required=True, load_from="Port"),
        "RSID": fields.Str(required=True, load_from="RSID"),
        "RSMode": fields.Str(required=True, load_from="RSMode"),
        "RSStatus": fields.Str(required=True, load_from="RSStatus"),
        "UpdateTime": fields.Int(required=True, load_from="UpdateTime"),
        "VSID": fields.Str(required=True, load_from="VSID"),
        "Weight": fields.Int(required=True, load_from="Weight"),
    }
class RecycledResourceInfoSchema(schema.ResponseSchema):
    """RecycledResourceInfo - recycle-bin resource information"""
    fields = {
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "DeleteTime": fields.Int(required=True, load_from="DeleteTime"),
        "Description": fields.Str(required=True, load_from="Description"),
        "ExpireTime": fields.Int(required=True, load_from="ExpireTime"),
        "IsAutoTerminated": fields.Bool(
            required=True, load_from="IsAutoTerminated"
        ),
        "Name": fields.Str(required=True, load_from="Name"),
        "Region": fields.Str(required=True, load_from="Region"),
        "ResourceID": fields.Str(required=True, load_from="ResourceID"),
        "ResourceType": fields.Str(required=True, load_from="ResourceType"),
        "Status": fields.Str(required=False, load_from="Status"),
        "WillTerminateTime": fields.Int(
            required=True, load_from="WillTerminateTime"
        ),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class SGRuleInfoSchema(schema.ResponseSchema):
    """SGRuleInfo - security group rule information"""
    fields = {
        "DstPort": fields.Str(required=False, load_from="DstPort"),
        "IsIn": fields.Str(required=False, load_from="IsIn"),
        "Priority": fields.Str(required=False, load_from="Priority"),
        "ProtocolType": fields.Str(required=False, load_from="ProtocolType"),
        "RuleAction": fields.Str(required=False, load_from="RuleAction"),
        "RuleID": fields.Str(required=False, load_from="RuleID"),
        "SrcIP": fields.Str(required=False, load_from="SrcIP"),
    }
class SGInfoSchema(schema.ResponseSchema):
    """SGInfo - security group information"""
    fields = {
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "ResourceCount": fields.Int(required=False, load_from="ResourceCount"),
        "Rule": fields.List(SGRuleInfoSchema()),
        "RuleCount": fields.Int(required=False, load_from="RuleCount"),
        "SGID": fields.Str(required=False, load_from="SGID"),
        "Status": fields.Str(required=False, load_from="Status"),
        "UpdateTime": fields.Int(required=False, load_from="UpdateTime"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class SGResourceInfoSchema(schema.ResponseSchema):
    """SGResourceInfo - resource info bound to a security group"""
    fields = {
        "Name": fields.Str(required=True, load_from="Name"),
        "Region": fields.Str(required=True, load_from="Region"),
        "ResourceID": fields.Str(required=True, load_from="ResourceID"),
        "ResourceType": fields.Str(required=True, load_from="ResourceType"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class SnapshotInfoSchema(schema.ResponseSchema):
    """SnapshotInfo - snapshot details"""
    fields = {
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "DiskID": fields.Str(required=False, load_from="DiskID"),
        "DiskType": fields.Str(required=False, load_from="DiskType"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "SnapshotID": fields.Str(required=False, load_from="SnapshotID"),
        "SnapshotStatus": fields.Str(
            required=False, load_from="SnapshotStatus"
        ),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class StorageTypeInfoSchema(schema.ResponseSchema):
    """StorageTypeInfo - storage type information"""
    fields = {
        "Region": fields.Str(required=True, load_from="Region"),
        "SetArch": fields.Str(required=True, load_from="SetArch"),
        "StorageType": fields.Str(required=True, load_from="StorageType"),
        "StorageTypeAlias": fields.Str(
            required=True, load_from="StorageTypeAlias"
        ),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class SubnetInfoSchema(schema.ResponseSchema):
    """SubnetInfo - subnet information"""
    fields = {
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Network": fields.Str(required=False, load_from="Network"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "State": fields.Str(required=False, load_from="State"),
        "SubnetID": fields.Str(required=False, load_from="SubnetID"),
        "UpdateTime": fields.Int(required=False, load_from="UpdateTime"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class UserInfoSchema(schema.ResponseSchema):
    """UserInfo - tenant (user) information"""
    fields = {
        "Amount": fields.Float(required=False, load_from="Amount"),
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "Email": fields.Str(required=False, load_from="Email"),
        "PrivateKey": fields.Str(required=False, load_from="PrivateKey"),
        "PublicKey": fields.Str(required=False, load_from="PublicKey"),
        "Status": fields.Str(required=False, load_from="Status"),
        "UpdateTime": fields.Int(required=False, load_from="UpdateTime"),
        "UserID": fields.Int(required=False, load_from="UserID"),
    }
class VMDiskInfoSchema(schema.ResponseSchema):
    """VMDiskInfo - virtual machine disk information"""
    fields = {
        "DiskID": fields.Str(required=False, load_from="DiskID"),
        "Drive": fields.Str(required=False, load_from="Drive"),
        "IsElastic": fields.Str(required=False, load_from="IsElastic"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Size": fields.Int(required=False, load_from="Size"),
        "Type": fields.Str(required=False, load_from="Type"),
    }
class VMIPInfoSchema(schema.ResponseSchema):
    """VMIPInfo - virtual machine IP information"""
    fields = {
        "IP": fields.Str(required=False, load_from="IP"),
        "IPVersion": fields.Str(required=False, load_from="IPVersion"),
        "InterfaceID": fields.Str(required=False, load_from="InterfaceID"),
        "IsElastic": fields.Str(required=False, load_from="IsElastic"),
        "MAC": fields.Str(required=False, load_from="MAC"),
        "SGID": fields.Str(required=False, load_from="SGID"),
        "SGName": fields.Str(required=False, load_from="SGName"),
        "SubnetID": fields.Str(required=False, load_from="SubnetID"),
        "SubnetName": fields.Str(required=False, load_from="SubnetName"),
        "Type": fields.Str(required=False, load_from="Type"),
        "VPCID": fields.Str(required=False, load_from="VPCID"),
        "VPCName": fields.Str(required=False, load_from="VPCName"),
    }
class VMInstanceInfoSchema(schema.ResponseSchema):
    """VMInstanceInfo - UCloudStack virtual machine information"""
    fields = {
        "CPU": fields.Int(required=False, load_from="CPU"),
        "ChargeType": fields.Str(required=False, load_from="ChargeType"),
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "DiskInfos": fields.List(VMDiskInfoSchema()),
        "ExpireTime": fields.Int(required=False, load_from="ExpireTime"),
        "IPInfos": fields.List(VMIPInfoSchema()),
        "ImageID": fields.Str(required=False, load_from="ImageID"),
        "Memory": fields.Int(required=False, load_from="Memory"),
        "Name": fields.Str(required=False, load_from="Name"),
        "OSName": fields.Str(required=False, load_from="OSName"),
        "OSType": fields.Str(required=False, load_from="OSType"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "State": fields.Str(required=False, load_from="State"),
        "SubnetID": fields.Str(required=False, load_from="SubnetID"),
        "SubnetName": fields.Str(required=False, load_from="SubnetName"),
        "VMID": fields.Str(required=False, load_from="VMID"),
        "VMType": fields.Str(required=False, load_from="VMType"),
        "VMTypeAlias": fields.Str(required=False, load_from="VMTypeAlias"),
        "VPCID": fields.Str(required=False, load_from="VPCID"),
        "VPCName": fields.Str(required=False, load_from="VPCName"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class VMTypeInfoSchema(schema.ResponseSchema):
    """VMTypeInfo - host (VM) instance-type information."""
    fields = {
        "Region": fields.Str(required=True, load_from="Region"),
        "SetArch": fields.Str(required=True, load_from="SetArch"),
        "VMType": fields.Str(required=True, load_from="VMType"),
        "VMTypeAlias": fields.Str(required=True, load_from="VMTypeAlias"),
        "Zone": fields.Str(required=True, load_from="Zone"),
    }
class VPCInfoSchema(schema.ResponseSchema):
    """VPCInfo - VPC information."""
    fields = {
        "CreateTime": fields.Int(required=False, load_from="CreateTime"),
        "Name": fields.Str(required=False, load_from="Name"),
        "Network": fields.Str(required=False, load_from="Network"),
        "Region": fields.Str(required=False, load_from="Region"),
        "Remark": fields.Str(required=False, load_from="Remark"),
        "State": fields.Str(required=False, load_from="State"),
        "SubnetCount": fields.Int(required=False, load_from="SubnetCount"),
        "SubnetInfos": fields.List(SubnetInfoSchema()),
        "UpdateTime": fields.Int(required=False, load_from="UpdateTime"),
        "VPCID": fields.Str(required=False, load_from="VPCID"),
        "Zone": fields.Str(required=False, load_from="Zone"),
    }
class VSPolicyInfoSchema(schema.ResponseSchema):
    """VSPolicyInfo - content-forwarding rule information."""
    fields = {
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "Domain": fields.Str(required=True, load_from="Domain"),
        "LBID": fields.Str(required=True, load_from="LBID"),
        "Path": fields.Str(required=True, load_from="Path"),
        "PolicyID": fields.Str(required=True, load_from="PolicyID"),
        "PolicyStatus": fields.Str(required=True, load_from="PolicyStatus"),
        "RSInfos": fields.List(RSInfoSchema()),
        "UpdateTime": fields.Int(required=True, load_from="UpdateTime"),
        "VSID": fields.Str(required=True, load_from="VSID"),
    }
class VSInfoSchema(schema.ResponseSchema):
    """VSInfo - VServer (load-balancer listener) information."""
    fields = {
        "AlarmTemplateID": fields.Str(
            required=True, load_from="AlarmTemplateID"
        ),
        "CACertificateID": fields.Str(
            required=False, load_from="CACertificateID"
        ),
        "CreateTime": fields.Int(required=True, load_from="CreateTime"),
        "Domain": fields.Str(required=False, load_from="Domain"),
        "HealthcheckType": fields.Str(
            required=True, load_from="HealthcheckType"
        ),
        "KeepaliveTimeout": fields.Int(
            required=True, load_from="KeepaliveTimeout"
        ),
        "LBID": fields.Str(required=True, load_from="LBID"),
        "Path": fields.Str(required=False, load_from="Path"),
        "PersistenceKey": fields.Str(
            required=False, load_from="PersistenceKey"
        ),
        "PersistenceType": fields.Str(
            required=True, load_from="PersistenceType"
        ),
        "Port": fields.Int(required=True, load_from="Port"),
        "Protocol": fields.Str(required=True, load_from="Protocol"),
        "RSHealthStatus": fields.Str(required=True, load_from="RSHealthStatus"),
        "RSInfos": fields.List(RSInfoSchema()),
        "SSLMode": fields.Str(required=False, load_from="SSLMode"),
        "Scheduler": fields.Str(required=True, load_from="Scheduler"),
        "ServerCertificateID": fields.Str(
            required=False, load_from="ServerCertificateID"
        ),
        "UpdateTime": fields.Int(required=True, load_from="UpdateTime"),
        "VSID": fields.Str(required=True, load_from="VSID"),
        "VSPolicyInfos": fields.List(VSPolicyInfoSchema()),
        "VSStatus": fields.Str(required=True, load_from="VSStatus"),
    }
class PriceInfoSchema(schema.ResponseSchema):
    """PriceInfo - price information."""
    fields = {
        "ChargeType": fields.Str(required=True, load_from="ChargeType"),
        "Price": fields.Float(required=True, load_from="Price"),
    }
| 43.85205 | 80 | 0.649161 |
1624081fe12775e6430fa6a20048ec3a52c12b1a | 1,142 | py | Python | examples/random_elements_with_probability.py | Worvast/python-utils | 1cb4bf9e73b589b1e543685cb0427f05e1731165 | [
"MIT"
] | 4 | 2019-02-20T16:59:39.000Z | 2020-04-08T02:04:58.000Z | examples/random_elements_with_probability.py | Worvast/python-utils | 1cb4bf9e73b589b1e543685cb0427f05e1731165 | [
"MIT"
] | 29 | 2019-02-22T16:19:25.000Z | 2022-03-31T13:02:14.000Z | examples/random_elements_with_probability.py | Worvast/python-utils | 1cb4bf9e73b589b1e543685cb0427f05e1731165 | [
"MIT"
] | 2 | 2019-02-22T14:32:28.000Z | 2020-03-19T15:46:06.000Z | from devoutils.faker import SyslogFakeGenerator
from collections import OrderedDict
import random
from devo.sender import Sender
def get_choices():
    """Return an OrderedDict mapping each message text to its selection probability."""
    weighted_options = (
        ("Failed", 0.5),
        ("Success", 0.2),
        ("Totally broken", 0.1),
        ("404", 0.1),
        ("500", 0.05),
        ("What?", 0.05),
    )
    return OrderedDict(weighted_options)
if __name__ == "__main__":
    # Load the Jinja2 template that describes the shape of each fake event.
    with open("./random_elements_with_probability.jinja2", 'r') as myfile:
        template = myfile.read()
    con = None
    # This example needs a Devo sender connection, e.g.:
    # con = Sender(config="./config.yaml")
    # Register get_choices as the "choices" provider used by the template.
    custom = {"choices": get_choices}
    # With simulation=True events are only printed; set it to False (and supply
    # a real `con`) to actually send the generated data.
    f = SyslogFakeGenerator(engine=con,
                            template=template,
                            simulation=True,
                            probability=100,
                            frequency=(1, 1),
                            providers=custom,
                            verbose=True)
    f.start()
| 30.864865 | 74 | 0.507881 |
f75cbacb34696af5531fc4f834d1d064c8056fe0 | 343 | py | Python | psi/serialize/int.py | delta-mpc/python-psi | 1665de12a713b37abd889268c66de84cddb1bf84 | [
"Apache-2.0"
] | 35 | 2021-05-28T10:03:09.000Z | 2022-03-24T12:08:19.000Z | psi/serialize/int.py | delta-mpc/python-psi | 1665de12a713b37abd889268c66de84cddb1bf84 | [
"Apache-2.0"
] | 9 | 2021-07-15T09:16:34.000Z | 2022-03-31T03:59:16.000Z | psi/serialize/int.py | delta-mpc/python-psi | 1665de12a713b37abd889268c66de84cddb1bf84 | [
"Apache-2.0"
] | 16 | 2021-06-18T02:18:56.000Z | 2022-03-25T02:43:48.000Z | __all__ = ["int_to_bytes", "bytes_to_int"]
def int_to_bytes(value: int):
    """Serialize a non-negative int as big-endian bytes, zero-padded to a
    multiple of 4 bytes (minimum 4)."""
    raw_len = (value.bit_length() + 7) // 8
    padded_len = ((raw_len + 3) // 4) * 4
    return value.to_bytes(padded_len or 4, "big")
def bytes_to_int(data: bytes):
    """Deserialize big-endian bytes back to an int (inverse of int_to_bytes,
    up to the zero padding)."""
    return int.from_bytes(data, byteorder="big")
| 26.384615 | 56 | 0.673469 |
e8bf56587376642b882d2cadbeefb037a69a9258 | 1,077 | bzl | Python | go/go.bzl | dprotaso/rules_docker | 6d9fdf2ca948ba4cfe3da778bb5afae71a3cf8dd | [
"Apache-2.0"
] | null | null | null | go/go.bzl | dprotaso/rules_docker | 6d9fdf2ca948ba4cfe3da778bb5afae71a3cf8dd | [
"Apache-2.0"
] | null | null | null | go/go.bzl | dprotaso/rules_docker | 6d9fdf2ca948ba4cfe3da778bb5afae71a3cf8dd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# !!!! THIS IS A GENERATED FILE — DO NOT EDIT IT BY HAND !!!!
#
# To regenerate this file, run ./update_deps.sh from the root of the
# git repository.
# Pinned gcr.io/distroless/base image digests keyed by tag; regenerated by
# ./update_deps.sh (see the header comment above).
DIGESTS = {
    # "gcr.io/distroless/base:debug" circa 2018-10-15 14:08 +0200
    "debug": "sha256:f1220177b570f74608e2b5868220c6c1ca190b51ceb8db26a131ff7c13b1e648",
    # "gcr.io/distroless/base:latest" circa 2018-10-15 14:08 +0200
    "latest": "sha256:628939ac8bf3f49571d05c6c76b8688cb4a851af6c7088e599388259875bde20",
}
| 41.423077 | 88 | 0.753946 |
142a36e6df94b14e3543795d670f5c51b98336dc | 1,110 | py | Python | Recommendation/basiclib.py | city292/DeepLearningBook | 778add16d7e515d4fdbc5902e3931395e51585c8 | [
"MIT"
] | null | null | null | Recommendation/basiclib.py | city292/DeepLearningBook | 778add16d7e515d4fdbc5902e3931395e51585c8 | [
"MIT"
] | null | null | null | Recommendation/basiclib.py | city292/DeepLearningBook | 778add16d7e515d4fdbc5902e3931395e51585c8 | [
"MIT"
] | null | null | null | # coding: utf-8
#
# basiclib.py
#
# Author: Huang Anbu
# Date: 2017.3
#
# Description: Basic Configuration and Interface
#
# options: hyper-parameter setting
#
# optimizer: optimization algorithm
#
# Copyright © 2017. All Rights Reserved.
# ===============================================================================================
import os
import sys
import numpy
import theano
import time
import cPickle
import itertools
import glob, gzip
import theano.tensor as T
from theano.tensor.signal import pool
from theano.tensor.nnet import conv2d
import matplotlib.pyplot as plt
from collections import *
import optimization
from theano.tensor.shared_randomstreams import RandomStreams
# Hyper-parameter settings shared by the training scripts (see header comment).
# NOTE(review): this file is Python 2 era (cPickle/theano imports above).
options = {
	"batch_size" : 1, 
	"lr" : 0.05, 
	"cd_k" : 15,
	"n_hidden" : 100,
	"print_freq" : 50,
	"valid_freq" : 50,
	"n_epoch" : 100,
	"optimizer" : "adadelta"
}
# Optimizer-name -> update-rule function from the `optimization` module;
# presumably resolved via options["optimizer"] — confirm at call sites.
optimizer = {"sgd" : optimization.sgd, 
			 "momentum" : optimization.momentum,
			 "nesterov_momentum" : optimization.nesterov_momentum, 
			 "adagrad" : optimization.adagrad,
			 "adadelta" : optimization.adadelta,
			 "rmsprop" : optimization.rmsprop}
| 20.943396 | 97 | 0.669369 |
a3995f6e8dd4cc5e754f58c79e12b4b95ca338e6 | 1,743 | py | Python | some_completed_notebooks/energy_levels_not_commented/hamiltonian.py | Quantum-Computing-Cooperation/Quantum_Hackathon_2021 | 69bb50ebd93cbc2bdca59de360408a5a2ecd6f2e | [
"Apache-2.0"
] | 4 | 2021-11-17T19:42:15.000Z | 2022-03-05T18:36:59.000Z | some_completed_notebooks/energy_levels_not_commented/hamiltonian.py | Quantum-Computing-Cooperation/Quantum_Hackathon_2021 | 69bb50ebd93cbc2bdca59de360408a5a2ecd6f2e | [
"Apache-2.0"
] | null | null | null | some_completed_notebooks/energy_levels_not_commented/hamiltonian.py | Quantum-Computing-Cooperation/Quantum_Hackathon_2021 | 69bb50ebd93cbc2bdca59de360408a5a2ecd6f2e | [
"Apache-2.0"
] | 6 | 2021-11-19T18:37:07.000Z | 2021-11-20T22:26:17.000Z | import numpy as np
from qiskit.quantum_info import Pauli
from qiskit.opflow import PauliOp, SummedOp
def generate_pauli(idx_x, idx_z, n):
    """Build an n-qubit Pauli with X components at indices `idx_x` and
    Z components at indices `idx_z` (qiskit (z, x) boolean-mask convention)."""
    x_flags = np.zeros(n, dtype=bool)
    z_flags = np.zeros(n, dtype=bool)
    x_flags[list(idx_x)] = True
    z_flags[list(idx_z)] = True
    return Pauli((z_flags, x_flags))
def generate_XYZ(J_x, J_y, J_z, field, n_spins=3, pbc=True):
    """Construct the XYZ-Heisenberg Hamiltonian with a transverse field on an
    open (or, with pbc=True, periodic) chain of `n_spins` spins, as a sum of
    weighted PauliOp terms (XX, YY, ZZ bonds plus single-site X field)."""
    # Nearest-neighbour bonds; the wrap-around bond goes first, matching the
    # original append order.
    edges = ([[0, n_spins - 1]] if pbc else []) + [[i, i + 1] for i in range(n_spins - 1)]
    int_x_list = [generate_pauli(e, [], n_spins) for e in edges]
    int_y_list = [generate_pauli(e, e, n_spins) for e in edges]
    int_z_list = [generate_pauli([], e, n_spins) for e in edges]
    field_list = [generate_pauli([i], [], n_spins) for i in range(n_spins)]
    # Accumulate terms in the same order as before: X bonds, Y bonds, Z bonds,
    # then the field terms.
    terms = (
        [(p, J_x) for p in int_x_list]
        + [(p, J_y) for p in int_y_list]
        + [(p, J_z) for p in int_z_list]
        + [(p, field) for p in field_list]
    )
    H = PauliOp(*terms[0])
    for pauli, coeff in terms[1:]:
        H = H + PauliOp(pauli, coeff)
    return H
| 37.891304 | 86 | 0.619048 |
68b321279218e6c6c5192f66477e08d0aded5abc | 1,529 | py | Python | list1/task3/agent.py | ErykKrupa/metaheuristic-algorithms | cdeb283b4bb052328fd1d06e0da398a2932a7602 | [
"MIT"
] | null | null | null | list1/task3/agent.py | ErykKrupa/metaheuristic-algorithms | cdeb283b4bb052328fd1d06e0da398a2932a7602 | [
"MIT"
] | null | null | null | list1/task3/agent.py | ErykKrupa/metaheuristic-algorithms | cdeb283b4bb052328fd1d06e0da398a2932a7602 | [
"MIT"
] | null | null | null | import numpy as np
EMPTY = 0  # walkable cell
WALL = 1   # impassable cell
AGENT = 5  # cell currently occupied by the agent
EXIT = 8   # maze exit cell
class Agent:
    """Maze walker over a 2-D numpy board of cell codes (EMPTY/WALL/AGENT/EXIT).

    The board passed to the constructor is copied, so the caller's array is
    never mutated.
    """
    def __init__(self, board):
        # np.where(board == AGENT) yields one-element row/col index arrays;
        # assumes exactly one AGENT cell on the board — TODO confirm.
        agent_position = np.where(board == AGENT)
        self.board = board.copy()
        self.current_position = [int(agent_position[0]), int(agent_position[1])]
    def look(self, directory):
        """Return the cell code adjacent in `directory` ('U'/'D'/'L'/'R'/'H')."""
        return self._look(self._get_changes(directory))
    def go(self, directory):
        """Attempt one step in `directory`; returns the direction taken.

        Raises HitWallException when the target cell is a WALL.
        """
        changes = self._get_changes(directory)
        neighbour = self._look(changes)
        new_position = [self.current_position[0] + changes[0], self.current_position[1] + changes[1]]
        if neighbour == EMPTY:
            # Ordinary step: vacate the old cell, occupy the new one.
            self.board[self.current_position[0], self.current_position[1]] = EMPTY
            self.board[new_position[0], new_position[1]] = AGENT
            self.current_position = new_position
        elif neighbour == WALL:
            raise HitWallException
        elif neighbour == EXIT:
            # Leaving the maze: the old cell is cleared but the EXIT cell is
            # not overwritten with AGENT.
            self.board[self.current_position[0], self.current_position[1]] = EMPTY
            self.current_position = new_position
        return directory
    def _look(self, changes):
        # Peek at the cell offset by `changes` from the current position.
        return int(self.board[self.current_position[0] + changes[0],
                              self.current_position[1] + changes[1]])
    def _get_changes(self, directory):
        # Map a direction letter to its (row, col) delta.
        return self._changes[directory]
    # (row, col) deltas per direction letter.
    _changes = {
        'U': [-1, 0],  # UP
        'D': [1, 0],  # DOWN
        'L': [0, -1],  # LEFT
        'R': [0, 1],  # RIGHT
        'H': [0, 0]  # HERE
    }
class HitWallException(Exception):
    """Raised by Agent.go when the target cell is a WALL."""
    pass
| 29.980392 | 101 | 0.59189 |
4f4cc4e0819148bfa4f8821a8b74a66a3740ff31 | 910 | py | Python | 06.7_bst_mirror.py | EashanKaushik/Data-Structures | e5bd391e029cb47e650d5665647ff57590b9b343 | [
"MIT"
] | null | null | null | 06.7_bst_mirror.py | EashanKaushik/Data-Structures | e5bd391e029cb47e650d5665647ff57590b9b343 | [
"MIT"
] | null | null | null | 06.7_bst_mirror.py | EashanKaushik/Data-Structures | e5bd391e029cb47e650d5665647ff57590b9b343 | [
"MIT"
] | null | null | null | class Node:
    def __init__(self, data=None, left=None, right=None):
        """Create a tree node storing `data` with optional left/right children."""
        self._left = left
        self._data = data
        self._right = right
    @property
    def left(self):
        """Left child node (or None)."""
        return self._left
    @left.setter
    def left(self, left):
        # Replace the left-child reference.
        self._left = left
    @property
    def right(self):
        """Right child node (or None)."""
        return self._right
    @right.setter
    def right(self, right):
        # Replace the right-child reference.
        self._right = right
    @property
    def data(self):
        """Value stored in this node."""
        return self._data
@data.setter
def data(self, left):
self._data = data
def inorder(root):
    """Print the values of the tree rooted at `root` in in-order
    (ascending for a BST), one value per line."""
    if root is None:
        return
    inorder(root.left)
    print(root.data)
    inorder(root.right)
def mirror(root):
    """Swap the left/right children of every node in place, turning the tree
    into its mirror image."""
    if not root:
        return
    mirror(root.left)
    mirror(root.right)
    root.left, root.right = root.right, root.left
if __name__ == '__main__':
    # Demo: build a small BST, print it in-order (ascending), mirror it in
    # place, then print it again (now descending).
    tree = Node(2)
    tree.left = Node(0)
    tree.left.left = Node(-5)
    tree.left.right = Node(1)
    tree.right = Node(10)
    tree.right.left = Node(5)
    tree.right.right = Node(15)
    inorder(tree)
    mirror(tree)
    inorder(tree)
| 16.545455 | 54 | 0.68022 |
87077970b374e46bc22e2caa861cb145cc70d048 | 6,605 | py | Python | enos/sample/SampleHelper.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 5 | 2020-05-09T09:21:22.000Z | 2021-05-31T02:42:56.000Z | enos/sample/SampleHelper.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 4 | 2020-04-18T03:28:09.000Z | 2021-05-28T09:41:05.000Z | enos/sample/SampleHelper.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 7 | 2020-01-19T07:58:28.000Z | 2021-12-22T06:53:43.000Z | import locale
from enos.core.MqttClient import MqttClient
from enos.core.constant.DeviceCredential import DeviceCredential
from enos.core.login.DynamicActivationLogin import DynamicActivationLogin
from enos.core.login.MessageIntegrationLogin import MessageIntegrationLogin
from enos.core.login.NormalDeviceLogin import NormalDeviceLogin
from enos.core.util.StringI18n import StringI18n
from enos.message.upstream.topo.SubDeviceInfo import SubDeviceInfo
class SampleHelper:
    """Shared fixtures for the EnOS device-SDK samples: broker URLs, device
    credentials, demo payloads/topology data, and login-request factories.

    NOTE(review): every statement in this class body runs at import time,
    including the MqttClient construction and the locale.setlocale calls.
    """
    TCP_SERVER_URL = "tcp://xxx:11883"  # for beta tcp connection
    SSL_SERVER_URL = "ssl://xxx:18883"  # for beta ssl connection
    GW_PRODUCT_KEY = "gw_product_key"
    GW_PRODUCT_SECRET = "gw_product_secret"
    GW_DEVICE_KEY = "gw_device_key"
    GW_DEVICE_SECRET = "gw_device_secret"
    # measurepoint_resume/device_register/report_measurepoint
    GW1_PRODUCT_KEY = "gw1_product_key"
    GW1_PRODUCT_SECRET = "gw1_product_secret"
    GW1_DEVICE_KEY = "gw1_device_key"
    GW1_DEVICE_SECRET = "gw1_device_secret"
    # get_tsl_template/attribute_query
    GW2_PRODUCT_KEY = "gw2_product_key"
    GW2_PRODUCT_SECRET = "gw2_product_secret"
    GW2_DEVICE_KEY = "gw2_device_key"
    GW2_DEVICE_SECRET = "gw2_device_secret"
    # login_device/ota/topo
    GW3_PRODUCT_KEY = "gw3_product_key"
    GW3_PRODUCT_SECRET = "gw3_product_secret"
    GW3_DEVICE_KEY = "gw3_device_key"
    GW3_DEVICE_SECRET = "gw3_device_secret"
    GW_PRODUCT_KEY_RAW = "gw_product_key_raw"
    GW_PRODUCT_SECRET_RAW = "gw_product_secret_raw"
    GW_DEVICE_KEY_RAW = "gw_device_key_raw"
    GW_DEVICE_SECRET_RAW = "gw_device_secret_raw"
    # sud_device
    SUB_PRODUCT_KEY = "sub_product_key"
    SUB_PRODUCT_SECRET = "sub_product_secret"
    SUB_DEVICE_KEY = "sub_device_key"
    SUB_DEVICE_SECRET = "sub_device_secret"
    # up raw device
    SUB_PRODUCT_KEY_RAW = "sub_product_key_raw"
    SUB_PRODUCT_SECRET_RAW = "sub_product_secret_raw"
    SUB_DEVICE_KEY_RAW = "sub_device_key_raw"
    SUB_DEVICE_SECRET_RAW = "sub_device_secret_raw"
    # down raw device
    SUB_PRODUCT_KEY_DOWN_RAW = "sub_product_key_down_raw"
    SUB_PRODUCT_SECRET_DOWN_RAW = "sub_product_secret_down_raw"
    SUB_DEVICE_KEY_DOWN_RAW = "sub_device_key_down_raw"
    SUB_DEVICE_SECRET_DOWN_RAW = "sub_device_secret_down_raw"
    # get_sub_tsl_template
    SUB_PRODUCT_KEY_TSL_TEMPLATE = "sub_product_key_tsl_template"
    SUB_PRODUCT_SECRET_TSL_TEMPLATE = "sub_product_secret_tsl_template"
    SUB_DEVICE_KEY_TSL_TEMPLATE = "sub_device_key_tsl_template"
    SUB_DEVICE_SECRET_TSL_TEMPLATE = "sub_device_secret_tsl_template"
    # topo request
    SUB_PRODUCT_KEY_TOPO = "sub_product_key_topo"
    SUB_PRODUCT_SECRET_TOPO = "sub_product_secret_topo"
    SUB_DEVICE_KEY_TOPO = "sub_device_key_topo"
    SUB_DEVICE_SECRET_TOPO = "sub_device_secret_topo"
    # device_register/MEASUREPOINT_BATCH_RESUME/report_measurepoint
    SUB1_PRODUCT_KEY = "sub1_product_key"
    SUB1_PRODUCT_SECRET = "sub1_product_secret"
    SUB1_DEVICE_KEY = "sub1_device_key"
    SUB1_DEVICE_SECRET = "sub1_device_secret"
    # loginbatch/Ota
    SUB2_PRODUCT_KEY = "sub2_product_key"
    SUB2_PRODUCT_SECRET = "sub2_product_secret"
    SUB2_DEVICE_KEY = "sub2_device_key"
    SUB2_DEVICE_SECRET = "sub2_device_secret"
    SUB3_PRODUCT_KEY = "sub3_product_key"
    SUB3_PRODUCT_SECRET = "sub3_product_secret"
    SUB3_DEVICE_KEY = "sub3_device_key"
    SUB3_DEVICE_SECRET = "sub3_device_secret"
    EVENT_DICT = {'temp': 120.0, 'desc': 'temp too high'}
    # for model_up_raw
    RAW_PAYLOAD = bytes([0x01, 0x00, 0x00, 0x00, 0x14, 0x01, 0x00, 0x04, 0x00, 0x00, 0x26, 0xf5])
    RAW_PAYLOAD_REPLY = bytes([0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0xc8])
    DYNAMIC_ACTIVATE_PRODUCT_KEY = "dynamic_activate_product_key"
    DYNAMIC_ACTIVATE_PRODUCT_SECRET = "dynamic_activate_product_secret"
    DYNAMIC_ACTIVATE_DEVICE_KEY = "dynamic_activate_device_key"
    # NOTE(review): locale.setlocale changes the process-wide locale and its
    # return value (the resulting locale string) is used as the localisation
    # key below — confirm this is intentional.
    SUB_DEVICE_NAME = (StringI18n("auto_test OldDeviceEnosApiTest device1111"))
    SUB_DEVICE_NAME.set_localized_value(locale.setlocale(locale.LC_ALL, 'zh_CN'), '中文设备01')
    SUB_DEVICE_NAME.set_localized_value(locale.setlocale(locale.LC_ALL, 'en_US'), 'eng_dev_01')
    CLIENT = MqttClient(TCP_SERVER_URL, GW2_PRODUCT_KEY, GW2_DEVICE_KEY, GW2_DEVICE_SECRET)
    SUB_DEVICE = DeviceCredential(SUB_PRODUCT_KEY, None, SUB_DEVICE_KEY, SUB_DEVICE_SECRET)
    # for report measure_points
    MEASURE_POINTS = {'measurepoint1': 99, 'measurepoint2': 00}  # `00` is the int literal 0
    # for attributes query
    ATTR = {'attribute1': 4, 'attribute2': 5}
    ATTRIBUTES_KEY = {'attribute1', 'attribute2'}
    EVENTS_VALUE = {'time': 2, 'weather': 6.0}
    # for add sub_devices topo test
    SUB_DERVICES_FOR_ADD_TOPO = list()
    SUB_DERVICES_FOR_TOPO1 = SubDeviceInfo(SUB2_PRODUCT_KEY, SUB2_DEVICE_KEY, SUB2_DEVICE_SECRET)
    SUB_DERVICES_FOR_TOPO2 = SubDeviceInfo(SUB_PRODUCT_KEY_RAW, SUB_DEVICE_KEY_RAW, SUB_DEVICE_SECRET_RAW)
    SUB_DERVICES_FOR_ADD_TOPO.append(SUB_DERVICES_FOR_TOPO1)
    SUB_DERVICES_FOR_ADD_TOPO.append(SUB_DERVICES_FOR_TOPO2)
    # for delete sub_devices topo test
    SUB_DERVICES_FOR_DELETE_TOPO = list()
    # NOTE(review): the two names below are rebound from SubDeviceInfo objects
    # to (product_key, device_key) tuples.
    SUB_DERVICES_FOR_TOPO1 = (SUB2_PRODUCT_KEY, SUB2_DEVICE_KEY)
    SUB_DERVICES_FOR_TOPO2 = (SUB_PRODUCT_KEY_RAW, SUB_DEVICE_KEY_RAW)
    SUB_DERVICES_FOR_DELETE_TOPO.append(SUB_DERVICES_FOR_TOPO1)
    SUB_DERVICES_FOR_DELETE_TOPO.append(SUB_DERVICES_FOR_TOPO2)
    SUB_DEVICES = list()
    SUB_DEVICES.append(DeviceCredential(SUB1_PRODUCT_KEY, "", SUB1_DEVICE_KEY, SUB1_DEVICE_SECRET))
    SUB_DEVICES.append(DeviceCredential(SUB3_PRODUCT_KEY, "", SUB3_DEVICE_KEY, SUB3_DEVICE_SECRET))
    TAGS = {'tag1': 30, 'tag2': 40}
    ATTRIBUTES = {'attribute': 1, 'attribute2': 3}
    @classmethod
    def get_device_static_login(cls):
        """Login request for a device over TCP using static credentials."""
        return NormalDeviceLogin(cls.TCP_SERVER_URL, cls.GW_PRODUCT_KEY, cls.GW_DEVICE_KEY, cls.GW_DEVICE_SECRET)
    @classmethod
    def get_device_static_ssl_login(cls):
        """Same static-credential login, but via the SSL endpoint."""
        return NormalDeviceLogin(cls.SSL_SERVER_URL, cls.GW_PRODUCT_KEY, cls.GW_DEVICE_KEY, cls.GW_DEVICE_SECRET)
    @classmethod
    def get_device_dynamic_login(cls):
        """Dynamic-activation login (product secret + device key; no device secret)."""
        return DynamicActivationLogin(cls.TCP_SERVER_URL, cls.DYNAMIC_ACTIVATE_PRODUCT_KEY,
                                      cls.DYNAMIC_ACTIVATE_PRODUCT_SECRET, cls.DYNAMIC_ACTIVATE_DEVICE_KEY)
    @classmethod
    def get_message_integration_login(cls):
        """Message-integration login for the gateway product."""
        return MessageIntegrationLogin(cls.TCP_SERVER_URL, cls.GW_PRODUCT_KEY, cls.GW_PRODUCT_SECRET)
    @classmethod
    def get_message_integration_up_raw_login(cls):
        """Message-integration login for the raw-upstream sub-device product."""
        return MessageIntegrationLogin(cls.TCP_SERVER_URL, cls.SUB_PRODUCT_KEY_RAW, cls.SUB_PRODUCT_SECRET_RAW)
| 42.339744 | 113 | 0.77835 |
706a751f7747ce19d0f0ab0e4870bea11f0f2794 | 6,188 | py | Python | other_implementations/CN-CRYPTONUMERICS/protect/privacy.py | lucaghislo/thesis_work | 95b45147c7c8294b32c2a12a50e83364b093029d | [
"MIT"
] | null | null | null | other_implementations/CN-CRYPTONUMERICS/protect/privacy.py | lucaghislo/thesis_work | 95b45147c7c8294b32c2a12a50e83364b093029d | [
"MIT"
] | null | null | null | other_implementations/CN-CRYPTONUMERICS/protect/privacy.py | lucaghislo/thesis_work | 95b45147c7c8294b32c2a12a50e83364b093029d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding=utf8
from abc import ABC, abstractmethod
from enum import Enum, auto
__all__ = ['KAnonymity']
class PrivacyModel(ABC):
    """Abstract base for all privacy models; subclasses set ``_model_name``
    and implement ``_get_params``."""
    def _get_config(self):
        """Serialize this model into the backend configuration structure."""
        return {
            'privacyModelType': self._model_name,
            'params': self._get_params(),
        }
    @abstractmethod
    def _get_params(self):
        """Return the model-specific parameters as a dict."""
    def __repr__(self):
        params = self._get_params()
        return f'<{type(self).__name__}: {params!r}>'
class GlobalPrivacyModel(PrivacyModel):
    """Marker base class for privacy models applied to the dataset as a whole."""
    pass
class AttributePrivacyModel(PrivacyModel):
    """Base class for privacy models scoped to a single attribute/column."""
    def _validate(self, col_type, col_kind, col_id, col_hierarchy):
        # Default: accept any column configuration; subclasses may override.
        return True
class KAnonymity(GlobalPrivacyModel):
    '''k-Anonymity privacy model
    Implements a k-Anonymity algorithm as described in *k-anonymity: A
    model for protecting privacy, Sweeney L., 2002*. Requires that at
    least one attribute is set as a quasi-identifier.
    Parameters
    ----------
    k : int
        The **k** parameter as described in the paper, specifying the minimum
        equivalence class size desired.
    '''
    _model_name = 'K_ANONYMITY'
    def __init__(self, k):
        '''Store *k*, validating it through the ``k`` property setter.'''
        self.k = k
    @property
    def k(self):
        '''The **k** parameter itself. Can be read as a property:
        >>> prot.k
        3
        Alternatively, can be used to change the **k** parameter:
        >>> prot.k = 3
        '''
        return self._k
    @k.setter
    def k(self, k):
        # Coerce to int first so e.g. "3" and 3.0 are accepted uniformly.
        try:
            k = int(k)
        except (ValueError, TypeError):
            raise ValueError('Must provide an integer to KAnonymity')
        if k <= 0:
            raise ValueError('KAnonymity requires k >= 1')
        self._k = k
    def _get_params(self):
        # Payload consumed by PrivacyModel._get_config().
        return {'k': self.k}
class EDDifferentialPrivacy(GlobalPrivacyModel):
    '''(ε,δ)-Differential Privacy
    Implements (ε,δ)-Differential Privacy using the SafePub algorithm based on
    (k,β)-SDGS.
    Note: Coming soon. Please contact us for more info at
    `support <mailto:support@cryptonumerics.com>`_\\.
    Parameters
    ----------
    epsilon : float
        The ε parameter for (ε,δ)-Differential Privacy
    delta : float
        The δ parameter for (ε,δ)-Differential Privacy
    degree : str
        Degree of generalization to apply to the dataset. One of 'none', 'low',
        'low_medium', 'medium', 'medium_high', 'high', or 'complete'.
    '''
    _model_name = 'NOT_IMPLEMENTED'
    class DGScheme(Enum):
        """Generalization degrees accepted by the ``degree`` property."""
        NONE = auto()
        LOW = auto()
        LOW_MEDIUM = auto()
        MEDIUM = auto()
        MEDIUM_HIGH = auto()
        HIGH = auto()
        COMPLETE = auto()
    def __init__(self, epsilon, delta, degree=None):
        # XXX: Should we check for very large values?
        # Feature gate: this model ships only in the paid edition, so
        # construction always raises; the assignments below are kept as
        # scaffolding for when the gate is removed.
        raise NotImplementedError('Only available in paid version, please '
                                  'contact support@cryptonumerics.com')
        self.epsilon = epsilon
        self.delta = delta
        self.degree = degree
    @property
    def epsilon(self):
        """The ε parameter (positive float)."""
        return self._epsilon
    @epsilon.setter
    def epsilon(self, epsilon):
        try:
            epsilon = float(epsilon)
        except (ValueError, TypeError):
            # Fixed grammar: was 'Epsilon must be an float'.
            raise ValueError('Epsilon must be a float')
        if epsilon <= 0:
            raise ValueError('Epsilon must have a positive value')
        self._epsilon = epsilon
    @property
    def delta(self):
        """The δ parameter (positive float)."""
        return self._delta
    @delta.setter
    def delta(self, delta):
        try:
            delta = float(delta)
        except (ValueError, TypeError):
            # BUG FIX: the message wrongly referred to Epsilon
            # (copy-paste from the epsilon setter).
            raise ValueError('Delta must be a float')
        if delta <= 0:
            raise ValueError('Delta must have a positive value')
        self._delta = delta
    @property
    def degree(self):
        """Generalization degree as a ``DGScheme`` member, or None."""
        return self._degree
    @degree.setter
    def degree(self, degree):
        if degree is None:
            self._degree = None
            return
        try:
            # Accept case-insensitive names such as 'low_medium'.
            self._degree = EDDifferentialPrivacy.DGScheme[degree.upper()]
        except KeyError:
            raise ValueError(
                f'Invalid generalization degree {degree}, valid values '
                f'are: '
                f'{EDDifferentialPrivacy.DGScheme.__members__.keys()}')
    def _get_params(self):
        # Payload consumed by PrivacyModel._get_config(); 'degree' is
        # included only when explicitly set.
        params = {
            'epsilon': self.epsilon,
            'delta': self.delta,
        }
        if self.degree is not None:
            params['degree'] = self.degree.name
        return params
# class TCloseness(AttributePrivacyModel):
# def __init__(self, t, hierarchical=False):
# self.t = t
# if hierarchical:
# self._model_name = 'HIERARCHICAL_DISTANCE_T_CLOSENESS'
# else:
# self._model_name = 'EQUAL_DISTANCE_T_CLOSENESS'
# def _validate(self, col_type, col_kind, col_id, col_hier):
# if (self._model_name == 'HIERARCHICAL_DISTANCE_T_CLOSENESS'
# and col_hier is None):
# raise ValueError('Must supply hierarchies when using t-Closeness '
# 'in hierarchical mode')
# return True
# @property
# def t(self):
# return self._t
# @t.setter
# def t(self, t):
# if t <= 0:
# raise ValueError('t must be greater than 0')
# try:
# self._t = t
# except (ValueError, TypeError):
# raise ValueError('t must be representable as a float')
# def _get_params(self):
# return {'t': self.t}
# class LDiversity(AttributePrivacyModel):
# def __init__(self, l, entropy=False):
# self.l = l
# if entropy:
# self._model_name = 'ENTROPY_L_DIVERSITY'
# else:
# self._model_name = 'DISTINCT_L_DIVERSITY'
# @property
# def l(self):
# return self._l
# @l.setter
# def l(self, l):
# if l <= 0:
# raise ValueError('l must be greater than 0')
# try:
# self._l = l
# except (ValueError, TypeError):
# raise ValueError('l must be representable as a float')
# def _get_params(self):
# return {'l': self.l}
| 26.444444 | 80 | 0.573206 |
b6587e1f3643a1b0946f7404dc1294c49cbb8f68 | 2,919 | py | Python | src/utils.py | BUCS640/Combined-CNN-RNN-for-emotion-recognition | fc9f7a79b4d6901319c63005f11b7dae36cac2f5 | [
"MIT"
] | 32 | 2020-06-23T10:13:38.000Z | 2022-03-27T07:04:09.000Z | src/utils.py | BUCS640/Combined-CNN-RNN-for-emotion-recognition | fc9f7a79b4d6901319c63005f11b7dae36cac2f5 | [
"MIT"
] | 1 | 2021-07-15T07:45:18.000Z | 2021-07-19T08:12:05.000Z | src/utils.py | BUCS640/Combined-CNN-RNN-for-emotion-recognition | fc9f7a79b4d6901319c63005f11b7dae36cac2f5 | [
"MIT"
] | 13 | 2020-08-04T05:02:54.000Z | 2022-03-27T06:38:01.000Z | import os
import cv2
import pandas as pd
from src.config import PROCESSED_FRAMES_DATA_DIR, RAW_VIDEO_DATA_DIR
from src.data import DataSet
class VideoHelper():
    """
    Get meta-information from videos. Private methods are used only locally because we don't save videos on the server
    """
    def __init__(self):
        # Pre-load the filename -> frame-count table written by
        # extract_video_lengths().
        video_lengths_df = pd.read_csv('data/video_lengths.csv', sep=',')
        video_lengths = {}
        for filename, length in video_lengths_df.values:
            video_lengths[filename] = length
        self.video_lengths = video_lengths
    def get_num_frames(self, video_filename):
        """
        Get number of frames in particular video
        :param video_filename: video filename without extension
        :return: number of frames
        """
        return self.video_lengths[video_filename]
    def get_private_test_video_filenames(self):
        """
        Get test video filenames that have no annotations
        :return: list of video filenames without extension
        """
        return list(self.video_lengths.keys())
    @staticmethod
    def extract_video_lengths():
        """
        Extract number of frames from videos and write to 'video_lengths.csv'
        """
        private_test_video_filenames = VideoHelper._extract_private_test_video_filenames()
        video_lengths = {}
        for video_filename in private_test_video_filenames:
            video_lengths[video_filename] = VideoHelper._extract_num_frames(video_filename)
        video_lengths_df = pd.DataFrame(video_lengths.items())
        video_lengths_df.to_csv('data/video_lengths.csv', sep=',', index=False)
    @staticmethod
    def _clear_video_filename(video_filename):
        # Strip the crop-side suffixes so the raw (uncropped) source video
        # file name is recovered.
        video_filename = video_filename.replace('_right', '')
        video_filename = video_filename.replace('_left', '')
        return video_filename
    @staticmethod
    def _extract_num_frames(video_filename):
        # Raw videos may be .mp4 or .avi; try .mp4 first.
        video_filename = VideoHelper._clear_video_filename(video_filename)
        video_path = os.path.join(RAW_VIDEO_DATA_DIR, video_filename + '.mp4')
        if not os.path.isfile(video_path):
            video_path = os.path.join(RAW_VIDEO_DATA_DIR, video_filename + '.avi')
        cap = cv2.VideoCapture(video_path)
        try:
            length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
        finally:
            # BUG FIX: the capture handle was never released (resource leak
            # when called for many videos).
            cap.release()
        return length
    @staticmethod
    def _extract_private_test_video_filenames():
        # Private-test set = all processed-frame entries minus annotated
        # train/test videos.
        all_video_filenames = set(os.listdir(PROCESSED_FRAMES_DATA_DIR))
        dataset = DataSet()
        train_test_video_filenames = set(dataset.data['train'].keys()) | set(dataset.data['test'].keys())
        private_test_video_filenames = all_video_filenames - train_test_video_filenames
        # Drop hidden files such as .DS_Store.
        return [video_filename for video_filename in private_test_video_filenames
                if not video_filename.startswith('.')]
| 36.4875 | 118 | 0.690647 |
fe54b0ae74778148222499cfd8d3f1bf296b5f8d | 5,479 | py | Python | flask_unchained/bundles/api/extensions/marshmallow.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 69 | 2018-10-10T01:59:11.000Z | 2022-03-29T17:29:30.000Z | flask_unchained/bundles/api/extensions/marshmallow.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 18 | 2018-11-17T12:42:02.000Z | 2021-05-22T18:45:27.000Z | flask_unchained/bundles/api/extensions/marshmallow.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 7 | 2018-10-12T16:20:25.000Z | 2021-10-06T12:18:21.000Z | import marshmallow as ma
import flask_marshmallow as flask_ma
from flask_marshmallow.sqla import HyperlinkRelated
from flask_unchained import FlaskUnchained
from ..model_serializer import ModelSerializer
class Marshmallow:
    """
    The `Marshmallow` extension::

        from flask_unchained.bundles.api import ma

    Allows decorating a :class:`~flask_unchained.bundles.api.ModelSerializer`
    with :meth:`serializer` to specify it should be used for creating objects,
    listing them, or as the fallback.

    Also provides aliases from the following modules:

    **flask_unchained.bundles.api**

    .. autosummary::

        ~flask_unchained.bundles.api.ModelSerializer

    **marshmallow.decorators**

    .. autosummary::

        ~marshmallow.decorators.pre_load
        ~marshmallow.decorators.post_load
        ~marshmallow.decorators.pre_dump
        ~marshmallow.decorators.post_dump
        ~marshmallow.decorators.validates
        ~marshmallow.decorators.validates_schema

    **marshmallow.exceptions**

    .. autosummary::

        ~marshmallow.exceptions.ValidationError

    **marshmallow.fields**

    .. autosummary::

        ~marshmallow.fields.Bool
        ~marshmallow.fields.Boolean
        ~marshmallow.fields.Constant
        ~marshmallow.fields.Date
        ~marshmallow.fields.DateTime
        ~marshmallow.fields.NaiveDateTime
        ~marshmallow.fields.AwareDateTime
        ~marshmallow.fields.Decimal
        ~marshmallow.fields.Dict
        ~marshmallow.fields.Email
        ~marshmallow.fields.Field
        ~marshmallow.fields.Float
        ~marshmallow.fields.Function
        ~marshmallow.fields.Int
        ~marshmallow.fields.Integer
        ~marshmallow.fields.List
        ~marshmallow.fields.Mapping
        ~marshmallow.fields.Method
        ~marshmallow.fields.Nested
        ~marshmallow.fields.Number
        ~marshmallow.fields.Pluck
        ~marshmallow.fields.Raw
        ~marshmallow.fields.Str
        ~marshmallow.fields.String
        ~marshmallow.fields.Time
        ~marshmallow.fields.TimeDelta
        ~marshmallow.fields.Tuple
        ~marshmallow.fields.UUID
        ~marshmallow.fields.Url
        ~marshmallow.fields.URL

    **flask_marshmallow.fields**

    .. autosummary::

        ~flask_marshmallow.fields.AbsoluteUrlFor
        ~flask_marshmallow.fields.AbsoluteURLFor
        ~flask_marshmallow.fields.UrlFor
        ~flask_marshmallow.fields.URLFor
        ~flask_marshmallow.fields.Hyperlinks

    **flask_marshmallow.sqla**

    .. autosummary::

        ~flask_marshmallow.sqla.HyperlinkRelated
    """

    def __init__(self):
        self.ModelSerializer = ModelSerializer

        # Alias the marshmallow decorators and the ValidationError exception
        # directly off the top-level ``ma`` module.
        for attr in ('pre_load', 'post_load', 'pre_dump', 'post_dump',
                     'validates', 'validates_schema', 'ValidationError'):
            setattr(self, attr, getattr(ma, attr))

        # Alias every supported marshmallow field class.
        for attr in ('Bool', 'Boolean', 'Constant', 'Date', 'DateTime',
                     'NaiveDateTime', 'AwareDateTime', 'Decimal', 'Dict',
                     'Email', 'Field', 'Float', 'Function', 'Int', 'Integer',
                     'List', 'Mapping', 'Method', 'Nested', 'Number', 'Pluck',
                     'Raw', 'Str', 'String', 'Time', 'TimeDelta', 'Tuple',
                     'UUID', 'Url', 'URL'):
            setattr(self, attr, getattr(ma.fields, attr))

        # Alias the flask_marshmallow URL/hyperlink field helpers.
        for attr in ('AbsoluteUrlFor', 'AbsoluteURLFor', 'UrlFor', 'URLFor',
                     'Hyperlinks'):
            setattr(self, attr, getattr(flask_ma.fields, attr))
        self.HyperlinkRelated = HyperlinkRelated

    def init_app(self, app: FlaskUnchained):
        # Bind the SQLAlchemy session onto the serializer options class so
        # serializers can resolve model instances, then register ourselves.
        sqlalchemy_db = app.extensions['sqlalchemy'].db
        self.ModelSerializer.OPTIONS_CLASS.session = sqlalchemy_db.session
        app.extensions['ma'] = self

    def serializer(self, create=False, many=False):
        """
        Decorator to mark a :class:`Serializer` subclass for a specific purpose, ie,
        to be used during object creation **or** for serializing lists of objects.

        :param create: Whether or not this serializer is for object creation.
        :param many: Whether or not this serializer is for lists of objects.
        """
        if create and many:
            raise Exception('Can only set one of `create` or `many` to `True`')

        def wrapper(cls):
            # Tag the class so the bundle can pick the right serializer for
            # each use case; 'all' is the fallback kind.
            if create:
                cls.__kind__ = 'create'
            elif many:
                cls.__kind__ = 'many'
            else:
                cls.__kind__ = 'all'
            return cls

        return wrapper
| 31.854651 | 84 | 0.655047 |
ce4e17f9bfcd8f65763315b3ccb4429f66e35a11 | 301 | py | Python | app/views.py | GrishonNganga/Elevator-Pitch | ae18886cae72b60f2d1069ec40f64d65cfb85905 | [
"MIT"
] | null | null | null | app/views.py | GrishonNganga/Elevator-Pitch | ae18886cae72b60f2d1069ec40f64d65cfb85905 | [
"MIT"
] | null | null | null | app/views.py | GrishonNganga/Elevator-Pitch | ae18886cae72b60f2d1069ec40f64d65cfb85905 | [
"MIT"
] | 1 | 2020-12-09T15:39:11.000Z | 2020-12-09T15:39:11.000Z | from flask import Blueprint
from flask import render_template
from flask_login import current_user
# Blueprint for the app's public pages; picked up and registered by the
# application factory under the name 'app'.
app = Blueprint('app', __name__)


@app.context_processor
def inject_user():
    # Expose the currently logged-in user as ``user`` in every template
    # rendered from this blueprint.
    return dict(user=current_user)


@app.route('/')
def home():
    # Landing page; passes the user explicitly as well (redundant with the
    # context processor above, but harmless).
    return render_template('home.html', user = current_user)
| 21.5 | 60 | 0.76412 |
11b23d03ba615edaff8766c884d940bf485961d6 | 255,083 | py | Python | tests/admin_views/tests.py | allanice001/django | aac75fa30af22965f993c6dd8da8cd9882ba2f21 | [
"BSD-3-Clause"
] | null | null | null | tests/admin_views/tests.py | allanice001/django | aac75fa30af22965f993c6dd8da8cd9882ba2f21 | [
"BSD-3-Clause"
] | 1 | 2021-03-24T12:21:05.000Z | 2021-03-24T12:31:52.000Z | tests/admin_views/tests.py | allanice001/django | aac75fa30af22965f993c6dd8da8cd9882ba2f21 | [
"BSD-3-Clause"
] | 2 | 2021-03-24T12:11:48.000Z | 2021-06-10T19:56:03.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import re
import unittest
from django.contrib.admin import ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.validation import ModelAdminValidator
from django.contrib.admin.views.main import IS_POPUP_VAR
# Register auth models with the admin.
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.exceptions import ImproperlyConfigured
from django.core.files import temp as tempfile
from django.core.urlresolvers import (
NoReverseMatch, get_script_prefix, reverse, set_script_prefix,
)
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from django.test.utils import patch_logger
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.cache import get_max_age
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Article, BarAccount, Book, Category,
Chapter, Character, Child, Choice, City, Collector, Color2,
ComplexSortedPerson, CoverLetter, CustomArticle, DooHickey, Employee,
EmptyModel, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, ModelWithStringPrimaryKey, OtherStory, Paper, Parent,
ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture,
Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post, Promo,
Question, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant,
RowLevelChangePermissionModel, Section, ShortMessage, Simple, Story,
Subscriber, Telegram, Topping, UnchangeableObject, UndeletableObject,
UnorderedObject, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour,
)
# Exact login-failure message shown by the admin login form; tests below
# assert against this text verbatim.
ERROR_MESSAGE = (
    "Please enter the correct username and password for a staff account. "
    "Note that both fields may be case-sensitive."
)
class AdminFieldExtractionMixin(object):
    """
    Helper methods for extracting data from AdminForm.
    """
    def get_admin_form_fields(self, response):
        """
        Return a list of AdminFields for the AdminForm in the response.

        Flattens the AdminForm -> fieldsets -> field lines -> fields
        hierarchy into a single list, preserving order.
        """
        fields = []
        for fieldset in response.context['adminform']:
            for field_line in fieldset:
                fields.extend(field_line)
        return fields

    def get_admin_fields(self, response):
        """
        Return the fields for the response's AdminForm.
        """
        # Was a pointless identity comprehension `[f for f in ...]`;
        # list() copies with the same result.
        return list(self.get_admin_form_fields(response))

    def get_admin_field(self, response, field_name):
        """
        Return the field for the given field_name, or None if not found.
        """
        for field in self.get_admin_fields(response):
            # field.field is a plain dict for readonly fields, otherwise a
            # bound form field with a .name attribute — presumably mirroring
            # AdminForm internals; TODO confirm against django.contrib.admin.
            name = field.field['name'] if isinstance(field.field, dict) else field.field.name
            if name == field_name:
                return field
        # Explicit (previously implicit) miss result.
        return None
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls",
                   USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixtures, login and custom assertions for the admin view tests."""

    fixtures = ['admin-views-users.xml', 'admin-views-colors.xml',
                'admin-views-fabrics.xml', 'admin-views-books.xml']

    def setUp(self):
        # Every test in this suite runs logged in as the fixture superuser.
        self.client.login(username='super', password='secret')

    def tearDown(self):
        # Discard any locale formats cached while the test ran.
        formats.reset_format_cache()

    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Assert that text1 appears before text2 in the response content.
        """
        self.assertEqual(response.status_code, 200)
        body = response.content
        first_pos = body.index(force_bytes(text1))
        second_pos = body.index(force_bytes(text2))
        self.assertLess(first_pos, second_pos, failing_msg)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, app should redirect and add it.
"""
add_url = reverse('admin:admin_views_article_add')
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_admin_static_template_tag(self):
"""
Test that admin_static.static is pointing to the collectstatic version
(as django.contrib.collectstatic is in installed apps).
"""
old_url = staticfiles_storage.base_url
staticfiles_storage.base_url = '/test/'
try:
self.assertEqual(static('path'), '/test/path')
finally:
staticfiles_storage.base_url = old_url
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response")
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(1,)))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
Ensure GET on the change_view works (returns an HTTP 404 error, see
#11191) when passing a string as the PK argument for a model with an
integer PK field.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=('abc',)))
self.assertEqual(response.status_code, 404)
def test_basic_inheritance_GET_string_PK(self):
"""
Ensure GET on the change_view works on inherited models (returns an
HTTP 404 error, see #19951) when passing a string as the PK argument
for a model with an integer PK field.
"""
response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)))
self.assertEqual(response.status_code, 404)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""
Ensure http response from a popup is properly escaped.
"""
post_data = {
'_popup': '1',
'title': 'title with a new\nline',
'content': 'some content',
'date_0': '2010-09-10',
'date_1': '14:55:39',
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddRelatedObjectPopup')
self.assertContains(response, 'title with a new\\u000Aline')
# Post data for edit inline
inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": "1",
# there is no title in database, give one here or formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": "1",
"article_set-1-id": "2",
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": "3",
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
response = self.client.post(reverse('admin:admin_views_section_change', args=(1,)), self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order.")
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
self.assertContentBefore(response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order.")
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(response, '2009,', '2008,',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008,', '2000,',
"Results of sorting on ModelAdmin method are out of order.")
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(response, '2000,', '2008,',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008,', '2009,',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
# Sort by name, gender
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `Modeladmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# Check that we get the columns we expect if we have two columns
# that correspond to the same ordering field
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, 'Name')
self.assertContains(response, 'Colored name')
# Check order
self.assertContentBefore(response, 'Name', 'Colored name')
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
Ensures that the admin shows default sort indicators for all
kinds of 'ordering' fields: field names, method on the model
admin and model itself, and other callables. See #17252.
"""
models = [(AdminOrderedField, 'adminorderedfield'),
(AdminOrderedModelMethod, 'adminorderedmodelmethod'),
(AdminOrderedAdminMethod, 'adminorderedadminmethod'),
(AdminOrderedCallable, 'adminorderedcallable')]
for model, url in models:
model.objects.create(stuff='The Last Item', order=3)
model.objects.create(stuff='The First Item', order=1)
model.objects.create(stuff='The Middle Item', order=2)
response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
self.assertEqual(response.status_code, 200)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
# Check order of records.
self.assertContentBefore(response, 'The First Item', 'The Middle Item')
self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view")
self.assertNotContains(response, '<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to")
def test_relation_spanning_filters(self):
changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
response = self.client.get(changelist_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
'chap__id__exact': dict(
values=[c.id for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.id == value),
'chap__title': dict(
values=[c.title for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.title == value),
'chap__book__id__exact': dict(
values=[b.id for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.id == value),
'chap__book__name': dict(
values=[b.name for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.name == value),
'chap__book__promo__id__exact': dict(
values=[p.id for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
'chap__book__promo__name': dict(
values=[p.name for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
}
for filter_path, params in filters.items():
for value in params['values']:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s">' % query_string)
# ensure link works
filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context['cl'].queryset.all():
self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse('admin:admin_views_thing_changelist')
response = self.client.get(changelist_url, {'notarealfield': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
changelist_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '4 articles')
response = self.client.get(changelist_url, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get(changelist_url, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse('admin:admin_views_fabric_change', args=(1,))
link2 = reverse('admin:admin_views_fabric_change', args=(2,))
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = "Changelist table isn't showing the right human-readable values set by a model field 'choices' option named group."
self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = "Changelist filter isn't showing options contained inside a model field 'choices' option named group."
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(response,
'<a href="?surface__exact=x">Horizontal</a>', msg_prefix=fail_msg, html=True)
self.assertContains(response,
'<a href="?surface__exact=y">Vertical</a>', msg_prefix=fail_msg, html=True)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse('admin:admin_views_post_changelist'))
self.assertContains(response, 'icon-unknown.gif')
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertContains(response, 'Choisir une heure')
def test_L10N_deactivated(self):
"""
Check if L10N is deactivated, the JavaScript i18n view doesn't
return localized date/time formats. Refs #14824.
"""
with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
response = self.client.get(
"%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
# Filters are allowed if explicitly included in list_filter
response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to whitelist them in list_filter or date_hierarchy.
response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'employee__person_ptr__exact')
response = self.client.get("%s?employee__person_ptr__exact=%d" % (
reverse('admin:admin_views_workhour_changelist'), e1.pk)
)
self.assertEqual(response.status_code, 200)
    def test_disallowed_to_field(self):
        """
        TO_FIELD_VAR values must be rejected with HTTP 400 (and a security
        log entry) unless the field is the primary key or is referenced by
        another model registered with this admin site; add/change/delete
        views enforce the same rule so disallowed values cannot leak.
        """
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'missing_field'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # Specifying a field that is not referred by any other model registered
        # to this admin site should raise an exception.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
        response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
        self.assertEqual(response.status_code, 200)
        # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
        response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
        self.assertEqual(response.status_code, 200)
        # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
        response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
        self.assertEqual(response.status_code, 200)
        # #23329 - Specifying a field that is not referred by any other model directly registered
        # to this admin site but registered through inheritance should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # #23431 - Specifying a field that is only referred to by a inline of a registered
        # model should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # #25622 - Specifying a field of a model only referred by a generic
        # relation should raise DisallowedModelAdminToField.
        url = reverse('admin:admin_views_referencedbygenrel_changelist')
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # We also want to prevent the add, change, and delete views from
        # leaking a disallowed field value.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        section = Section.objects.create()
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.post(reverse('admin:admin_views_section_change', args=(section.pk,)), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.post(reverse('admin:admin_views_section_delete', args=(section.pk,)), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
"""
Regressions test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey pointing to this model
response = self.client.get("%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist'))
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
self.assertContains(response, "opener.dismissRelatedLookupPopup(window, '%s')" % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username='super')
user.set_unusable_password()
user.save()
response = self.client.get(reverse('admin:index'))
self.assertNotContains(response, reverse('admin:password_change'),
msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.')
def test_change_view_with_show_delete_extra_context(self):
"""
Ensured that the 'show_delete' context variable in the admin's change
view actually controls the display of the delete button.
Refs #10057.
"""
instance = UndeletableObject.objects.create(name='foo')
response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
"""
Ensure that AttributeErrors are allowed to bubble when raised inside
a change list view.
Requires a model to be created so there's something to be displayed
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse('admin:admin_views_simple_changelist'))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls
Regression test for #20934
"""
UnchangeableObject.objects.create()
response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
self.assertEqual(response.status_code, 200)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = '/test_admin/admin/admin_views/'
confirm_good_url = reverse('admin:app_list',
kwargs={'app_label': 'admin_views'})
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', args=('admin_views2',))
    def test_proxy_model_content_type_is_used_for_log_entries(self):
        """
        Log entries for proxy models should have the proxy model's content
        type.
        Regression test for #21084.
        """
        # for_concrete_model=False yields the proxy model's own ContentType
        # rather than that of its concrete base model.
        color2_content_type = ContentType.objects.get_for_model(Color2, for_concrete_model=False)
        # add
        color2_add_url = reverse('admin:admin_views_color2_add')
        self.client.post(color2_add_url, {'value': 'orange'})
        # [0] is taken to be the entry just written (presumably LogEntry
        # orders newest-first -- confirm against the model's Meta.ordering).
        color2_addition_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_addition_log.content_type)
        # change
        color_id = color2_addition_log.object_id
        color2_change_url = reverse('admin:admin_views_color2_change', args=(color_id,))
        self.client.post(color2_change_url, {'value': 'blue'})
        color2_change_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_change_log.content_type)
        # delete
        color2_delete_url = reverse('admin:admin_views_color2_delete', args=(color_id,))
        self.client.post(color2_delete_url)
        color2_delete_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_delete_log.content_type)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse('admin:index')
response = self.client.get(url)
self.assertEqual(response.context['site_url'], '/my-site-url/')
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's templates dir in DIRS to take precedence over the admin's
    # templates dir.
    'DIRS': [os.path.join(os.path.dirname(upath(__file__)), 'templates')],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """
    Tests for admin templates overridden by this test app: each override
    marks its bodyclass block with 'bodyclass_consistency_check', proving
    that block.super is honored at every extension point.
    """
    def test_extended_bodyclass_template_change_form(self):
        """
        Ensure that the admin/change_form.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_change_password(self):
        """
        Ensure that the auth/user/change_password.html template uses block
        super in the bodyclass block.
        """
        user = User.objects.get(username='super')
        response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_index(self):
        """
        Ensure that the admin/index.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_change_list(self):
        """
        Ensure that the admin/change_list.html' template uses block.super
        in the bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_login(self):
        """
        Ensure that the admin/login.html template uses block.super in the
        bodyclass block.
        """
        # Must be logged out, otherwise the login view would redirect.
        self.client.logout()
        response = self.client.get(reverse('admin:login'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_delete_confirmation(self):
        """
        Ensure that the admin/delete_confirmation.html template uses
        block.super in the bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """
        Ensure that the admin/delete_selected_confirmation.html template uses
        block.super in bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        # POST payload for the changelist's 'delete selected' bulk action.
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
    """Form-URL behaviour of the admin site registered as 'admin3'."""
    fixtures = ["admin-views-users.xml"]
    current_app = "admin3"

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_change_form_URL_has_correct_value(self):
        """The change view must expose the ModelAdmin's form_url in context."""
        change_url = reverse('admin:admin_views_section_change', args=(1,), current_app=self.current_app)
        response = self.client.get(change_url)
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        self.assertEqual(response.context['form_url'], 'pony')

    def test_initial_data_can_be_overridden(self):
        """
        A ModelAdmin may override the initial form data that would normally
        be taken from the GET parameters.
        """
        add_url = reverse('admin:admin_views_restaurant_add', current_app=self.current_app)
        response = self.client.get(add_url, {'name': 'test_value'})
        # Default behaviour would echo the GET value back into the form...
        self.assertNotContains(response, 'value="test_value"')
        # ...but this ModelAdmin substitutes its own initial value.
        self.assertContains(response, 'value="overridden_value"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
    """The admin serves minified JavaScript only when DEBUG is off."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_js_minified_only_if_debug_is_false(self):
        """
        The minified versions of the JS files are used when DEBUG is False,
        and the unminified sources when DEBUG is True. Refs #17521.
        """
        scripts = ['jquery', 'prepopulate', 'actions', 'collapse', 'inlines']
        with override_settings(DEBUG=False):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            for name in scripts:
                self.assertNotContains(response, '%s.js' % name)
                self.assertContains(response, '%s.min.js' % name)
        with override_settings(DEBUG=True):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            for name in scripts:
                self.assertContains(response, '%s.js' % name)
                self.assertNotContains(response, '%s.min.js' % name)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
    """Tests for the ModelAdmin save_as ("Save as new") feature."""
    fixtures = ['admin-views-users.xml', 'admin-views-person.xml']
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_save_as_duplication(self):
        """Ensure save as actually creates a new person"""
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        self.client.post(reverse('admin:admin_views_person_change', args=(1,)), post_data)
        # A new Person was created...
        self.assertEqual(len(Person.objects.filter(name='John M')), 1)
        # ...and the original object (pk=1) still exists.
        self.assertEqual(len(Person.objects.filter(id=1)), 1)
    def test_save_as_display(self):
        """
        Ensure that 'save as' is displayed when activated and after submitting
        invalid data aside save_as_new will not show us a form to overwrite the
        initial model.
        """
        change_url = reverse('admin:admin_views_person_change', args=(1,))
        response = self.client.get(change_url)
        self.assertTrue(response.context['save_as'])
        # Invalid submission per the docstring (presumably gender=3 is out of
        # range): the re-rendered form must target the add view, not the
        # change view, so the original record cannot be overwritten.
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 3, 'alive': 'checked'}
        response = self.client.post(change_url, post_data)
        self.assertEqual(response.context['form_url'], reverse('admin:admin_views_person_add'))
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
    """
    Tests for custom AdminSite subclasses (registered as 'admin2'/'admin4'):
    custom templates, login forms, and extra views must all be honored.
    """
    def test_custom_admin_site_login_form(self):
        """A custom login form's validation errors surface on the login page."""
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
        login = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, 'custom form error')
    def test_custom_admin_site_login_template(self):
        """A custom login_template is used for the login page."""
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/login.html')
        self.assertContains(response, 'Hello from a custom login template')
    def test_custom_admin_site_logout_template(self):
        """A custom logout_template is used for the logout page."""
        response = self.client.get(reverse('admin2:logout'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/logout.html')
        self.assertContains(response, 'Hello from a custom logout template')
    def test_custom_admin_site_index_view_and_template(self):
        """index_template may be a list of template paths."""
        try:
            response = self.client.get(reverse('admin2:index'))
        except TypeError:
            self.fail('AdminSite.index_template should accept a list of template paths')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/index.html')
        self.assertContains(response, 'Hello from a custom index template *bar*')
    def test_custom_admin_site_app_index_view_and_template(self):
        """A custom app_index_template is used for the app index page."""
        response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/app_index.html')
        self.assertContains(response, 'Hello from a custom app_index template')
    def test_custom_admin_site_password_change_template(self):
        """A custom password_change_template is used for the password form."""
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')
    def test_custom_admin_site_password_change_with_extra_context(self):
        """Extra context supplied to the password change view is rendered."""
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'eggs')
    def test_custom_admin_site_password_change_done_template(self):
        """A custom password_change_done_template is used."""
        response = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')
    def test_custom_admin_site_view(self):
        """A view added via get_urls on the custom site is reachable."""
        self.client.login(username='super', password='secret')
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")
    def test_pwd_change_custom_template(self):
        """change_user_password_template may be a list of template paths."""
        self.client.login(username='super', password='secret')
        su = User.objects.get(username='super')
        try:
            response = self.client.get(
                reverse('admin4:auth_user_password_change', args=(su.pk,))
            )
        except TypeError:
            self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Look up the Permission with codename *perm* for *Model*'s content type."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=content_type, codename=perm)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
fixtures = ['admin-views-users.xml']
    def setUp(self):
        """Grant per-user permissions and prepare login POST payloads."""
        # Setup permissions, for our users who can add, change, and delete.
        # We can't put this into the fixture, because the content type id
        # and the permission id could be different on each run of the test.
        opts = Article._meta
        # User who can add Articles
        add_user = User.objects.get(username='adduser')
        add_user.user_permissions.add(get_perm(Article,
            get_permission_codename('add', opts)))
        # User who can change Articles
        change_user = User.objects.get(username='changeuser')
        change_user.user_permissions.add(get_perm(Article,
            get_permission_codename('change', opts)))
        # Non-staff user with change permission, exercised against the
        # 'has_permission_admin' site (see test_login_has_permission).
        change_user2 = User.objects.get(username='nostaff')
        change_user2.user_permissions.add(get_perm(Article,
            get_permission_codename('change', opts)))
        # User who can delete Articles
        delete_user = User.objects.get(username='deleteuser')
        delete_user.user_permissions.add(get_perm(Article,
            get_permission_codename('delete', opts)))
        # ...and Sections as well.
        delete_user.user_permissions.add(get_perm(Section,
            get_permission_codename('delete', Section._meta)))
        # login POST dicts; REDIRECT_FIELD_NAME carries the post-login
        # destination expected by the login view.
        self.index_url = reverse('admin:index')
        self.super_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'super',
            'password': 'secret',
        }
        # Same superuser, but identified by email address.
        self.super_email_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'super@example.com',
            'password': 'secret',
        }
        # Email-as-username with a wrong password.
        self.super_email_bad_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'super@example.com',
            'password': 'notsecret',
        }
        self.adduser_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'adduser',
            'password': 'secret',
        }
        self.changeuser_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'changeuser',
            'password': 'secret',
        }
        self.deleteuser_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'deleteuser',
            'password': 'secret',
        }
        # Targets the alternate site that admits non-staff users.
        self.nostaff_login = {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'nostaff',
            'password': 'secret',
        }
        # A user with no admin-relevant permissions at all.
        self.joepublic_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'username': 'joepublic',
            'password': 'secret',
        }
        # Malformed login attempt: no username supplied.
        self.no_username_login = {
            REDIRECT_FIELD_NAME: self.index_url,
            'password': 'secret',
        }
    def test_login(self):
        """
        Make sure only staff members can log in.
        Successful posts to the login page will redirect to the original url.
        Unsuccessful attempts will continue to render the login page with
        a 200 status code.
        """
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        # Super User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        # A redirect response carries no template context.
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Test if user enters email address
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.super_email_login)
        # An email address is not a username; login fails with the generic error.
        self.assertContains(login, ERROR_MESSAGE)
        # only correct passwords get a username hint
        login = self.client.post(login_url, self.super_email_bad_login)
        self.assertContains(login, ERROR_MESSAGE)
        # A second user sharing the email address of the superuser.
        new_user = User(username='jondoe', password='secret', email='super@example.com')
        new_user.save()
        # check to ensure if there are multiple email addresses a user doesn't get a 500
        login = self.client.post(login_url, self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # Add User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.adduser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Change User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.changeuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Delete User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.deleteuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Regular User should not be able to login.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)
        # Requests without username should not return 500 errors.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.no_username_login)
        self.assertEqual(login.status_code, 200)
        # Instead, the form reports the missing field as a validation error.
        form = login.context[0].get('form')
        self.assertEqual(form.errors['username'][0], 'This field is required.')
    def test_login_has_permission(self):
        """
        Login checks against the 'has_permission_admin' site: users without
        permissions see 'permission denied', while the non-staff 'nostaff'
        user (granted change permission in setUp) and staff users may log in.
        """
        # Regular User should not be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, 'permission denied')
        # User with permissions should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        # A redirect response carries no template context.
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))
        # Staff should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'deleteuser',
            'password': 'secret',
        })
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '%s?%s' % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
    def test_double_login_is_not_allowed(self):
        """Regression test for #19327"""
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        # A redirect response carries no template context.
        self.assertFalse(login.context)
        # Logging in with non-admin user fails
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)
        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        # Logging in with admin user while already logged in
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
    def test_add_view(self):
        """Test add view restricts access and actually adds items."""
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        # Valid POST payload for creating an Article.
        add_dict = {'title': 'Døm ikke',
                    'content': '<p>great article</p>',
                    'date_0': '2008-03-18', 'date_1': '10:54:39',
                    'section': 1}
        # Change User should not have access to add articles
        self.client.get(self.index_url)
        self.client.post(login_url, self.changeuser_login)
        # make sure the view removes test cookie
        self.assertEqual(self.client.session.test_cookie_worked(), False)
        response = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertEqual(response.status_code, 403)
        # Try POST just to make sure
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertEqual(post.status_code, 403)
        # The fixture ships 3 articles; the forbidden POST added none.
        self.assertEqual(Article.objects.all().count(), 3)
        self.client.get(reverse('admin:logout'))
        # Add user may login and POST to add view, then redirect to admin root
        self.client.get(self.index_url)
        self.client.post(login_url, self.adduser_login)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        # Breadcrumb link back to the changelist -- only shown to users who
        # may actually view the changelist.
        change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
        self.assertNotContains(addpage, change_list_link,
            msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.')
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.all().count(), 4)
        # Saving triggers a notification email (presumably via a ModelAdmin
        # save hook -- confirm against the Article ModelAdmin).
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
        self.client.get(reverse('admin:logout'))
        # Super can add too, but is redirected to the change list view
        self.client.get(self.index_url)
        self.client.post(login_url, self.super_login)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertContains(addpage, change_list_link,
            msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.')
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
        self.assertEqual(Article.objects.all().count(), 5)
        self.client.get(reverse('admin:logout'))
        # 8509 - if a normal user is already logged in, it is possible
        # to change user into the superuser without error
        self.client.login(username='joepublic', password='secret')
        # Check and make sure that if user expires, data still persists
        self.client.get(self.index_url)
        self.client.post(login_url, self.super_login)
        # make sure the view removes test cookie
        self.assertEqual(self.client.session.test_cookie_worked(), False)
    def test_change_view(self):
        """Change view should restrict access and allow users to edit items."""
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        # Valid POST payload for editing an Article.
        change_dict = {'title': 'Ikke fordømt',
                       'content': '<p>edited article</p>',
                       'date_0': '2008-03-18', 'date_1': '10:54:39',
                       'section': 1}
        article_change_url = reverse('admin:admin_views_article_change', args=(1,))
        article_changelist_url = reverse('admin:admin_views_article_changelist')
        # add user should not be able to view the list of article or change any of them
        self.client.get(self.index_url)
        self.client.post(login_url, self.adduser_login)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 403)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 403)
        post = self.client.post(article_change_url, change_dict)
        self.assertEqual(post.status_code, 403)
        self.client.get(reverse('admin:logout'))
        # change user can view all items and edit them
        self.client.get(self.index_url)
        self.client.post(login_url, self.changeuser_login)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 200)
        post = self.client.post(article_change_url, change_dict)
        self.assertRedirects(post, article_changelist_url)
        self.assertEqual(Article.objects.get(pk=1).content, '<p>edited article</p>')
        # one error in form should produce singular error message, multiple errors plural
        change_dict['title'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(post, 'Please correct the error below.',
            msg_prefix='Singular error message not found in response to post with one error')
        change_dict['content'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(post, 'Please correct the errors below.',
            msg_prefix='Plural error message not found in response to post with multiple errors')
        self.client.get(reverse('admin:logout'))
        # Test redirection when using row-level change permissions. Refs #11513.
        # Per the assertions below, odd-pk instances are denied and even-pk
        # instances allowed (presumably by the ModelAdmin's row-level
        # has_change_permission -- confirm against its definition).
        r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
        r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
        change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
        change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
        for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
            self.client.post(login_url, login_dict)
            response = self.client.get(change_url_1)
            self.assertEqual(response.status_code, 403)
            response = self.client.post(change_url_1, {'name': 'changed'})
            # The forbidden POST must not have modified the object.
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertEqual(response.status_code, 403)
            response = self.client.get(change_url_2)
            self.assertEqual(response.status_code, 200)
            response = self.client.post(change_url_2, {'name': 'changed'})
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertRedirects(response, self.index_url)
            self.client.get(reverse('admin:logout'))
        # Anonymous / malformed logins are bounced to the login form instead.
        for login_dict in [self.joepublic_login, self.no_username_login]:
            self.client.post(login_url, login_dict)
            response = self.client.get(change_url_1, follow=True)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'login-form')
            response = self.client.get(change_url_2, follow=True)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
            # Still holds the value written by the authorized users above.
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'login-form')
            self.client.get(reverse('admin:logout'))
def test_history_view(self):
"""History view should restrict access."""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
# add user should not be able to view the list of article or change any of them
self.client.get(self.index_url)
self.client.post(login_url, self.adduser_login)
response = self.client.get(reverse('admin:admin_views_article_history', args=(1,)))
self.assertEqual(response.status_code, 403)
self.client.get(reverse('admin:logout'))
# change user can view all items and edit them
self.client.get(self.index_url)
self.client.post(login_url, self.changeuser_login)
response = self.client.get(reverse('admin:admin_views_article_history', args=(1,)))
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
RowLevelChangePermissionModel.objects.create(id=2, name="even id")
# NOTE(review): the ModelAdmin for this model presumably grants change
# permission only for even ids — every staff user gets 403 on id=1 and
# 200 on id=2; confirm against the admin registration for this app.
for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
self.client.post(login_url, login_dict)
response = self.client.get(reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(1,)))
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(2,)))
self.assertEqual(response.status_code, 200)
self.client.get(reverse('admin:logout'))
# Non-staff or anonymous users are shown the login form instead.
for login_dict in [self.joepublic_login, self.no_username_login]:
self.client.post(login_url, login_dict)
response = self.client.get(
reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(1,)), follow=True
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
response = self.client.get(
reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(2,)), follow=True
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
    """A non-integer object id in the history URL yields a 404."""
    self.client.post(reverse('admin:login'), self.changeuser_login)
    resp = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)))
    self.assertEqual(resp.status_code, 404)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
# Set up and log in user.
url = reverse('admin:admin_views_article_add')
add_link_text = 'add_id_section'
self.client.post(login_url, self.adduser_login)
# The user can't add sections yet, so they shouldn't see the "add
# section" link.
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add
# section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
user.user_permissions.add(perm)
# The new permission takes effect on the next request — no re-login needed.
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
# Dig the rendered FK widget's flag out of the admin form context.
return response.context['adminform'].form.fields['section'].widget.can_change_related
login_url = reverse('admin:login')
# Set up and log in user.
url = reverse('admin:admin_views_article_add')
change_link_text = 'change_id_section'
self.client.post(login_url, self.adduser_login)
# The user can't change sections yet, so they shouldn't see the "change
# section" link.
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the "change
# section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('change', Section._meta))
user.user_permissions.add(perm)
# The new permission takes effect on the next request — no re-login needed.
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
# Dig the rendered FK widget's flag out of the admin form context.
return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related
login_url = reverse('admin:login')
# Set up and log in user.
url = reverse('admin:admin_views_article_add')
delete_link_text = 'delete_id_sub_section'
self.client.get(self.index_url)
self.client.post(login_url, self.adduser_login)
# The user can't delete sections yet, so they shouldn't see the "delete
# section" link.
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the "delete
# section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('delete', Section._meta))
user.user_permissions.add(perm)
# The new permission takes effect on the next request — no re-login needed.
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_custom_model_admin_templates(self):
"""Each admin view of CustomArticle uses its custom template override."""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
self.client.get(self.index_url)
self.client.post(login_url, self.super_login)
# Test custom change list template with custom extra context
response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, 'custom_admin/change_list.html')
# Test custom add form template
response = self.client.get(reverse('admin:admin_views_customarticle_add'))
self.assertTemplateUsed(response, 'custom_admin/add_form.html')
# Add an article so we can test delete, change, and history views
post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39'
})
self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
self.assertEqual(CustomArticle.objects.all().count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/change_form.html')
response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
# Selecting the delete action from the changelist uses its own template.
response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
'index': 0,
'action': ['delete_selected'],
'_selected_action': ['1'],
})
self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/object_history.html')
self.client.get(reverse('admin:logout'))
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_article_delete', args=(1,))
# add user should not be able to delete articles
self.client.get(self.index_url)
self.client.post(login_url, self.adduser_login)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.all().count(), 3)
self.client.get(reverse('admin:logout'))
# Delete user can delete
self.client.get(self.index_url)
self.client.post(login_url, self.deleteuser_login)
response = self.client.get(reverse('admin:admin_views_section_delete', args=(1,)))
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# test response contains link to related Article
self.assertContains(response, "admin_views/article/1/")
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/1/")
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
self.assertEqual(response.status_code, 200)
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.all().count(), 2)
# NOTE(review): presumably a delete signal handler in the test app sends
# this farewell email — confirm against the test models module.
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
# The deletion is recorded as a LogEntry in the admin log.
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, '1')
self.client.get(reverse('admin:logout'))
def test_disabled_permissions_when_logged_in(self):
    """A logged-in user who is then deactivated sees the login form again."""
    self.client.login(username='super', password='secret')
    user = User.objects.get(username='super')
    user.is_active = False
    user.save()
    resp = self.client.get(self.index_url, follow=True)
    self.assertContains(resp, 'id="login-form"')
    self.assertNotContains(resp, 'Log out')
    resp = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(resp, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
    """A logged-in user stripped of staff status sees the login form again."""
    self.client.login(username='super', password='secret')
    user = User.objects.get(username='super')
    user.is_staff = False
    user.save()
    resp = self.client.get(self.index_url, follow=True)
    self.assertContains(resp, 'id="login-form"')
    self.assertNotContains(resp, 'Log out')
    resp = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(resp, 'id="login-form"')
def test_app_index_fail_early(self):
"""
If a user has no module perms, avoid iterating over all the modeladmins
in the registry.
"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
opts = Article._meta
change_user = User.objects.get(username='changeuser')
permission = get_perm(Article, get_permission_codename('change', opts))
self.client.post(login_url, self.changeuser_login)
# the user has no module permissions, because this module doesn't exist
# Removing the only permission leaves the user with no perms for this app.
change_user.user_permissions.remove(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 403)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
# The content-type based "view on site" shortcut URL for the object.
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
# Logged in? Redirect.
self.client.login(username='super', password='secret')
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# Domain may depend on contrib.sites tests also run
six.assertRegex(self, response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
    """
    Ensure that has_module_permission() returns True for all users who
    have any permission for that module (add, change, or delete), so that
    the module is displayed on the admin index page.
    """
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Each kind of permission-holder should see the app on the index page.
    all_logins = (
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    )
    for credentials in all_logins:
        self.client.post(login_url, credentials)
        resp = self.client.get(self.index_url)
        self.assertContains(resp, 'admin_views')
        self.assertContains(resp, 'Articles')
        self.client.get(reverse('admin:logout'))
def test_overriding_has_module_permission(self):
    """
    Ensure that overriding has_module_permission() has the desired effect.
    In this case, it always returns False, so the module should not be
    displayed on the admin index page for any users.
    """
    login_url = '%s?next=%s' % (reverse('admin7:login'), reverse('admin7:index'))
    index_url = reverse('admin7:index')
    # No kind of user should see the app on this admin site's index page.
    all_logins = (
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    )
    for credentials in all_logins:
        self.client.post(login_url, credentials)
        resp = self.client.get(index_url)
        self.assertNotContains(resp, 'admin_views')
        self.assertNotContains(resp, 'Articles')
        self.client.get(reverse('admin7:logout'))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
fixtures = ['admin-views-users.xml']
def setUp(self):
opts = Report._meta
# User who can change Reports
change_user = User.objects.get(username='changeuser')
change_user.user_permissions.add(get_perm(Report,
get_permission_codename('change', opts)))
# login POST dict
self.changeuser_login = {
REDIRECT_FIELD_NAME: reverse('admin:index'),
'username': 'changeuser',
'password': 'secret',
}
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
# NOTE(review): the Report ModelAdmin presumably strips its standard
# URLs; confirm against the admin registration for this test app.
self.client.get(reverse('admin:index'))
r = self.client.post(reverse('admin:login'), self.changeuser_login)
r = self.client.get(reverse('admin:index'))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
"""Rendering of collected related objects on delete-confirmation pages."""
fixtures = ['admin-views-users.xml', 'deleted-objects.xml']
def setUp(self):
# All requests below run as the fixture superuser unless a test re-logs in.
self.client.login(username='super', password='secret')
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(force_bytes(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*<li>Plot details: <a href="%s">almost finished</a>' % (
reverse('admin:admin_views_plot_change', args=(1,)),
reverse('admin:admin_views_plotdetails_change', args=(1,)))
))
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(1,)))
six.assertRegex(self, response.content, pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse('admin:admin_views_cyclicone_change', args=(1,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse('admin:admin_views_cyclictwo_change', args=(1,)),
)
response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(1,)))
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
"""Related objects the user may not delete are called out by type."""
self.client.logout()
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Plot,
get_permission_codename('delete', Plot._meta)))
self.assertTrue(self.client.login(username='deleteuser',
password='secret'))
# deleteuser can delete Plots but not the related plot details.
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(1,)))
self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
"""Protected related objects are listed as blocking the deletion."""
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
)
def test_not_registered(self):
# A related model without an admin registration renders without a link.
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(1,)))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
'admin:admin_views_plot_change', args=(1,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(1,)))
self.assertContains(response, should_contain)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(2,)))
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
'admin:admin_views_plot_change', args=(2,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(2,)))
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(3,)),
'<li>Super villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_supervillain_change', args=(3,)),
'<li>Secret hideout: floating castle',
'<li>Super secret hideout: super floating castle!',
]
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(3,)))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(3,)))
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = Plot.objects.get(pk=3)
tag = FunkyTag.objects.create(content_object=plot, name='hott')
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
'admin:admin_views_funkytag_change', args=(tag.id,))
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(3,)))
self.assertContains(response, should_contain)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
"""Admin display of models reached through generic relations."""
fixtures = ['admin-views-users.xml', 'deleted-objects.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_generic_content_object_in_list_display(self):
"""The generic content object can be rendered as a list_display column."""
plot = Plot.objects.get(pk=3)
FunkyTag.objects.create(content_object=plot, name='hott')
response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
self.assertContains(response, "%s</td>" % plot)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
"""Admin views for a model whose string primary key needs URL quoting."""
fixtures = ['admin-views-users.xml', 'string-primary-key.xml']
def __init__(self, *args):
super(AdminViewStringPrimaryKeyTest, self).__init__(*args)
# A pk packed with URL-hostile characters to exercise quoting everywhere.
# NOTE(review): set in __init__ rather than setUp; it is constant per test.
self.pk = """abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 -_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
def setUp(self):
self.client.login(username='super', password='secret')
content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
LogEntry.objects.log_action(100, content_type_pk, self.pk, self.pk, 2, change_message='Changed something')
def test_get_history_view(self):
"""
Retrieving the history for an object using urlencoded form of primary
key should work.
Refs #12349, #18550.
"""
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
self.assertContains(response, 'Changed something')
self.assertEqual(response.status_code, 200)
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
self.assertEqual(response.status_code, 200)
def test_changelist_to_changeform_link(self):
"Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"The link from the recent actions list referring to the changeform of the object should be quoted"
response = self.client.get(reverse('admin:index'))
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_without_content_type(self):
"If a LogEntry is missing content_type it will not display it in span tag under the hyperlink."
response = self.client.get(reverse('admin:index'))
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
should_contain = "Model with string primary key" # capitalized in Recent Actions
self.assertContains(response, should_contain)
logentry = LogEntry.objects.get(content_type__model__iexact='modelwithstringprimarykey')
# http://code.djangoproject.com/ticket/10275
# if the log entry doesn't have a content type it should still be
# possible to view the Recent Actions part
logentry.content_type = None
logentry.save()
# Dropping the content type should remove exactly one occurrence.
counted_presence_before = response.content.count(force_bytes(should_contain))
response = self.client.get(reverse('admin:index'))
counted_presence_after = response.content.count(force_bytes(should_contain))
self.assertEqual(counted_presence_before - 1,
counted_presence_after)
def test_logentry_get_admin_url(self):
"LogEntry.get_admin_url returns a URL to edit the entry's object or None for non-existent (possibly deleted) models"
log_entry_model = "modelwithstringprimarykey" # capitalized in Recent Actions
logentry = LogEntry.objects.get(content_type__model__iexact=log_entry_model)
desired_admin_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
self.assertEqual(logentry.get_admin_url(), desired_admin_url)
self.assertIn(iri_to_uri(quote(self.pk)), logentry.get_admin_url())
logentry.content_type.model = "non-existent"
self.assertEqual(logentry.get_admin_url(), None)
def test_logentry_get_edited_object(self):
"LogEntry.get_edited_object returns the edited object of a given LogEntry object"
logentry = LogEntry.objects.get(content_type__model__iexact="modelwithstringprimarykey")
edited_obj = logentry.get_edited_object()
self.assertEqual(logentry.object_id, str(edited_obj.pk))
def test_deleteconfirmation_link(self):
"The link from the delete confirmation page referring back to the changeform of the object should be quoted"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)))
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add should be visible"
add_model = ModelWithStringPrimaryKey(pk="i have something to add")
add_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk='abc_123')
model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse('admin:%s_modelwithstringprimarykey_change' %
ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse('admin:%s_modelwithstringprimarykey_history' %
ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),))
self.assertContains(response, '<a href="%s" class="historylink"' % expected_link)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse('admin:admin_views_modelwithstringprimarykey_add'),
{
'string_pk': '123/history',
"_continue": "1", # Save and continue editing
}
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertIn('/123_2Fhistory/', response['location']) # PK is quoted
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
    """
    Behavior of a view protected by the staff_member_required decorator.
    """
    fixtures = ['admin-views-users.xml']

    def test_secure_view_shows_login_if_not_logged_in(self):
        """Anonymous requests are redirected to the admin login form."""
        url = reverse('secure_view')
        resp = self.client.get(url)
        self.assertRedirects(resp, '%s?next=%s' % (reverse('admin:login'), url))
        resp = self.client.get(url, follow=True)
        self.assertTemplateUsed(resp, 'admin/login.html')
        # The login form should remember where to send the user afterwards.
        self.assertEqual(resp.context[REDIRECT_FIELD_NAME], url)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
"""Admin change/delete views must handle non-ASCII data correctly."""
fixtures = ['admin-views-unicode.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
# Inline formset payload mirrors the fixture's three existing chapters
# plus three empty extra forms.
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": "1",
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
"chapter_set-1-id": "2",
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
"chapter_set-2-id": "3",
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(reverse('admin:admin_views_book_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
Ensure that the delete_view handles non-ASCII characters
"""
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_book_delete', args=(1,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-person.xml']
def setUp(self):
# Authenticate as the superuser provided by the admin-views-users fixture.
self.client.login(username='super', password='secret')
def test_inheritance(self):
    """The changelist for an inherited model (Podcast) renders successfully."""
    Podcast.objects.create(name="This Week in Django",
                           release_date=datetime.date.today())
    resp = self.client.get(reverse('admin:admin_views_podcast_changelist'))
    self.assertEqual(resp.status_code, 200)
def test_inheritance_2(self):
    """The changelist for the Vodcast model renders successfully."""
    Vodcast.objects.create(name="This Week in Django", released=True)
    resp = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
    self.assertEqual(resp.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse('admin:admin_views_person_changelist'))
# 2 inputs per object(the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
self.assertContains(response, "<input", count=19)
# 1 select per object = 3 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "1",
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": "3",
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "1",
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
''' Ensure that non field errors are displayed for each of the
forms in the changelist's formset. Refs #13126.
'''
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
1,
html=True
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
2,
html=True
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "2",
"form-0-alive": "1",
"form-0-gender": "2",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "2",
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# Check that the order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Ensure that pagination works for list_editable items.
Refs #16819.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
"_selected_action": ['1'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse('admin:admin_views_person_changelist'))
self.assertNotEqual(response.context['cl'].list_editable, ())
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
""" Ensure that hidden pk fields aren't displayed in the table body and
that their corresponding human-readable value is displayed instead.
Note that the hidden pk fields are in fact be displayed but
separately (not in the table), and only once.
Refs #12475.
"""
story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = Story.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
response = self.client.get(reverse('admin:admin_views_story_changelist'))
self.assertContains(response, 'id="id_form-0-id"', 1) # Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
""" Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = OtherStory.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
self.assertContains(response, 'id="id_form-0-id"', 1) # Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
    """Tests for the changelist search box (``search_fields`` behavior)."""

    fixtures = ['admin-views-users', 'multiple-child-classes',
                'admin-views-person']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_search_on_sibling_models(self):
        "Check that a search that mentions sibling models"
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")

    def test_with_fk_to_field(self):
        """
        Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        # to_field must round-trip through the search form as a hidden input.
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)

    def test_exact_matches(self):
        # NOTE(review): "bar" matches while the prefix "ba" does not,
        # which implies the Recommendation admin uses an exact-match
        # search field ('=field') — confirm in its ModelAdmin.
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")

        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")

    def test_beginning_matches(self):
        # A startswith-style search field: prefix matches, substrings don't.
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")

        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")

    def test_pluggable_search(self):
        # A ModelAdmin with a custom get_search_results() can match on
        # either name or age.
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)

        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")

        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")

    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        #   1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True)

    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        #   1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # (no extra total-count query when show_full_result_count is False)
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True)
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
    """Tests for inline admins whose models share a common parent class."""

    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_inline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # Regex over the raw (bytes) response body to collect every form
        # field name rendered on the page.
        name_re = re.compile(b'name="(.*?)"')

        # test the add case
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        # test the add case
        # NOTE(review): the second inline formset posts under the prefix
        # "accounts-2" — presumably an auto-deduplicated prefix because both
        # inlines would otherwise default to "accounts"; confirm against the
        # FooAccount/BarAccount inline declarations.
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }

        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere

        # One Persona with one account of each subclass was created.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)

        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id

        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            # account_ptr is the parent-link pk for the inherited model.
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)

        # Editing renamed both accounts in place; no new rows were created.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminActionsTest(TestCase):
    """Tests for admin changelist actions (built-in and custom)."""

    fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')

    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # Second POST confirms the deletion on the confirmation page.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, "<h2>Summary</h2>")
        self.assertContains(confirmation, "<li>Subscribers: 3</li>")
        self.assertContains(confirmation, "<li>External subscribers: 1</li>")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        subscriber = Subscriber.objects.get(id=1)
        # Give the object a pk large enough to attract a thousand separator.
        subscriber.id = 9999
        subscriber.save()
        action_data = {
            ACTION_CHECKBOX_NAME: [9999, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
        self.assertContains(response, 'value="9999"')  # Instead of 9,999
        self.assertContains(response, 'value="2"')

    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")

        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }

        response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data)

        # Each protected Answer is listed with a link to its change page.
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)),
            html=True
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)),
            html=True
        )

    def test_model_admin_default_delete_action_no_change_url(self):
        """
        Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
        Regression test for #20640
        """
        obj = UnchangeableObject.objects.create()
        action_data = {
            ACTION_CHECKBOX_NAME: obj.pk,
            "action": "delete_selected",
            "index": "0",
        }
        response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data)
        # No 500 caused by NoReverseMatch
        self.assertEqual(response.status_code, 200)
        # The page shouldn't display a link to the nonexistent change page
        self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)

    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 302)

    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1'
        response = self.client.post(url, action_data)
        # Redirect target keeps the '?o=1' ordering querystring.
        self.assertRedirects(response, url)

    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        # Streaming responses expose content as an iterator of byte chunks.
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)

    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')

    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist'))
        self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)

    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertNotContains(response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkboxbox in response")
        # NOTE(review): redundant duplicate of the assertion above.
        self.assertNotContains(response, '<input type="checkbox" class="action-select"')

    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertContains(response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions")

    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        self.assertContains(response, 'action-checkbox-column')

    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)

        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        # Nothing was deleted.
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        # Nothing was deleted.
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertContains(response, '0 of 2 selected')

    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        response = self.client.get(
            reverse('admin:admin_views_subscriber_changelist') + '?%s' % IS_POPUP_VAR)
        self.assertEqual(response.context["action_form"], None)

    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
    """Exercise a ModelAdmin wired to a custom ChangeList class."""

    fixtures = ['admin-views-users.xml']

    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)

    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        # Create a gadget through the admin add view.
        add_response = self.client.post(reverse('admin:admin_views_gadget_add'),
                                        {"name": "First Gadget"})
        self.assertEqual(add_response.status_code, 302)  # redirect somewhere
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # A first GET drains the queued success messages...
        self.client.get(changelist_url)
        # ...and a fresh GET confirms the custom ChangeList hides the row.
        listing = self.client.get(changelist_url)
        self.assertEqual(listing.status_code, 200)
        self.assertNotContains(listing, 'First Gadget')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
    """Smoke test: an add view that embeds inline formsets must render."""

    fixtures = ['admin-views-users.xml']

    def setUp(self):
        ok = self.client.login(username='super', password='secret')
        self.assertEqual(ok, True)

    def test_GET_parent_add(self):
        """
        InlineModelAdmin broken?
        """
        # GET of the parent's add form (with its inlines) returns 200.
        page = self.client.get(reverse('admin:admin_views_parent_add'))
        self.assertEqual(page.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
    """
    Tests that the admin views respect a custom ModelAdmin.get_queryset():
    a filtered queryset limits what the changelist/change/history views
    expose, and defer()/only() querysets still allow adding and editing
    objects through the admin (refs #14529, #21013).
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')
        # Three EmptyModel rows; the assertions below expect only pk > 1 to
        # be visible, i.e. the ModelAdmin's queryset presumably filters out
        # the rest -- NOTE(review): confirm against EmptyModelAdmin.
        self.pks = [EmptyModel.objects.create().id for i in range(3)]
        # POST payload for logging in through the admin login view directly.
        self.super_login = {
            REDIRECT_FIELD_NAME: reverse('admin:index'),
            'username': 'super',
            'password': 'secret',
        }

    def test_changelist_view(self):
        """Only objects exposed by the custom queryset show on the changelist."""
        response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
        for i in self.pks:
            if i > 1:
                self.assertContains(response, 'Primary key = %s' % i)
            else:
                self.assertNotContains(response, 'Primary key = %s' % i)

    def test_changelist_view_count_queries(self):
        """The changelist issues a fixed, small number of SQL queries."""
        # create 2 Person objects
        Person.objects.create(name='person1', gender=1)
        Person.objects.create(name='person2', gender=2)
        changelist_url = reverse('admin:admin_views_person_changelist')

        # 4 queries are expected: 1 for the session, 1 for the user,
        # 1 for the count and 1 for the objects on the page
        with self.assertNumQueries(4):
            resp = self.client.get(changelist_url)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        # here one more count(*) query will run, because filters were applied
        with self.assertNumQueries(5):
            extra = {'q': 'not_in_name'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
        with self.assertNumQueries(5):
            extra = {'q': 'person'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        with self.assertNumQueries(5):
            extra = {'gender__exact': '1'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
            self.assertEqual(resp.context['selection_note_all'], '1 selected')

    def test_change_view(self):
        """Objects hidden by the custom queryset 404 on the change view."""
        for i in self.pks:
            response = self.client.get(reverse('admin:admin_views_emptymodel_change', args=(i,)))
            if i > 1:
                self.assertEqual(response.status_code, 200)
            else:
                self.assertEqual(response.status_code, 404)

    def test_add_model_modeladmin_defer_qs(self):
        """Adding works when get_queryset() uses defer() (refs #14529)."""
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()

        # model has __unicode__ method
        self.assertEqual(CoverLetter.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "author": "Candidate, Best",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_coverletter_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The cover letter &quot;Candidate, Best&quot; was added successfully.</li>',
            html=True
        )

        # model has no __unicode__ method
        self.assertEqual(ShortMessage.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "content": "What's this SMS thing?",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_shortmessage_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The short message &quot;ShortMessage object&quot; was added successfully.</li>',
            html=True
        )

    def test_add_model_modeladmin_only_qs(self):
        """Adding works when get_queryset() uses only() (refs #14529)."""
        # Test for #14529. only() is used in ModelAdmin.get_queryset()

        # model has __unicode__ method
        self.assertEqual(Telegram.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "Urgent telegram",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_telegram_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The telegram &quot;Urgent telegram&quot; was added successfully.</li>',
            html=True
        )

        # model has no __unicode__ method
        self.assertEqual(Paper.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_paper_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The paper &quot;Paper object&quot; was added successfully.</li>',
            html=True
        )

    def test_edit_model_modeladmin_defer_qs(self):
        """Editing works when get_queryset() uses defer() (refs #14529)."""
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()

        # model has __unicode__ method
        cl = CoverLetter.objects.create(author="John Doe")
        self.assertEqual(CoverLetter.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "author": "John Doe II",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name. Instance
        # representation is set by model's __unicode__()
        self.assertContains(
            response,
            '<li class="success">The cover letter &quot;John Doe II&quot; was changed successfully.</li>',
            html=True
        )

        # model has no __unicode__ method
        sm = ShortMessage.objects.create(content="This is expensive")
        self.assertEqual(ShortMessage.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "content": "Too expensive",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by six.text_type()
        self.assertContains(
            response,
            '<li class="success">The short message &quot;ShortMessage_Deferred_timestamp object&quot; was changed successfully.</li>',
            html=True
        )

    def test_edit_model_modeladmin_only_qs(self):
        """Editing works when get_queryset() uses only() (refs #14529)."""
        # Test for #14529. only() is used in ModelAdmin.get_queryset()

        # model has __unicode__ method
        t = Telegram.objects.create(title="Frist Telegram")
        self.assertEqual(Telegram.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "Telegram without typo",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The instance
        # representation is set by model's __unicode__()
        self.assertContains(
            response,
            '<li class="success">The telegram &quot;Telegram without typo&quot; was changed successfully.</li>',
            html=True
        )

        # model has no __unicode__ method
        p = Paper.objects.create(title="My Paper Title")
        self.assertEqual(Paper.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by six.text_type()
        self.assertContains(
            response,
            '<li class="success">The paper &quot;Paper_Deferred_author object&quot; was changed successfully.</li>',
            html=True
        )

    def test_history_view_custom_qs(self):
        """
        Ensure that custom querysets are considered for the admin history view.
        Refs #21013.
        """
        self.client.post(reverse('admin:login'), self.super_login)
        FilteredManager.objects.create(pk=1)
        FilteredManager.objects.create(pk=2)
        response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
        self.assertContains(response, "PK=1")
        self.assertContains(response, "PK=2")
        # Both objects must be reachable on the history view as well, even
        # though the default manager is filtered.
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200
        )
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200
        )
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
    """
    Regression tests for file uploads posted through admin inline formsets.
    """
    fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')
        # The Gallery/Picture pair must be built here rather than in fixtures
        # so that the Picture's image field can point at a NamedTemporaryFile.
        # Only the file *name* is needed; the file itself is removed on close.
        with tempfile.NamedTemporaryFile(suffix=".file1") as temp_image:
            temp_image.write(b'a' * (2 ** 21))
            image_path = temp_image.name
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=image_path, gallery=self.gallery)
        self.picture.save()

    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Test that inline file uploads correctly display prior data (#10002).
        """
        # Management-form data plus one existing and one new Picture row,
        # neither of which carries a fresh upload.
        form_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        change_url = reverse('admin:admin_views_gallery_change', args=(self.gallery.id,))
        resp = self.client.post(change_url, form_data)
        # The re-rendered form must still show the previously stored file.
        self.assertContains(resp, b"Currently")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
    """
    Saving inline formsets through the Collector change form for models with
    various primary-key flavours: implicit AutoField, explicit AutoField,
    character PK, integer PK, multi-table inheritance, and an editable
    ordering field (refs #8093, #10992, #11042, #10922).
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Management-form data for every inline on the Collector change form,
        # each formset carrying three blank extra forms owned by collector 1.
        self.post_data = {
            "name": "Test Name",

            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",

            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",

            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",

            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",

            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",

            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }

        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')

        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")

        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')

        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")

        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')

        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")

        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')

        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")

        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')

        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")

        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)

        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",

            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",

            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",

            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",

            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",

            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",

            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",

            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",

            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)

        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
    """
    Check the Cache-Control max-age emitted by the admin views: regular admin
    pages must be marked never-cache (max-age of 0), while the auth helper and
    JavaScript i18n views carry no max-age directive at all.
    """
    fixtures = ['admin-views-users.xml', 'admin-views-colors.xml', 'admin-views-fabrics.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def _fetch_max_age(self, url):
        # GET *url* and return the max-age parsed from the response's
        # Cache-Control header (None when the directive is absent).
        return get_max_age(self.client.get(url))

    def test_admin_index(self):
        "Check the never-cache status of the main index"
        self.assertEqual(self._fetch_max_age(reverse('admin:index')), 0)

    def test_app_index(self):
        "Check the never-cache status of an application index"
        self.assertEqual(self._fetch_max_age(reverse('admin:app_list', args=('admin_views',))), 0)

    def test_model_index(self):
        "Check the never-cache status of a model index"
        self.assertEqual(self._fetch_max_age(reverse('admin:admin_views_fabric_changelist')), 0)

    def test_model_add(self):
        "Check the never-cache status of a model add page"
        self.assertEqual(self._fetch_max_age(reverse('admin:admin_views_fabric_add')), 0)

    def test_model_view(self):
        "Check the never-cache status of a model edit page"
        self.assertEqual(self._fetch_max_age(reverse('admin:admin_views_section_change', args=(1,))), 0)

    def test_model_history(self):
        "Check the never-cache status of a model history page"
        self.assertEqual(self._fetch_max_age(reverse('admin:admin_views_section_history', args=(1,))), 0)

    def test_model_delete(self):
        "Check the never-cache status of a model delete page"
        self.assertEqual(self._fetch_max_age(reverse('admin:admin_views_section_delete', args=(1,))), 0)

    def test_login(self):
        "Check the never-cache status of login views"
        # An anonymous hit on the index renders the login view.
        self.client.logout()
        self.assertEqual(self._fetch_max_age(reverse('admin:index')), 0)

    def test_logout(self):
        "Check the never-cache status of logout view"
        self.assertEqual(self._fetch_max_age(reverse('admin:logout')), 0)

    def test_password_change(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        self.assertEqual(self._fetch_max_age(reverse('admin:password_change')), None)

    def test_password_change_done(self):
        "Check the never-cache status of the password change done view"
        self.assertEqual(self._fetch_max_age(reverse('admin:password_change_done')), None)

    def test_JS_i18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        self.assertEqual(self._fetch_max_age(reverse('admin:jsi18n')), None)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
    """
    The prepopulated_fields machinery must emit its JavaScript hooks on the
    add form, stay silent on the change form of an existing object, and
    render maxLength unlocalized (refs #15938).
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_prepopulated_on(self):
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        self.assertEqual(resp.status_code, 200)
        # The add view wires up the slug fields (main form and inline).
        for js_snippet in (
            "id: '#id_slug',",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        ):
            self.assertContains(resp, js_snippet)

    def test_prepopulated_off(self):
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(1,)))
        self.assertEqual(resp.status_code, 200)
        self.assertContains(resp, "A Long Title")
        # No prepopulation hooks may appear on an existing object's change form.
        for js_snippet in (
            "id: '#id_slug'",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        ):
            self.assertNotContains(resp, js_snippet)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(resp, "maxLength: 1000")  # instead of 1,000
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class SeleniumAdminViewsFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_views'] + AdminSeleniumWebDriverTestCase.available_apps
fixtures = ['admin-views-users.xml']
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_prepopulated_fields(self):
"""
Ensure that the JavaScript-automated prepopulated fields work with the
main form and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_views_mainprepopulated_add')))
# Main form ----------------------------------------------------------
self.selenium.find_element_by_css_selector('#id_pubdate').send_keys('2012-02-18')
self.get_select_option('#id_status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_name').send_keys(' this is the mAin nÀMë and it\'s awεšome')
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-and-its-awesome-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-and-its-awesome')
# Stacked inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-name').send_keys(' here is a sŤāÇkeð inline ! ')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
self.assertEqual(slug2, 'option-one-here-stacked-inline')
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-name').send_keys(' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo') # 50 characters maximum for slug1 field
self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo') # 60 characters maximum for slug2 field
# Tabular inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-name').send_keys('And now, with a tÃbűlaŘ inline !!!')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-name').send_keys('a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
# Save and check that everything is properly stored in the database
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.assertEqual(MainPrepopulated.objects.all().count(), 1)
MainPrepopulated.objects.get(
name=' this is the mAin nÀMë and it\'s awεšome',
pubdate='2012-02-18',
status='option two',
slug1='main-name-and-its-awesome-2012-02-18',
slug2='option-two-main-name-and-its-awesome',
)
self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
RelatedPrepopulated.objects.get(
name=' here is a sŤāÇkeð inline ! ',
pubdate='2011-12-17',
status='option one',
slug1='here-stacked-inline-2011-12-17',
slug2='option-one-here-stacked-inline',
)
RelatedPrepopulated.objects.get(
name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', # 75 characters in name field
pubdate='1999-01-25',
status='option two',
slug1='now-you-have-another-stacked-inline-very-loooooooo',
slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
)
RelatedPrepopulated.objects.get(
name='And now, with a tÃbűlaŘ inline !!!',
pubdate='1234-12-07',
status='option two',
slug1='and-now-tabular-inline-1234-12-07',
slug2='option-two-and-now-tabular-inline',
)
RelatedPrepopulated.objects.get(
name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate='1981-08-22',
status='option one',
slug1='tabular-inline-ignored-characters-1981-08-22',
slug2='option-one-tabular-inline-ignored-characters',
)
def test_populate_existing_object(self):
"""
Ensure that the prepopulation works for existing objects too, as long
as the original field is empty.
Refs #19082.
"""
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=' this is the mAin nÀMë',
pubdate='2012-02-18',
status='option two',
slug1='',
slug2='',
)
self.admin_login(username='super',
password='secret',
login_url=reverse('admin:index'))
object_url = '%s%s' % (
self.live_server_url,
reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)))
self.selenium.get(object_url)
self.selenium.find_element_by_css_selector('#id_name').send_keys(' the best')
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
# Save the object
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.selenium.get(object_url)
self.selenium.find_element_by_css_selector('#id_name').send_keys(' hello')
# The slugs got prepopulated didn't change since they were originally not empty
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
def test_collapsible_fieldset(self):
    """
    A fieldset declared with the 'collapse' class can be shown and
    hidden via the toggle link.
    """
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    add_url = '%s%s' % (self.live_server_url,
                        reverse('admin:admin_views_article_add'))
    self.selenium.get(add_url)
    # Collapsed by default: the field inside the fieldset is invisible.
    self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
    show_links = self.selenium.find_elements_by_link_text('Show')
    show_links[0].click()
    self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
    # Once expanded, the toggle offers to hide the section again.
    toggle = self.selenium.find_element_by_id('fieldsetcollapser0')
    self.assertEqual(toggle.text, "Hide")
def test_first_field_focus(self):
    """JavaScript-assisted auto-focus on first usable form field."""
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    # A first field rendered with a single widget receives focus directly.
    self.selenium.get('%s%s' % (self.live_server_url,
                                reverse('admin:admin_views_picture_add')))
    focused = self.selenium.switch_to.active_element
    self.assertEqual(focused, self.selenium.find_element_by_id('id_name'))
    # With a MultiWidget, focus lands on the first sub-widget instead.
    self.selenium.get('%s%s' % (self.live_server_url,
                                reverse('admin:admin_views_reservation_add')))
    focused = self.selenium.switch_to.active_element
    self.assertEqual(focused, self.selenium.find_element_by_id('id_start_date_0'))
def test_cancel_delete_confirmation(self):
    "Cancelling the deletion of an object takes the user back one page."
    pizza = Pizza.objects.create(name="Double Cheese")
    change_url = '%s%s' % (
        self.live_server_url,
        reverse('admin:admin_views_pizza_change', args=(pizza.id,)),
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(change_url)
    self.selenium.find_element_by_class_name('deletelink').click()
    # Wait until we're on the delete confirmation page, then cancel.
    self.wait_for('.cancel-link')
    self.selenium.find_element_by_class_name('cancel-link').click()
    # Wait until we're back on the change page.
    self.wait_for_text('#content h1', 'Change pizza')
    # Cancelling must return to the change form and delete nothing.
    self.assertEqual(self.selenium.current_url, change_url)
    self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
    """
    Cancelling the deletion of an object with relations takes the user back
    one page.
    """
    pizza = Pizza.objects.create(name="Double Cheese")
    cheddar = Topping.objects.create(name="Cheddar")
    mozzarella = Topping.objects.create(name="Mozzarella")
    pizza.toppings.add(cheddar, mozzarella)
    change_url = '%s%s' % (
        self.live_server_url,
        reverse('admin:admin_views_pizza_change', args=(pizza.id,)),
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(change_url)
    self.selenium.find_element_by_class_name('deletelink').click()
    # Wait until we're on the delete confirmation page, then cancel.
    self.wait_for('.cancel-link')
    self.selenium.find_element_by_class_name('cancel-link').click()
    # Wait until we're back on the change page.
    self.wait_for_text('#content h1', 'Change pizza')
    self.assertEqual(self.selenium.current_url, change_url)
    # Neither the pizza nor its related toppings were deleted.
    self.assertEqual(Pizza.objects.count(), 1)
    self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
    """
    list_editable foreign keys have add/change popups.
    """
    from selenium.webdriver.support.ui import Select
    section = Section.objects.create(name='Test section')
    Article.objects.create(
        title='foo',
        content='<p>Middle content</p>',
        date=datetime.datetime(2008, 3, 18, 11, 54, 58),
        section=section,
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))

    def switch_to_popup():
        # Focus the most recently opened window (the popup).
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])

    def switch_to_main():
        self.selenium.switch_to.window(self.selenium.window_handles[0])

    # Change popup: edit the related section, then check the select updates.
    self.selenium.find_element_by_id('change_id_form-0-section').click()
    switch_to_popup()
    self.wait_for_text('#content h1', 'Change section')
    name_input = self.selenium.find_element_by_id('id_name')
    name_input.clear()
    name_input.send_keys('<i>edited section</i>')
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    switch_to_main()
    section_select = Select(self.selenium.find_element_by_id('id_form-0-section'))
    self.assertEqual(section_select.first_selected_option.text, '<i>edited section</i>')

    # Add popup: create a new section, then check it becomes selected.
    self.selenium.find_element_by_id('add_id_form-0-section').click()
    switch_to_popup()
    self.wait_for_text('#content h1', 'Add section')
    self.selenium.find_element_by_id('id_name').send_keys('new section')
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    switch_to_main()
    section_select = Select(self.selenium.find_element_by_id('id_form-0-section'))
    self.assertEqual(section_select.first_selected_option.text, 'new section')
def test_list_editable_raw_id_fields(self):
    """A raw-id list_editable FK can be reassigned via the lookup popup."""
    first_parent = ParentWithUUIDPK.objects.create(title='test')
    second_parent = ParentWithUUIDPK.objects.create(title='test2')
    RelatedWithUUIDPKModel.objects.create(parent=first_parent)
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    changelist_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
    self.selenium.get(self.live_server_url + changelist_url)
    self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
    self.wait_for_popup()
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    # Select the second parent in the popup.
    self.selenium.find_element_by_link_text(str(second_parent.pk)).click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    # The newly selected pk should appear in the raw id input.
    raw_id_value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
    self.assertEqual(raw_id_value, str(second_parent.pk))
class SeleniumAdminViewsChromeTests(SeleniumAdminViewsFirefoxTests):
    """Re-run the full Firefox Selenium admin suite against Chrome."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumAdminViewsIETests(SeleniumAdminViewsFirefoxTests):
    """Re-run the full Firefox Selenium admin suite against Internet Explorer."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
    """Tests for the rendering and handling of ModelAdmin.readonly_fields."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_readonly_get(self):
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertEqual(response.status_code, 200)
        # Read-only fields must not render editable inputs.
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=15)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response,
                            "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Checks that multiline text in a readonly field gets <br /> tags
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
        self.assertContains(response, "InlineMultiline<br />test<br />string")
        self.assertContains(response,
                            formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        # Read-only fields still get the field-specific form-row CSS hooks.
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>', html=True)

        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        self.assertContains(response, "%d amount of cool" % p.pk)

    def test_readonly_text_field(self):
        p = Post.objects.create(
            title="Readonly test", content="test",
            readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
        )
        Link.objects.create(
            url="http://www.djangoproject.com", post=p,
            readonly_link_content="test\r\nlink",
        )
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        # Checking readonly field.
        self.assertContains(response, 'test<br /><br />test<br /><br />test<br /><br />test')
        # Checking readonly field in inline.
        self.assertContains(response, 'test<br />link')

    def test_readonly_post(self):
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())

        # A POSTed value for the read-only field must be ignored.
        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get(reverse('admin:admin_views_pizza_add'))
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        # admin2 presumably restricts its user queryset — the password change
        # view must 404 for a user outside it.
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_manytomany_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get(reverse('admin:admin_views_topping_add'))
        self.assertEqual(response.status_code, 200)

    def test_readonly_onetoone_backwards_ref(self):
        """
        Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
        """
        v1 = Villain.objects.create(name='Adam')
        pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
        pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_field(response, 'plotdetails')
        self.assertEqual(field.contents(), 'Brand New Plot')

        # The reverse relation also works if the OneToOneField is null.
        pd.plot = None
        pd.save()
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_field(response, 'plotdetails')
        self.assertEqual(force_text(field.contents()), '(None)')

    def test_readonly_field_overrides(self):
        """
        Regression test for #22087 - ModelForm Meta overrides are ignored by
        AdminReadonlyField
        """
        p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
        response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
        self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
        self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")

    def test_correct_autoescaping(self):
        """
        Make sure that non-field readonly elements are properly autoescaped (#24461)
        """
        section = Section.objects.create(name='<a>evil</a>')
        response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
        # The raw (unescaped) payload must never appear in the page...
        self.assertNotContains(response, "<a>evil</a>", status_code=200)
        # ...only its HTML-escaped rendering may. The original asserted the
        # identical unescaped string again, contradicting the line above.
        self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
    """Checks that callable limit_choices_to filters admin form choices."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        one_day = datetime.timedelta(days=1)
        allowed = Character.objects.create(
            username='threepwood',
            last_action=datetime.datetime.today() + one_day,
        )
        filtered_out = Character.objects.create(
            username='marley',
            last_action=datetime.datetime.today() - one_day,
        )
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The allowed option should appear twice; the limited option should not appear.
        self.assertContains(response, allowed.username, count=2)
        self.assertNotContains(response, filtered_out.username)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
    """Tests for raw_id_fields lookup popups honouring limit_choices_to."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def _get_lookup_popup_response(self, response, lookup_id):
        """
        Find the raw-id lookup link whose DOM id is ``lookup_id`` (bytes)
        in ``response``, resolve its (possibly relative) href, and GET the
        popup page it points to.
        """
        m = re.search(
            br'<a href="([^"]*)"[^>]* id="' + lookup_id + br'"',
            response.content)
        self.assertTrue(m)  # Got a match
        # The href is HTML-escaped in the page source; unescape it before
        # requesting it. (The previous ``.replace("&", "&")`` was a no-op.)
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links.
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        return self.client.get(popup_url)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self._get_lookup_popup_response(response, b'lookup_id_inquisition')
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # field__isnull=0 must parse correctly from the lookup query string;
        # in model we define defendant0 to have a limit_choices_to option
        # that includes "actor__title__isnull=False".
        response2 = self._get_lookup_popup_response(response, b'lookup_id_defendant0')
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # field__isnull=1 must parse correctly from the lookup query string;
        # in model we define defendant1 to have a limit_choices_to option
        # that includes "actor__title__isnull=True".
        response2 = self._get_lookup_popup_response(response, b'lookup_id_defendant1')
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_save_button(self):
        # Plain "Save" on the add form redirects to the new user's change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_save_continue_editing_button(self):
        # "_continue" also redirects to the new user's change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_password_mismatch(self):
        # Mismatched passwords re-render the form with the error attached to
        # "password2" and no error on a (nonexistent) "password" field.
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        self.assertNotIn('password', adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'],
                         ["The two password fields didn't match."])

    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        self.assertEqual(response.status_code, 200)
        # Popups must not offer "save and continue" / "save and add another".
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        self.assertEqual(response.status_code, 200)
        # The response carries the JS that dismisses the popup window.
        self.assertContains(response, 'dismissAddRelatedObjectPopup')

    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # Popups must not offer "save and continue" / "save and add another".
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        # The response carries the JS that dismisses the popup window.
        self.assertContains(response, 'dismissChangeRelatedObjectPopup')

    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        # The response carries the JS that dismisses the popup window.
        self.assertContains(response, 'dismissDeleteRelatedObjectPopup')

    def test_save_add_another_button(self):
        # "_addanother" creates the user, then redirects back to the add form.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_user_permission_performance(self):
        # The user change page must render within a fixed query budget.
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)

    def test_form_url_present_in_context(self):
        # admin3 is expected to supply a custom form_url ('pony') to the view.
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_save_button(self):
        # Adding a group redirects to the changelist and creates the row.
        group_count = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {
            'name': 'newgroup',
        })
        # (A discarded ``Group.objects.order_by('-id')[0]`` query used to sit
        # here; its result was never used, so it has been removed.)
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(Group.objects.count(), group_count + 1)

    def test_group_permission_performance(self):
        # The group change page must render within a fixed query budget.
        g = Group.objects.create(name="test_group")
        # Ensure no queries are skipped due to cached content type for Group.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
    """Checks for the CSS class hooks emitted by the admin templates."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')

    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')

    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')

    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')

    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(
            reverse('admin:admin_views_section_delete', args=(1,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')

    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<body class=" dashboard app-admin_views')

    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'),
                                    action_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')

    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose",
                               release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(
            response, '<th class="field-name">')
        self.assertContains(
            response, '<td class="field-release_date nowrap">')
        self.assertContains(
            response, '<td class="action-checkbox">')
# docutils is an optional dependency (used by django.contrib.admindocs);
# tests that need it are skipped below when it is not installed.
docutils = None
try:
    import docutils
except ImportError:
    pass
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    """Smoke tests for the admindocs tag/filter reference pages."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_tags(self):
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists (count=2: heading appears in the index
        # and in the detail section).
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)

    def test_filters(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
    ROOT_URLCONF="admin_views.urls",
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    # i18n is disabled to exercise the no-language-code rendering path.
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """Markup validity checks for admin pages."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Every request is made as the fixture superuser.
        self.client.login(username='super', password='secret')

    def test_lang_name_present(self):
        # With USE_I18N off, the page must not render empty lang/xml:lang
        # attributes.
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertNotContains(response, ' lang=""')
        self.assertNotContains(response, ' xml:lang=""')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls",
USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
    # Authenticate as the fixture superuser for every test.
    self.client.login(username='super', password='secret')
def tearDown(self):
    # Localized format lookups are cached; reset so the L10N settings
    # overridden for this class don't leak into later tests.
    formats.reset_format_cache()
def assert_non_localized_year(self, response, year):
    """Ensure that the year is not localized with
    USE_THOUSAND_SEPARATOR. Refs #15234.
    """
    localized_year = formats.number_format(year)
    self.assertNotContains(response, localized_year)
def assert_contains_year_link(self, response, date):
    """Assert a year-drilldown link for ``date`` is in the response."""
    expected_link = '?release_date__year=%d"' % (date.year,)
    self.assertContains(response, expected_link)
def assert_contains_month_link(self, response, date):
    """Assert a month-drilldown link for ``date`` is in the response."""
    # hrefs are HTML-escaped in the rendered page, hence "&amp;" — the
    # previous raw "&" could never match the escaped markup.
    self.assertContains(
        response, '?release_date__month=%d&amp;release_date__year=%d"' % (
            date.month, date.year))
def assert_contains_day_link(self, response, date):
    """Assert a day-drilldown link for ``date`` is in the response."""
    # hrefs are HTML-escaped in the rendered page, hence "&amp;" — the
    # previous raw "&" could never match the escaped markup.
    self.assertContains(
        response, '?release_date__day=%d&amp;'
        'release_date__month=%d&amp;release_date__year=%d"' % (
            date.day, date.month, date.year))
def test_empty(self):
    """
    Ensure that no date hierarchy links display with empty changelist.
    """
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # No drill-down level should be offered at all.
    for query_param in ('release_date__year=',
                        'release_date__month=',
                        'release_date__day='):
        self.assertNotContains(response, query_param)
def test_single(self):
    """
    Ensure that single day-level date hierarchy appears for single object.
    """
    release = datetime.date(2000, 6, 30)
    Podcast.objects.create(release_date=release)
    changelist_url = reverse('admin:admin_views_podcast_changelist')
    response = self.client.get(changelist_url)
    self.assert_contains_day_link(response, release)
    self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
Ensure that month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
Ensure that year-level links appear for year-spanning changelist.
"""
DATES = (datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
# no day/month-level links
self.assertNotContains(response, 'release_date__day=')
self.assertNotContains(response, 'release_date__month=')
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = '%s?release_date__year=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = '%s?release_date__year=%d&release_date__month=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year, date.month)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
    """
    Ensure that one can easily customize the way related objects are saved.
    Refs #16115.
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        post = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        self.client.post(reverse('admin:admin_views_parent_add'), post)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        # The expected " Stone" suffix shows the admin's customized
        # related-object saving was applied to the inline children.
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        post = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        parent = Parent.objects.create(name='Josh Rock')
        Child.objects.create(parent=parent, name='Paul')
        Child.objects.create(parent=parent, name='Catherine')
        post = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        self.client.post(reverse('admin:admin_views_parent_changelist'), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
    """Admin logout view behaviour for logged-in and anonymous clients."""
    fixtures = ['admin-views-users.xml']

    def test_logout(self):
        self.client.login(username='super', password='secret')
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'registration/logged_out.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
        self.assertFalse(response.context['has_permission'])
        self.assertNotContains(response, 'user-tools')  # user-tools div shouldn't visible.

    def test_client_logout_url_can_be_used_to_login(self):
        # Anonymous hit on the logout URL: redirect to login instead of
        # rendering the logged-out page.
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 302)  # we should be redirected to the login page.
        # follow the redirect and test results.
        response = self.client.get(reverse('admin:logout'), follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
        self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
    """Messages posted by admin actions are rendered at the requested level."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def send_message(self, level):
        """
        Helper that sends a post to the dummy test methods and asserts that a
        message with the level has appeared in the response.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_%s' % level,
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="%s">Test %s</li>' % (level, level),
                            html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_extra_tags',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="extra_tag info">Test tags</li>',
                            html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
    """Changelist filters survive round-trips through the add/change/history/
    delete views via the ``_changelist_filters`` query-string parameter."""
    fixtures = ['admin-views-users.xml']
    admin_site = site

    def setUp(self):
        self.client.login(username='super', password='secret')

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # _changelist_filters is itself an urlencoded querystring; decode it
        # so its parameter order is ignored as well.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        # Test equality.
        change_user_url = reverse('admin:auth_user_change', args=(105,))
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
                'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(change_user_url)
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
            '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
        )

    def get_changelist_filters(self):
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        return 104

    def get_changelist_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">joepublic</a>',
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save".
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save".
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.latest('pk').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.latest('pk').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete".
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        original_prefix = get_script_prefix()
        try:
            set_script_prefix('/prefix/')
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
        finally:
            # Always restore the process-wide script prefix for other tests.
            set_script_prefix(original_prefix)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    # Re-run the whole filter-preservation suite against the second
    # (namespaced) admin site.
    admin_site = site2
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
    """ #11277 -Labels of hidden fields in admin were not hidden. """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_all_fields_visible(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_visible(response, 'first')
        self.assert_field_visible(response, 'second')

    def test_all_fields_hidden(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
        self.assert_fieldline_hidden(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_hidden(response, 'second')

    def test_mixin(self):
        # One hidden and one visible field: the enclosing field line stays
        # visible while only the hidden field gets the 'hidden' class.
        response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_visible(response, 'second')

    def assert_field_visible(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s">' % field_name)

    def assert_field_hidden(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)

    def assert_fieldline_visible(self, response):
        self.assertContains(response, '<div class="form-row field-first field-second">')

    def assert_fieldline_hidden(self, response):
        self.assertContains(response, '<div class="form-row hidden')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
    """ModelAdmin.view_on_site behaviour plus #20522 formset validation."""
    fixtures = ['admin-views-users.xml', 'admin-views-restaurants.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_add_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test1",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": "",
                     "dependentchild_set-0-family_name": "Test2"}
        response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
                                    post_data)
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_change_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
                                                          family_name="Test1")
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test2",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": str(pwdc.id),
                     "dependentchild_set-0-family_name": "Test1"}
        response = self.client.post(
            reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
        )
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_check(self):
        "Ensure that the view_on_site value is either a boolean or a callable"
        try:
            CityAdmin.view_on_site = True
            self.assertEqual(CityAdmin.check(City), [])
            CityAdmin.view_on_site = False
            self.assertEqual(CityAdmin.check(City), [])
            CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
            self.assertEqual(CityAdmin.check(City), [])
            CityAdmin.view_on_site = []
            self.assertEqual(CityAdmin.check(City), [
                Error(
                    "The value of 'view_on_site' must be a callable or a boolean value.",
                    hint=None,
                    obj=CityAdmin,
                    id='admin.E025',
                ),
            ])
        finally:
            # Restore the original values for the benefit of other tests.
            CityAdmin.view_on_site = True

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(1,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "Ensure that the default behavior is followed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(1,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_worker_change', args=(1,)))
        worker = Worker.objects.get(pk=1)
        self.assertContains(response, '"/worker/%s/%s/"' % (worker.surname, worker.name))

    def test_missing_get_absolute_url(self):
        "Ensure None is returned if model doesn't have get_absolute_url"
        model_admin = ModelAdmin(Worker, None)
        self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
    """view_on_site behaviour for *inline* admins rendered on a parent page."""
    fixtures = ['admin-views-users.xml', 'admin-views-restaurants.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        # City rows inlined on the State change page.
        response = self.client.get(reverse('admin:admin_views_state_change', args=(1,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "Ensure that the 'View on site' button is displayed if view_on_site is True"
        # Restaurant rows inlined on the City change page.
        response = self.client.get(reverse('admin:admin_views_city_change', args=(1,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(1,)))
        worker = Worker.objects.get(pk=1)
        self.assertContains(response, '"/worker_inline/%s/%s/"' % (worker.surname, worker.name))
class AdminGenericRelationTests(TestCase):
    def test_generic_relation_fk_list_filter(self):
        """
        Validates a model with a generic relation to a model with
        a foreign key can specify the generic+fk relationship
        path as a list_filter. See trac #21428.
        """
        class GenericFKAdmin(ModelAdmin):
            # 'tags' is the generic relation; 'content_type' the FK hop.
            list_filter = ('tags__content_type',)

        validator = ModelAdminValidator()
        try:
            validator.validate_list_filter(GenericFKAdmin, Plot)
        except ImproperlyConfigured:
            self.fail("Couldn't validate a GenericRelation -> FK path in ModelAdmin.list_filter")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestEtagWithAdminView(TestCase):
    # See https://code.djangoproject.com/ticket/16003
    def test_admin(self):
        # Anonymous request redirects to login either way; USE_ETAGS alone
        # should decide whether the ETag header is present.
        with self.settings(USE_ETAGS=False):
            response = self.client.get(reverse('admin:index'))
            self.assertEqual(response.status_code, 302)
            self.assertFalse(response.has_header('ETag'))

        with self.settings(USE_ETAGS=True):
            response = self.client.get(reverse('admin:index'))
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response.has_header('ETag'))
@override_settings(
    PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
    ROOT_URLCONF="admin_views.urls",
)
class GetFormsetsWithInlinesArgumentTest(TestCase):
    """
    #23934 - When adding a new model instance in the admin, the 'obj' argument
    of get_formsets_with_inlines() should be None. When changing, it should be
    equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_explicitly_provided_pk(self):
        # A 302 means the admin saved without the checking-admin raising.
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)
| 45.88649 | 251 | 0.652729 |
8f72eeb07698a2cf82c13a76f71f869cf076bee1 | 3,341 | py | Python | tests/basics/Classes32.py | juanfra684/Nuitka | 0e276895fadabefb598232f2ccf8cc7736c9a85b | [
"Apache-2.0"
] | 1 | 2022-01-01T18:29:24.000Z | 2022-01-01T18:29:24.000Z | tests/basics/Classes32.py | juanfra684/Nuitka | 0e276895fadabefb598232f2ccf8cc7736c9a85b | [
"Apache-2.0"
] | 1 | 2021-06-10T06:47:04.000Z | 2021-06-10T06:47:04.000Z | tests/basics/Classes32.py | juanfra684/Nuitka | 0e276895fadabefb598232f2ccf8cc7736c9a85b | [
"Apache-2.0"
] | null | null | null | # Copyright 2020, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ``OrderedDict`` has only ever lived in ``collections`` -- ``collections.abc``
# contains abstract base classes, so the historical try/except fallback always
# raised ImportError in the ``try`` branch and took the ``except`` path on
# every interpreter.  Import it directly; behavior is unchanged.
from collections import OrderedDict

print("Call order of Python3 metaclasses:")
def a():
    """Return a freshly created class ``A``; its body prints while executing."""
    x = 1

    class A:
        # Runs at class-creation time, demonstrating that a class body can
        # read the enclosing function's local ``x`` as a closure.
        print("Class body a.A is evaluating closure x", x)

    # Printed after the class body has executed -- records evaluation order.
    print("Called", a)
    return A
def b():
    """Return a freshly created empty class ``B``; prints to record the call."""
    class B:
        pass

    print("Called", b)
    return B
def displayable(dictionary):
    """Return the dictionary's items as a deterministically sorted pair list."""
    pairs = list(dictionary.items())
    pairs.sort()
    return pairs
def m():
    """Build and return the metaclass ``M`` that logs every creation hook."""
    class M(type):
        def __new__(cls, class_name, bases, attrs, **over):
            # ``**over`` receives the extra keyword arguments of the class
            # statement (other=..., yet_other=...); they are reported but
            # deliberately not forwarded to type.__new__.
            print(
                "Metaclass M.__new__ cls",
                cls,
                "name",
                class_name,
                "bases",
                bases,
                "dict",
                displayable(attrs),
                "extra class defs",
                displayable(over),
            )
            return type.__new__(cls, class_name, bases, attrs)

        def __init__(self, name, bases, attrs, **over):
            print(
                "Metaclass M.__init__",
                name,
                bases,
                displayable(attrs),
                displayable(over),
            )
            super().__init__(name, bases, attrs)

        def __prepare__(metacls, bases, **over):  # @NoSelf
            # NOTE(review): not decorated with @classmethod, so when Python
            # invokes M.__prepare__(name, bases, **kwds) the first parameter
            # actually receives the class *name* string, not the metaclass --
            # presumably acceptable for this call-order demo; confirm before
            # reusing this pattern elsewhere.
            print("Metaclass M.__prepare__", metacls, bases, displayable(over))
            # Return an ordered namespace so attribute order is deterministic.
            return OrderedDict()

    print("Called", m)
    return M
def d():
    """Report the call, then yield the value used for the ``other=`` kwarg."""
    print("Called", d)
    value = 1
    return value
def e():
    """Report the call, then yield the value used for the ``yet_other=`` kwarg."""
    print("Called", e)
    value = 2
    return value
class C1(a(), b(), other=d(), metaclass=m(), yet_other=e()):
    # Bases and keyword arguments of the class statement are evaluated
    # left-to-right (a, b, d, m, e) before the metaclass machinery runs --
    # this class exists to demonstrate that call order.
    import sys

    # TODO: Enable this.
    # print("C1 locals type is", type(sys._getframe().f_locals))
print("OK, class created", C1)
print("Attribute C1.__dict__ has type", type(C1.__dict__))
print("Function local classes can be made global and get proper __qualname__:")
def someFunctionWithLocalClassesMadeGlobal():
    """Define one global-bound and one purely local class, then report their
    ``__qualname__`` values (the attribute is absent on Python 3.2, so each
    lookup is guarded)."""
    # Affects __qualname__ only in Python3.4 or higher, not in Python3.2
    global C

    class C:
        pass

    class D:
        pass

    missing = object()

    nested_qualname = getattr(D, "__qualname__", missing)
    if nested_qualname is not missing:
        print("Nested class qualname is", nested_qualname)

    global_qualname = getattr(C, "__qualname__", missing)
    if global_qualname is not missing:
        print("Local class made global qualname is", global_qualname)
# Execute the demo so the qualname output is actually produced (and the
# global name ``C`` becomes bound at module level).
someFunctionWithLocalClassesMadeGlobal()

print("Function in a class with private name")
class someClassWithPrivateArgumentNames:
    # Exercises name mangling of a private keyword-only parameter: ``__kw``
    # inside the class body is mangled, which is what the printed
    # ``f.__annotations__`` below is meant to show.
    def f(self, *, __kw: 1):
        pass


print(someClassWithPrivateArgumentNames.f.__annotations__)
print("OK.")
| 22.574324 | 79 | 0.599222 |
8eb7fbd1aaaac87fce0f9b823dd8f83f5a40cdbd | 5,706 | py | Python | gremlin-python/src/main/jython/gremlin_python/process/strategies.py | dylanht/tinkerpop | 1d11b81681b1e9b5499ea9dc3590109a1e5a1569 | [
"Apache-2.0"
] | null | null | null | gremlin-python/src/main/jython/gremlin_python/process/strategies.py | dylanht/tinkerpop | 1d11b81681b1e9b5499ea9dc3590109a1e5a1569 | [
"Apache-2.0"
] | null | null | null | gremlin-python/src/main/jython/gremlin_python/process/strategies.py | dylanht/tinkerpop | 1d11b81681b1e9b5499ea9dc3590109a1e5a1569 | [
"Apache-2.0"
] | null | null | null | '''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
'''
__author__ = 'Marko A. Rodriguez (http://markorodriguez.com)'
from gremlin_python.process.traversal import TraversalStrategy
#########################
# DECORATION STRATEGIES #
#########################
class ConnectiveStrategy(TraversalStrategy):
    """Configuration-free marker mirroring TinkerPop's ConnectiveStrategy."""

    def __init__(self):
        super(ConnectiveStrategy, self).__init__()
class ElementIdStrategy(TraversalStrategy):
    """Configuration-free marker mirroring TinkerPop's ElementIdStrategy."""

    def __init__(self):
        super(ElementIdStrategy, self).__init__()
# EventStrategy doesn't make sense outside JVM traversal machine
class HaltedTraverserStrategy(TraversalStrategy):
    """Optionally configures the factory used for halted traversers."""

    def __init__(self, halted_traverser_factory=None):
        super(HaltedTraverserStrategy, self).__init__()
        if halted_traverser_factory is None:
            return
        self.configuration["haltedTraverserFactory"] = halted_traverser_factory
class PartitionStrategy(TraversalStrategy):
    """Partitions graph elements into named partitions.

    All parameters are optional; only the ones actually supplied are placed
    into the strategy configuration that is serialized to the server.
    """

    def __init__(self, partition_key=None, write_partition=None, read_partitions=None, include_meta_properties=None):
        TraversalStrategy.__init__(self)
        if partition_key is not None:
            self.configuration["partitionKey"] = partition_key
        if write_partition is not None:
            self.configuration["writePartition"] = write_partition
        # Bug fix: this guard previously tested ``write_partition`` (a
        # copy/paste slip), so ``readPartitions`` was set whenever a write
        # partition was given -- even to None -- and dropped otherwise.
        if read_partitions is not None:
            self.configuration["readPartitions"] = read_partitions
        if include_meta_properties is not None:
            self.configuration["includeMetaProperties"] = include_meta_properties
class SubgraphStrategy(TraversalStrategy):
    """Restricts a traversal to the given vertex/edge/vertex-property criteria."""

    def __init__(self, vertices=None, edges=None, vertex_properties=None):
        super(SubgraphStrategy, self).__init__()
        supplied = (("vertices", vertices),
                    ("edges", edges),
                    ("vertexProperties", vertex_properties))
        for config_key, criterion in supplied:
            if criterion is not None:
                self.configuration[config_key] = criterion
class VertexProgramStrategy(TraversalStrategy):
    """Configures GraphComputer-based (OLAP) traversal execution."""

    def __init__(self, graph_computer=None, workers=None, persist=None, result=None, vertices=None, edges=None,
                 configuration=None):
        super(VertexProgramStrategy, self).__init__()
        optional_settings = (("graphComputer", graph_computer),
                             ("workers", workers),
                             ("persist", persist),
                             ("result", result),
                             ("vertices", vertices),
                             ("edges", edges))
        for config_key, value in optional_settings:
            if value is not None:
                self.configuration[config_key] = value
        # Free-form extras are merged last so they can override the above.
        if configuration is not None:
            self.configuration.update(configuration)
###########################
# FINALIZATION STRATEGIES #
###########################
class MatchAlgorithmStrategy(TraversalStrategy):
    """Selects the algorithm the server uses to solve match() steps."""

    def __init__(self, match_algorithm=None):
        TraversalStrategy.__init__(self)
        if match_algorithm is None:
            return
        self.configuration["matchAlgorithm"] = match_algorithm
###########################
# OPTIMIZATION STRATEGIES #
###########################
class AdjacentToIncidentStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(AdjacentToIncidentStrategy, self).__init__()


class FilterRankingStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(FilterRankingStrategy, self).__init__()


class IdentityRemovalStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(IdentityRemovalStrategy, self).__init__()


class IncidentToAdjacentStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(IncidentToAdjacentStrategy, self).__init__()


class InlineFilterStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(InlineFilterStrategy, self).__init__()


class LazyBarrierStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(LazyBarrierStrategy, self).__init__()


class MatchPredicateStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(MatchPredicateStrategy, self).__init__()


class OrderLimitStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(OrderLimitStrategy, self).__init__()


class PathProcessorStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(PathProcessorStrategy, self).__init__()


class PathRetractionStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(PathRetractionStrategy, self).__init__()


class RangeByIsCountStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(RangeByIsCountStrategy, self).__init__()


class RepeatUnrollStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(RepeatUnrollStrategy, self).__init__()


class GraphFilterStrategy(TraversalStrategy):
    """Marker strategy: no configuration; the optimization runs server-side."""

    def __init__(self):
        super(GraphFilterStrategy, self).__init__()
###########################
# VERIFICATION STRATEGIES #
###########################
class LambdaRestrictionStrategy(TraversalStrategy):
    """Marker strategy: no configuration; verification runs server-side."""

    def __init__(self):
        super(LambdaRestrictionStrategy, self).__init__()


class ReadOnlyStrategy(TraversalStrategy):
    """Marker strategy: no configuration; verification runs server-side."""

    def __init__(self):
        super(ReadOnlyStrategy, self).__init__()
| 31.01087 | 117 | 0.706449 |
1a107ac62864645ae3d2222a62cace5f3bf68202 | 2,814 | py | Python | src/models/anna.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 73 | 2019-01-26T02:57:24.000Z | 2022-02-15T08:45:11.000Z | src/models/anna.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 9 | 2019-04-09T10:53:41.000Z | 2020-09-11T13:18:26.000Z | src/models/anna.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 15 | 2019-04-07T11:28:57.000Z | 2022-03-29T04:35:48.000Z | import skimage.io as io
import skimage.transform as skt
import numpy as np
from PIL import Image
from src.models.class_patcher import patcher
from src.utils.imgproc import *
class patcher(patcher):
def __init__(self, body='./body/body_anna.png', **options):
super().__init__(name='吸血鬼アンナ', body=body, pantie_position=[31, 1115], **options)
self.mask = io.imread('./mask/mask_anna.png')
self.sign_position = [37, 861]
try:
self.add_sign = self.options['add_sign']
except:
self.add_sign = self.ask(question='Add immoral sign?', default=False)
if self.add_sign:
try:
self.sign = Image.open(self.options['fsign'])
except:
self.sign = Image.open('./material/anna_sign.png')
def convert(self, image):
pantie = np.array(image)
pantie = np.bitwise_and(pantie, self.mask)
[r, c, d] = pantie.shape
# move from hip to front
patch = np.copy(pantie[-170:, 546:, :])
pantie[-100:, 546:, :] = 0
patch = skt.resize(patch[::-1, ::-1, :], (patch.shape[0], 40), anti_aliasing=True, mode='reflect')
[pr, pc, d] = patch.shape
pantie[157:157 + pr, :pc, :] = np.uint8(patch * 255)
# Affine transform matrix
src_cols = np.linspace(0, c, 10)
src_rows = np.linspace(0, r, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
shifter_row = np.zeros(src.shape[0])
shifter_col = np.zeros(src.shape[0])
shifter_row = -(np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 8) * 60)
shifter_row[-30:] += (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]))[-30:] * 50)
shifter_row = np.convolve(shifter_row, np.ones(30) / 30, mode='same')
dst_rows = src[:, 1] + shifter_row - 20
dst_cols = src[:, 0] + shifter_col
dst = np.vstack([dst_cols, dst_rows]).T
affin = skt.PiecewiseAffineTransform()
affin.estimate(src, dst)
pantie = np.uint8(skt.warp(pantie, affin) * 255)
pantie = pantie[:, 6:-35, :]
# Finalize
pantie = skt.resize(pantie, (np.int(pantie.shape[0] * 1.25), np.int(pantie.shape[1] * 1.56)), anti_aliasing=True, mode='reflect')
pantie = np.uint8(pantie * 255)
return Image.fromarray(pantie)
def patch(self, image, transparent=False):
image = self.convert(image)
if transparent:
patched = Image.new("RGBA", self.body_size)
else:
patched = self.body.copy()
if self.add_sign:
self.paste(patched, self.sign, self.sign_position)
patched = self.paste(patched, image, self.pantie_position)
return patched
| 40.2 | 137 | 0.585999 |
78c2a619dc79b8e0ef2ae227143c5ca8e8caa96e | 8,429 | py | Python | line-by-line.py | hkmoon/Line-by-line | a50714ccb3bc679ab417f4219e5717108f31cda4 | [
"MIT"
] | null | null | null | line-by-line.py | hkmoon/Line-by-line | a50714ccb3bc679ab417f4219e5717108f31cda4 | [
"MIT"
] | null | null | null | line-by-line.py | hkmoon/Line-by-line | a50714ccb3bc679ab417f4219e5717108f31cda4 | [
"MIT"
] | null | null | null | import sublime
import sublime_plugin
import os
import subprocess
import re
settingsfile = 'line-by-line.sublime-settings'
# Escape backslashes and double quotes so the text can be embedded inside a
# double-quoted AppleScript string.
def escape_dq(string):
    """Return *string* with backslashes and double quotes backslash-escaped."""
    return string.replace('\\', '\\\\').replace('"', '\\"')
# Normalise a command before handing it to the terminal application.
def clean(cmd):
    """Escape quotes on OSX, strip the trailing newline and, for single-line
    commands, any leading whitespace."""
    if sublime.platform() == "osx":
        cmd = escape_dq(cmd)
    cmd = cmd.rstrip('\n')
    if "\n" not in cmd:
        cmd = cmd.lstrip()
    return cmd
# Fetch a platform-specific plugin setting, falling back to *default*.
def get(plat, key, default=None):
    """Return settings[plat][key] from the plugin settings file, or *default*
    when the key is absent."""
    plat_settings = sublime.load_settings(settingsfile).get(plat)
    return plat_settings[key] if key in plat_settings else default
# Send a (cleaned) command string to the configured terminal application.
# OSX-only: uses AppleScript via `osascript` for both Terminal.app and iTerm.
def runcmd(cmd):
    """Execute *cmd* in the frontmost Terminal/iTerm window (OSX only)."""
    cmd = clean(cmd)
    plat = sublime.platform()
    if plat == 'osx':
        # Which terminal app to target is a user setting; default Terminal.app.
        App = get("osx", "App", "Terminal")
        if App == 'Terminal':
            args = ['osascript']
            args.extend(['-e', 'tell app "Terminal" to do script "' + cmd + '" in front window\n'])
            subprocess.Popen(args)
        elif re.match('iTerm', App):
            args = ['osascript']
            # iTerm needs the command written into the current session of the
            # current terminal rather than a `do script` call.
            apple_script = ('tell application "' + App + '"\n'
                            'tell current terminal\n'
                            'tell current session\n'
                            'write text "' + cmd + '"\n'
                            'end tell\n'
                            'end tell\n'
                            'end tell\n')
            args.extend(['-e', apple_script])
            subprocess.Popen(args)
    else:
        sublime.error_message("Only OSX is supported - sorry!")
class SendSelectCommand(sublime_plugin.TextCommand):
    """Send the current selection (or current line) to the terminal.

    Empty selections are expanded to the full line; lines/selections ending
    in `{` are expanded to the whole `{...}` block (R function bodies).
    """

    # expand selection to {...} when being triggered
    def expand_sel(self, sel):
        # Recursive-regex block match; `(?1)` is supported by Sublime's regex
        # engine (not by Python's `re`).
        esel = self.view.find(r"""^.*(\{(?:(["\'])(?:[^\\\\]|\\\\.|\n)*?\\2|#.*$|[^\{\}]|\n|(?1))*\})"""
                              , self.view.line(sel).begin())
        # Only return the expansion when it starts on the caret's line;
        # otherwise the implicit return is None (caller treats as "no block").
        if self.view.line(sel).begin() == esel.begin():
            return esel

    def run(self, edit):
        cmd = ''
        for sel in self.view.sel():
            if sel.empty():
                thiscmd = self.view.substr(self.view.line(sel))
                # if the line ends with {, expand to {...}
                if re.match(r".*\{\s*$", thiscmd):
                    esel = self.expand_sel(sel)
                    if esel:
                        thiscmd = self.view.substr(esel)
                settings = sublime.load_settings(settingsfile)
                if settings.get('advance_cursor'):
                    self.advanceCursor(sel) ### (Aaron F) Snagged from Rtools.py I think this is approximate the same place that Rtools.py uses the advanceCursor function. In that file they loop over "region" instead of "sel".
            else:
                thiscmd = self.view.substr(sel)
                # if selection is function meta definition (R), expand to {...}
                if self.view.score_selector(sel.end()-1, "meta.function.r") and \
                        not self.view.score_selector(sel.end(), "meta.function.r"):
                    esel = self.expand_sel(sel)
                    if esel:
                        thiscmd = self.view.substr(esel)
            cmd += thiscmd +'\n'
        runcmd(cmd)

    ### (Aaron F) function below is snagged from Rtools.py
    def advanceCursor(self, region):
        # Move the caret one line down, keeping the column where possible.
        (row, col) = self.view.rowcol(region.begin())
        # Make sure not to go past end of next line
        nextline = self.view.line(self.view.text_point(row + 1, 0))
        if nextline.size() < col:
            loc = self.view.text_point(row + 1, nextline.size())
        else:
            loc = self.view.text_point(row + 1, col)
        # Remove the old region and add the new one
        self.view.sel().subtract(region)
        self.view.sel().add(sublime.Region(loc, loc))
    ### (Aaron F) End of snag
class RDocsCommand(sublime_plugin.TextCommand):
    """Generate an roxygen2 documentation skeleton from a selected R
    function signature.

    The selected text is run through a long, order-dependent chain of
    regex substitutions that strips defaults/comments from the parameter
    list, then one `#' @param` line is emitted per parameter.
    NOTE(review): the substitution order is load-bearing — do not reorder.
    """

    def run(self, edit):
        # self.view.insert(edit, 1, self.view.sel()[0])
        sel = self.view.sel()[0]
        # Taking out spaces and such
        # params_reg = self.view.find('(?<=\().*(?=\)|$)', sel.begin())
        # self.view.insert(edit, sel.begin(), params_reg)
        params_txt = ''
        pp = self.view.substr(sel).split('\n')
        for i in pp:
            # p1 = code part of the line, p2 = trailing comment part.
            p = re.sub("#+", "#", i)
            p1 = re.sub("([^#]+)#.*", "\\1", p)
            p2 = re.sub("([^#]+)#(.*)", "# \\2", p)
            if not re.search("#", p):
                p2 = '';
            p2 = re.sub("[^#]+[#|$]", "", p2)
            p2 = re.sub(",", " ", p2)
            # Placeholder protects '=' inside comments from the stripping below;
            # it is turned back into '=' in the final per-param pass.
            p2 = re.sub("=", " ISEQUALTOVALUETHIS ", p2)
            p1 = re.sub("(.*)=(.*)", "\\1", p1)
            # p1 = re.sub("#.*", "", p)
            # self.view.insert(edit, sel.begin(), p1)
            p2=re.sub("\s+"," ",p2)
            p2=re.sub("^\s+"," ",p2)
            p2=re.sub("\s+\Z","",p2)
            p1 = re.sub("(.*),", "\\1 ", p1)
            # self.view.insert(edit, sel.begin(), p2)
            # if p2 != '':
            p = p1 + p2 + ","
            # else :
            # p = p1
            # p = re.sub('(.*)#(.*)[,](.*)', "\\1#\\2 \\3", p)
            params_txt += ' ' + p
        # Strip everything up to and including 'function(' plus surrounding noise.
        params_txt=re.sub(".*function\s*\(","",params_txt)
        params_txt=re.sub("^\s+","",params_txt)
        params_txt=re.sub("\s+\Z","",params_txt)
        params_txt=re.sub(',\s+', ",", params_txt)
        params_txt=re.sub('\s+=', "=", params_txt)
        params_txt=re.sub('=\s+', "=", params_txt)
        params_txt=re.sub('\s+', " ", params_txt)
        params_txt=re.sub('"', "", params_txt)
        params_txt=re.sub(',$', "", params_txt)
        params_txt=re.sub('{$', "", params_txt)
        params_txt=re.sub('\)$', "", params_txt)
        params_txt=re.sub('^,', "", params_txt)
        params_txt=re.sub(',#', " #", params_txt)
        #### adding trailing , so they are the same
        params_txt = params_txt + ","
        ### take out anything like ifelse() or c()
        params_txt=re.sub('=[^=]+?\(\S+?\),', ",", params_txt)
        params_txt=re.sub('=[^=]+?\{\S+?\},', ",", params_txt)
        # params_txt=re.sub(',$', "", params_txt)
        # params_txt=re.sub('#*(.*),*(.*)\n', "\\1;\\2", params_txt)
        params_txt=re.sub('(.*), *[#]*(.*)', "\\1 #\\2,", params_txt)
        params_txt=re.sub('\n', " ", params_txt)
        # self.view.insert(edit, sel.begin(), params_txt)
        # Splitting on commas
        params = params_txt.split(',')
        snippet = "#' @title <brief desc>\n#'\n#' @description <full description>\n"
        for p in params:
            # self.view.insert(edit, sel.begin(), p + "\n")
            # Added if statement if not empty
            if p != '':
                p = re.sub('(.*)=(.*)\s*,\s*#\s*(.*)', "\\1 \\3", p);
                p = re.sub("^\s*","",p)
                p = re.sub("#+"," ",p)
                p = re.sub('(.*)=(.*)', "\\1", p);
                p = re.sub("\s+"," ",p)
                p = re.sub("ISEQUALTOVALUETHIS", "=", p)
                snippet += "#' @param %s\n" % p
        snippet += "#' @export\n#' @keywords\n#' @seealso\n#' @return\n#' @aliases\n#' @examples \dontrun{\n#'\n#'}\n"
        # Insert the skeleton just above the selected function definition.
        self.view.insert(edit, sel.begin(), snippet)
class AppSwitcher(sublime_plugin.WindowCommand):
    """Quick-panel command for choosing which terminal app receives commands."""

    def show_quick_panel(self, options, done):
        # Small delay works around a Sublime quirk where a panel opened
        # synchronously from a command may not appear.
        sublime.set_timeout(lambda: self.window.show_quick_panel(options, done), 10)

    def run(self):
        plat = sublime.platform()
        if plat == 'osx':
            self.app_list = ["Terminal", "iTerm"]   # values stored in settings
            pop_string = ["Terminal", "iTerm 2"]    # labels shown in the panel
        else:
            sublime.error_message("Only OSX is supported - sorry!")
        self.show_quick_panel([list(z) for z in zip(self.app_list, pop_string)], self.on_done)

    def on_done(self, action):
        # action is the selected index, or -1 when the panel was cancelled.
        if action==-1: return
        settings = sublime.load_settings(settingsfile)
        plat = sublime.platform()
        if plat == 'osx':
            plat_settings = settings.get('osx')
            plat_settings['App'] = self.app_list[action]
            settings.set('osx', plat_settings)
        else:
            sublime.error_message("Only OSX is supported - sorry!")
        sublime.save_settings(settingsfile)
| 37.798206 | 250 | 0.490331 |
a336f68b7be6e30606f1cd6ffe5898633f85b910 | 576 | py | Python | findPermsRecursive.py | akakream/crackingInterviews | decc4a85e5bfae996aa73eaa45a640eb6d80e24f | [
"MIT"
] | null | null | null | findPermsRecursive.py | akakream/crackingInterviews | decc4a85e5bfae996aa73eaa45a640eb6d80e24f | [
"MIT"
] | null | null | null | findPermsRecursive.py | akakream/crackingInterviews | decc4a85e5bfae996aa73eaa45a640eb6d80e24f | [
"MIT"
] | null | null | null | test1 = 'abcd'
def findPerms(string):
    """Return every permutation of *string* as a list of strings.

    Recursively permutes all but the last character, then splices the last
    character into every position of each shorter permutation.  The output
    order is identical to the original implementation's insertion order.
    """
    if len(string) == 1:
        return list(string)
    last_char = string[-1]
    perms = []
    for partial in findPerms(string[:-1]):
        for pos in range(len(partial) + 1):
            perms.append(partial[:pos] + last_char + partial[pos:])
    return perms
# Demo: enumerate all permutations of the sample string and report the count.
result = findPerms(test1)
print(f"result: {result}")
print(f"len(result): {len(result)}")
| 19.2 | 57 | 0.590278 |
ad83a03f0f93292c6acbc16c6b236a2bcc0b05cc | 425 | py | Python | systemcheck/systems/__init__.py | team-fasel/SystemCheck | cdc18ee0c8bab67f049228d28d03578babfafb3d | [
"MIT"
] | 2 | 2017-07-07T13:57:52.000Z | 2020-12-11T12:40:55.000Z | systemcheck/systems/__init__.py | team-fasel/systemcheck | cdc18ee0c8bab67f049228d28d03578babfafb3d | [
"MIT"
] | 1 | 2017-02-23T19:30:12.000Z | 2017-02-28T19:15:44.000Z | systemcheck/systems/__init__.py | team-fasel/systemcheck | cdc18ee0c8bab67f049228d28d03578babfafb3d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
""" System Specific Components of the systemcheck Application
Each system type will have a dedicated folder that contains the system specific functions.
"""
# define authorship information
__authors__ = ['Lars Fasel']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2017'
__license__ = 'GNU AGPLv3'
from . import ABAP
from . import generic
| 21.25 | 90 | 0.701176 |
af4b03099e495ced9fe312ca0d2c0b0c05288c61 | 32,216 | py | Python | line.py | mhhm2005eg/CarND-Advanced-Lane-Lines | 1f571e4714df0dcca21fbf2b09b5af73caddb8f4 | [
"MIT"
] | null | null | null | line.py | mhhm2005eg/CarND-Advanced-Lane-Lines | 1f571e4714df0dcca21fbf2b09b5af73caddb8f4 | [
"MIT"
] | null | null | null | line.py | mhhm2005eg/CarND-Advanced-Lane-Lines | 1f571e4714df0dcca21fbf2b09b5af73caddb8f4 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
import matplotlib
#matplotlib.use('Agg') # None interactive backend
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
from matplotlib import cm
import matplotlib.pyplot as plt
import matplotlib as mpl
import math
from common import DEBUG, draw_points, draw_sub_plots, hough_lines, USE_THREADING, TEST_MODE, LANE_PERCENTAGE_SCANE
from collections import OrderedDict
# Define a class to receive the characteristics of each line detection
class Line(object):
lane_width = 3.7
dashed_line_length = 3
# Define conversions in x and y from pixels space to meters
ym_per_pix = (30 / 720) # meters per pixel in y dimension
xm_per_pix = (3.7 / 700) # meters per pixel in x dimension
lane_width_pix = 700
count = 0
ploty = 0
left_fit = []
right_fit = []
def __init__(self, caller, binary_warped=None, image=None, nwindows = 9, margin = 75, minpix = 300):
    """Per-frame lane-line state holder.

    caller        -- owning pipeline object; must expose .image and
                     class-level img_size / Minv / combined (used by methods)
    binary_warped -- birds-eye binary image the lane pixels are searched in
    image         -- accepted but unused here; the frame is read from
                     caller.image instead (presumably legacy — verify callers)
    nwindows      -- number of vertically-stacked sliding search windows
    margin        -- half-width of each search window, in pixels
    minpix        -- minimum nonzero pixels needed to re-center a window
    """
    # was the line detected in the last iteration?
    self.detected = False
    # x values of the last n fits of the line
    self.recent_xfitted = []
    #average x values of the fitted line over the last n iterations
    self.bestx = None
    #polynomial coefficients averaged over the last n iterations
    self.best_fit = None
    #polynomial coefficients for the most recent fit
    self.current_fit = [np.array([False])]
    #radius of curvature of the line in some units
    self.radius_of_curvature = None
    #distance in meters of vehicle center from the line
    self.line_base_pos = None
    #difference in fit coefficients between last and new fits
    self.diffs = np.array([0,0,0], dtype='float')
    #x values for detected line pixels
    self.allx = None
    #y values for detected line pixels
    self.ally = None
    self.binary_warped = binary_warped
    self.nwindows = nwindows
    self.margin = margin
    self.minpix = minpix
    # Per-window pixel-centroid offsets, reused when one side loses track.
    self.leftx_delta = 0
    self.lefty_delta = 0
    self.rightx_delta = 0
    self.righty_delta = 0
    self.left_fitx = np.array([], dtype=int)
    self.right_fitx = np.array([], dtype=int)
    self.leftx = []
    self.rightx = []
    self.caller = caller
    self.image = self.caller.image
    self.left_curverad = 0
    self.right_curverad = 0
    self.window_height = 0
    # Class-level y-coordinate vector, built once from the first instance's
    # warped-image height and shared by all later instances.
    if self.__class__.count == 0:
        self.__class__.ploty = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
    self.__class__.count += 1
def _find_lane_pixels(self, binary_warped, base=LANE_PERCENTAGE_SCANE):  # base: fraction of the image height used for the seed histogram (e.g. 0.5)
    """Locate left/right lane-window centroids with a sliding-window scan.

    Seeds both lanes from a column histogram of the lower part of the
    warped binary image, then walks `self.nwindows` windows upward,
    re-centering each on the brightest column/row in its patch.  When one
    side finds too few pixels, it is extrapolated from the other side's
    last motion.  Returns (leftx, lefty, rightx, righty, out_img) where
    the first four are per-window centroid coordinates (NOT raw pixel
    lists) and out_img is an annotated visualization.
    NOTE(review): `np.int` is removed in NumPy >= 1.24 — needs `int`.
    """
    # Take a histogram of the bottom half of the image
    histogram = np.sum(binary_warped[np.int(binary_warped.shape[0]*base):, :], axis=0)
    # Create an output image to draw on and visualize the result
    out_img = np.dstack((binary_warped, binary_warped, binary_warped))*255
    # Find the peak of the left and right halves of the histogram
    # These will be the starting point for the left and right lines
    midpoint = np.int(histogram.shape[0] // (2))
    if histogram[:midpoint].any():
        leftx_base = np.argmax(histogram[:midpoint])
        left_found = True
    else:
        left_found = False
    if histogram[midpoint:].any():
        rightx_base = np.argmax(histogram[midpoint:]) + midpoint
        right_found = True
    else:
        right_found = False
    # When only one side has a histogram peak, place the other a nominal
    # lane-width away; when neither does, fall back to the quarter points.
    if (left_found)&(not right_found):
        rightx_base = leftx_base + Line.lane_width_pix
    elif (not left_found) & ( right_found):
        leftx_base = rightx_base - Line.lane_width_pix
    elif (not left_found) & (not right_found):
        leftx_base = np.int(histogram.shape[0] // (4))
        rightx_base = np.int(3*histogram.shape[0] // (4))
    #print(leftx_base, rightx_base, binary_warped.shape[1] //2)
    left_lane_indsx = list()
    left_lane_indsy = list()
    right_lane_indsx = list()
    right_lane_indsy = list()
    # HYPERPARAMETERS
    # Choose the number of sliding windows
    nwindows = self.nwindows
    # Set the width of the windows +/- margin
    margin = self.margin
    # Set minimum number of pixels found to recenter window
    minpix = self.minpix
    # Set height of windows - based on nwindows above and image shape
    window_height = np.int(binary_warped.shape[0] // (nwindows))
    self.window_height = window_height
    # Identify the x and y positions of all nonzero pixels in the image
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Current positions to be updated later for each window in nwindows
    leftx_current = leftx_base
    rightx_current = rightx_base
    # Create empty lists to receive left and right lane pixel indices
    left_lane_inds = []
    right_lane_inds = []
    lefty_current = binary_warped.shape[0] - 1
    righty_current = binary_warped.shape[0] - 1
    if 0:  # disabled debug seeding/plotting
        left_lane_indsx.append(leftx_current)
        left_lane_indsy.append(lefty_current)
        right_lane_indsx.append(rightx_current)
        right_lane_indsy.append(righty_current)
        plt.plot(leftx_current, lefty_current, '*')
        plt.plot(rightx_current, righty_current, '*')
    X_MAX = self.caller.__class__.img_size[0] - 1
    Y_MAX = self.caller.__class__.img_size[1] - 1
    #print(X_MAX)
    # Step through the windows one by one
    for window in range(nwindows):
        # Identify window boundaries in x and y (and right and left)
        win_y_low = binary_warped.shape[0] - int(window + 1) * window_height
        win_y_high = binary_warped.shape[0] - window * window_height
        ### TO-DO: Find the four below boundaries of the window ###
        win_xleft_low = int(leftx_current - margin) # Update this
        win_xleft_high = int(leftx_current + margin) # Update this
        win_xright_low = int(rightx_current - margin) # Update this
        win_xright_high = int(rightx_current + margin) # Update this
        if 0:  # disabled clamping to the image edge
            if (win_xleft_high > X_MAX):
                win_xleft_high = X_MAX
            if (win_xright_high > X_MAX):
                win_xright_high = X_MAX
        if not DEBUG:
            # Draw the windows on the visualization image
            cv2.rectangle(out_img, (win_xleft_low, win_y_low),
                          (win_xleft_high, win_y_high), (0, 255, 0), 2)
            cv2.rectangle(out_img, (win_xright_low, win_y_low),
                          (win_xright_high, win_y_high), (0, 255, 0), 2)
        # plt.imshow(out_img)
        # plt.show()
        window_left_data = binary_warped[win_y_low:win_y_high, win_xleft_low:win_xleft_high]
        window_right_data = binary_warped[win_y_low:win_y_high, win_xright_low:win_xright_high]
        # plt.imshow(window_left_data, 'gray')
        # plt.show()
        ### TO-DO: Identify the nonzero pixels in x and y within the window ###
        good_left_inds = window_left_data.nonzero()
        good_right_inds = window_right_data.nonzero()
        # Append these indices to the lists
        left_lane_inds.append(good_left_inds)
        right_lane_inds.append(good_right_inds)
        # print(len(good_left_inds[0]), len(good_right_inds[0]))
        # Re-center each side on the brightest column/row of its patch.
        if len(good_left_inds[0]) >= minpix:
            histogram_left_x = np.sum(window_left_data, axis=0)
            histogram_left_y = np.sum(window_left_data, axis=1)
            self.leftx_delta = np.argmax(histogram_left_x)
            self.lefty_delta = np.argmax(histogram_left_y)
            leftx_current += self.leftx_delta + win_xleft_low - leftx_current
            lefty_current += self.lefty_delta + win_y_low - lefty_current
            left_x_OK = True
        else:
            left_x_OK = False
            pass
            #leftx_current += self.leftx_delta
            #lefty_current += self.lefty_delta
        if len(good_right_inds[0]) >= minpix:
            histogram_right_x = np.sum(window_right_data, axis=0)
            histogram_right_y = np.sum(window_right_data, axis=1)
            self.rightx_delta = np.argmax(histogram_right_x)
            self.righty_delta = np.argmax(histogram_right_y)
            rightx_current += self.rightx_delta + win_xright_low - rightx_current
            righty_current += self.righty_delta + win_y_low - righty_current
            right_x_OK = True
        else:
            pass
            right_x_OK = False
            #rightx_current += self.rightx_delta
            #righty_current += self.righty_delta
        # Side that lost track mirrors the other side's motion; when both
        # lost track, both coast on their previous deltas.
        if left_x_OK & (not right_x_OK):
            rightx_current += self.leftx_delta - margin
            righty_current += -self.lefty_delta - self.window_height
            self.rightx_delta = self.leftx_delta
            self.righty_delta = self.lefty_delta
            #print(-1)
        elif (not left_x_OK) & ( right_x_OK):
            leftx_current += self.rightx_delta - margin
            lefty_current += -self.righty_delta - self.window_height
            self.leftx_delta = self.rightx_delta
            self.lefty_delta = self.righty_delta
            #print(1)
        elif (not left_x_OK) & (not right_x_OK):
            pass
            rightx_current += self.rightx_delta - margin
            righty_current += -self.righty_delta - self.window_height
            leftx_current += self.leftx_delta - margin
            lefty_current += -self.lefty_delta- self.window_height
            #print(0)
        else:
            pass
            #print(2)
        #righty_current = min(righty_current, binary_warped.shape[0]-1)
        #lefty_current = min(lefty_current, binary_warped.shape[0]-1)
        if 0:  # disabled clamping of centroids to the image edge
            if (leftx_current > X_MAX):
                leftx_current = X_MAX
            if (rightx_current > X_MAX):
                rightx_current = X_MAX
        left_lane_indsx.append(leftx_current)
        left_lane_indsy.append(lefty_current)
        right_lane_indsx.append(rightx_current)
        right_lane_indsy.append(righty_current)
        if (DEBUG) & (TEST_MODE[0] == "IMAGE"):
            plt.plot(leftx_current, lefty_current, '*')
            plt.plot(rightx_current, righty_current, '*')
            plt.imshow(out_img)
            plt.show()
    # Extract left and right line pixel positions
    leftx = np.array(left_lane_indsx).astype(int) # nonzerox[left_lane_inds]
    lefty = np.array(left_lane_indsy).astype(int) # nonzeroy[left_lane_inds]
    rightx = np.array(right_lane_indsx).astype(int) # nonzerox[right_lane_inds]
    righty = np.array(right_lane_indsy).astype(int) # nonzeroy[right_lane_inds]
    #out_img[lefty, leftx] = [255,0,0]
    #out_img[righty, rightx] = [255,0,0]
    if not DEBUG & (TEST_MODE[0] == "IMAGE"):
        # Mark the window centroids on the visualization image.
        for center in zip(leftx, lefty):
            cv2.circle(out_img, center, 5, (255,255,0), thickness=3, lineType=12, shift=0)
        for center in zip(rightx, righty):
            cv2.circle(out_img, center, 5, (255,0,255), thickness=3, lineType=12, shift=0)
    #print(leftx, lefty)
    #print(rightx, righty)
    #print("writing ", leftx)
    self.leftx = leftx
    self.rightx = rightx
    return leftx, lefty, rightx, righty, out_img
def _fit_polynomial(self, binary_warped=None):
    """Fit second-order polynomials to both lanes and cache the results.

    When the caller signals that a re-synchronisation is NOT required,
    delegates to search_around_poly() (search near the previous fit).
    Stores per-frame x samples in self.left_fitx/right_fitx and the
    coefficients in the class attributes left_fit/right_fit, then
    returns the annotated visualization image.
    """
    if binary_warped is None:
        binary_warped = self.binary_warped
    #if (self.__class__.count > 1) & (TEST_MODE[0] == "VIDEO") & (not (self.__class__.left_fit == [])) : #check if there is previous good fram
    if not self.caller.__class__.poly_resynch:
        return self.search_around_poly(binary_warped)
    # Find our lane pixels first
    leftx, lefty, rightx, righty, out_img = self._find_lane_pixels(binary_warped)
    order = 2
    ### TO-DO: Fit a second order polynomial to each using `np.polyfit` ###
    left_fit = np.polyfit(lefty, leftx, order)
    right_fit = np.polyfit(righty, rightx, order)
    # Generate x and y values for plotting
    ploty = self.__class__.ploty #np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
    try:
        self.left_fitx = left_fit[0] * ploty ** order + left_fit[1] * ploty**(order-1) + left_fit[2]* ploty**(order-2) #+ left_fit[3]* ploty**(order-3)
        self.right_fitx = right_fit[0] * ploty ** order + right_fit[1] * ploty**(order-1) + right_fit[2]* ploty**(order-2)#+ right_fit[3]* ploty**(order-3)
    except TypeError:
        # Avoids an error if `left` and `right_fit` are still none or incorrect
        print('The function failed to fit a line!')
        self.left_fitx = 1 * ploty ** 2 + 1 * ploty
        self.right_fitx = 1 * ploty ** 2 + 1 * ploty
    HANDLE = False
    if False :#order == 2: disabled experiment: solve where the right-lane parabola exits the image
        a = right_fit[0]
        b = right_fit[1]
        c = right_fit[2] - self.caller.img_size[0]
        d = b ** 2 - 4 * a * c
        y1 = (-b + math.sqrt(d)) / (2 * a)
        y2 = (-b - math.sqrt(d)) / (2 * a)
        print(y1, y2)
        YMAX = max(y1, y2)
        if (YMAX < self.caller.img_size[1]) :
            HANDLE = False
        else:
            HANDLE = True
            #ploty = np.linspace(YMAX + 1, self.caller.img_size[1] - 1)
            #self.left_fitx = left_fit[0] * ploty ** 2 + left_fit[1] * ploty + left_fit[2]
            #self.right_fitx = right_fit[0] * ploty ** 2 + right_fit[1] * ploty + right_fit[2]
    ## Visualization ##
    # Colors in the left and right lane regions
    if (TEST_MODE[0] == "IMAGE"):
        try:
            out_img[ploty.astype(int), self.left_fitx.astype(int)] = [255, 0, 0]
            out_img[ploty.astype(int), self.right_fitx.astype(int)] = [0, 0, 255]
            # Plots the left and right polynomials on the lane lines
            plt.plot(self.left_fitx, ploty, color='red')
            plt.plot(self.right_fitx, ploty, color='red')
        except:
            pass
    if (not DEBUG) & (TEST_MODE[0] == "IMAGE"):
        try:
            plt.plot(self.right_fitx, ploty)
            plt.plot(self.left_fitx, ploty)
            #plt.legend()
            #plt.show()
        except:
            pass
    # Cache the fits at class level so the next frame can search around them.
    self.__class__.left_fit = left_fit
    self.__class__.right_fit = right_fit
    return out_img
def find_lane_pixels(self, binary_warped):
    """Classic sliding-window lane-pixel extraction (fixed hyperparameters).

    Unlike _find_lane_pixels, this returns ALL nonzero pixel coordinates
    that fall inside the windows, not per-window centroids.
    NOTE(review): `np.int` is removed in NumPy >= 1.24 — needs `int`.
    """
    # Take a histogram of the bottom half of the image
    histogram = np.sum(binary_warped[binary_warped.shape[0] // 2:, :], axis=0)
    # Create an output image to draw on and visualize the result
    out_img = np.dstack((binary_warped, binary_warped, binary_warped))
    # Find the peak of the left and right halves of the histogram
    # These will be the starting point for the left and right lines
    midpoint = np.int(histogram.shape[0] // 2)
    leftx_base = np.argmax(histogram[:midpoint])
    rightx_base = np.argmax(histogram[midpoint:]) + midpoint
    # HYPERPARAMETERS
    # Choose the number of sliding windows
    nwindows = 9
    # Set the width of the windows +/- margin
    margin = 100
    # Set minimum number of pixels found to recenter window
    minpix = 50
    # Set height of windows - based on nwindows above and image shape
    window_height = np.int(binary_warped.shape[0] // nwindows)
    self.window_height = window_height
    # Identify the x and y positions of all nonzero pixels in the image
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Current positions to be updated later for each window in nwindows
    leftx_current = leftx_base
    rightx_current = rightx_base
    # Create empty lists to receive left and right lane pixel indices
    left_lane_inds = []
    right_lane_inds = []
    # Step through the windows one by one
    for window in range(nwindows):
        # Identify window boundaries in x and y (and right and left)
        win_y_low = binary_warped.shape[0] - (window + 1) * window_height
        win_y_high = binary_warped.shape[0] - window * window_height
        win_xleft_low = leftx_current - margin
        win_xleft_high = leftx_current + margin
        win_xright_low = rightx_current - margin
        win_xright_high = rightx_current + margin
        # Draw the windows on the visualization image
        cv2.rectangle(out_img, (win_xleft_low, win_y_low),
                      (win_xleft_high, win_y_high), (0, 255, 0), 2)
        cv2.rectangle(out_img, (win_xright_low, win_y_low),
                      (win_xright_high, win_y_high), (0, 255, 0), 2)
        # Identify the nonzero pixels in x and y within the window #
        good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
                          (nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
        good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
                           (nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
        # Append these indices to the lists
        left_lane_inds.append(good_left_inds)
        right_lane_inds.append(good_right_inds)
        # If you found > minpix pixels, recenter next window on their mean position
        if len(good_left_inds) > minpix:
            leftx_current = np.int(np.mean(nonzerox[good_left_inds]))
        if len(good_right_inds) > minpix:
            rightx_current = np.int(np.mean(nonzerox[good_right_inds]))
    # Concatenate the arrays of indices (previously was a list of lists of pixels)
    try:
        left_lane_inds = np.concatenate(left_lane_inds)
        right_lane_inds = np.concatenate(right_lane_inds)
    except ValueError:
        # Avoids an error if the above is not implemented fully
        pass
    # Extract left and right line pixel positions
    leftx = nonzerox[left_lane_inds]
    lefty = nonzeroy[left_lane_inds]
    rightx = nonzerox[right_lane_inds]
    righty = nonzeroy[right_lane_inds]
    self.leftx = leftx
    self.rightx = rightx
    return leftx, lefty, rightx, righty, out_img
def fit_polynomial(self, binary_warped=None):
    """Fit second-order lane polynomials from find_lane_pixels() output.

    Assigns instance attributes left_fit/right_fit (shadowing the class
    attributes of the same name — presumably intentional; verify callers)
    and returns the colorized visualization image.
    """
    if binary_warped is None:
        binary_warped = self.binary_warped
    # Find our lane pixels first
    leftx, lefty, rightx, righty, out_img = self.find_lane_pixels(binary_warped)
    # Fit a second order polynomial to each using `np.polyfit`
    left_fit = np.polyfit(lefty, leftx, 2)
    right_fit = np.polyfit(righty, rightx, 2)
    # Generate x and y values for plotting
    ploty = self.ploty
    try:
        self.left_fitx = left_fit[0] * ploty ** 2 + left_fit[1] * ploty + left_fit[2]
        self.right_fitx = right_fit[0] * ploty ** 2 + right_fit[1] * ploty + right_fit[2]
    except TypeError:
        # Avoids an error if `left` and `right_fit` are still none or incorrect
        print('The function failed to fit a line!')
        self.left_fitx = 1 * ploty ** 2 + 1 * ploty
        self.right_fitx = 1 * ploty ** 2 + 1 * ploty
    ## Visualization ##
    # Colors in the left and right lane regions
    out_img[lefty, leftx] = [255, 0, 0]
    out_img[righty, rightx] = [0, 0, 255]
    # Plots the left and right polynomials on the lane lines
    if DEBUG:
        plt.plot(self.left_fitx, ploty, color='yellow')
        plt.plot(self.right_fitx, ploty, color='yellow')
    self.left_fit = left_fit
    self.right_fit = right_fit
    return out_img
def project(self, annotate1="", annotate2="", pos1=0.5, pos2=0.5):
    """Project the detected lane back onto the original camera image.

    Fills the polygon between the two fitted lane polynomials, warps it
    from birds-eye view back with the inverse perspective matrix, blends
    it over the original frame and draws two annotation strings.

    annotate1/annotate2 -- text lines drawn onto the result
    pos1/pos2           -- horizontal positions of the texts, as a fraction
                           of the image height (kept as in the original —
                           presumably should be width; verify visually)
    Returns the annotated RGB frame as a uint8 numpy array.
    """
    # Create an image to draw the lines on
    warp_zero = np.zeros_like(self.binary_warped).astype(np.uint8)
    color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
    #Saturation_image = self.caller.saturation.get(binary=True, masked=True)
    combined = self.caller.combined
    lane_image = combined
    lane_image = hough_lines(lane_image * 255, min_line_len=20, max_line_gap=1)
    color_image = np.dstack((lane_image, np.zeros_like(lane_image), np.zeros_like(lane_image)))
    color_nonzero = color_image.nonzero()
    # Recast the x and y points into usable format for cv2.fillPoly()
    pts_left = np.array([np.transpose(np.vstack([self.left_fitx, self.ploty]))])
    pts_right = np.array([np.flipud(np.transpose(np.vstack([self.right_fitx, self.ploty])))])
    pts = np.hstack((pts_left, pts_right))
    # Draw the lane onto the warped blank image
    cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0))
    # Warp the blank back to original image space using inverse perspective matrix (Minv)
    newwarp = cv2.warpPerspective(color_warp, self.caller.__class__.Minv, (self.image.shape[1], self.image.shape[0]))
    # Combine the result with the original image; lane-marking pixels are
    # blanked first so the overlay is not diluted there.
    temp_image = np.copy(self.image)
    temp_image[color_nonzero] = 0
    result = cv2.addWeighted(temp_image, 1, newwarp, 0.3, 0)
    if not USE_THREADING:
        fig0 = plt.figure(0)
        # plt.imshow(result).make_image()
        if 0:  # disabled matplotlib-based annotation (PIL is used instead)
            plt.annotate(annotate1, (10, 50), color='w', weight='bold', size=14)
            plt.annotate(annotate2, (10, 100), color='w', weight='bold', size=14)
        plt.imshow(result)
    #font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSerif.ttf", 40)
    font = ImageFont.truetype("arial.ttf", 40)
    img = Image.fromarray(result)
    draw = ImageDraw.Draw(img)
    draw.text((img.size[1]*pos1, 10), annotate1, (255, 255, 255), font=font)
    draw.text((img.size[1]*pos2, 80), annotate2, (255, 255, 255), font=font)
    draw = ImageDraw.Draw(img)
    #img.save("a_test.png")
    # BUG FIX: np.fromstring on binary data is deprecated (removed in modern
    # NumPy).  frombuffer is the drop-in replacement; .copy() keeps the array
    # writable, matching fromstring's behavior.
    im_arr = np.frombuffer(img.tobytes(), dtype=np.uint8).copy()
    result = im_arr.reshape((img.size[1], img.size[0], 3))
    return result
def measure_curvature_meter(self):
    """
    Compute the radius of curvature (in meters) of both lane lines.

    Refits each line in metric space, caches the results on
    self.left_curverad / self.right_curverad and returns the pair
    (left_radius, right_radius).
    """
    # Reverse so index 0 corresponds to the top of the image.
    xs_left = np.array(self.leftx[::-1])
    xs_right = np.array(self.rightx[::-1])
    count = len(xs_left)
    ploty = np.linspace(0, count - 1, count) * self.caller.ysize / count
    y_meters = ploty * self.ym_per_pix
    # Refit both lines with x and y converted to meters.
    fit_left = np.polyfit(y_meters, xs_left * self.xm_per_pix, 2)
    fit_right = np.polyfit(y_meters, xs_right * self.xm_per_pix, 2)
    # Evaluate at the bottom of the image (largest y).
    y_eval = np.max(ploty * self.ym_per_pix)

    def radius(fit):
        # R = (1 + (2Ay + B)^2)^(3/2) / |2A| for x = Ay^2 + By + C.
        a, b = fit[0], fit[1]
        return ((1 + (2 * a * y_eval + b) ** 2) ** (3 / 2)) / abs(2 * a)

    left_curverad = radius(fit_left)
    right_curverad = radius(fit_right)
    self.left_curverad = left_curverad
    self.right_curverad = right_curverad
    if TEST_MODE[0] == "IMAGE":
        print("Curves: ", left_curverad, right_curverad)
    return left_curverad, right_curverad
def measure_curvature_real(self):
    """
    Calculate the radius of curvature (in meters) of both lane lines.

    Returns
    -------
    tuple(float, float)
        (left_curverad, right_curverad).
    """
    # Reverse to match top-to-bottom in y.
    leftx = np.array(self.leftx[::-1])
    rightx = np.array(self.rightx[::-1])
    ploty = np.linspace(0, len(leftx) - 1, len(leftx)) * self.window_height
    plotyy = ploty * self.ym_per_pix
    # BUGFIX: removed the unconditional plt.plot()/plt.show() debug calls
    # (they blocked every call with a GUI window) and the unused local
    # ym_per_pix/xm_per_pix shadows -- the instance attributes are used.
    left_fit_cr = np.polyfit(plotyy, leftx * self.xm_per_pix, 2)
    right_fit_cr = np.polyfit(plotyy, rightx * self.xm_per_pix, 2)
    # Evaluate at the bottom of the image (maximum y).
    # NOTE(review): y_eval is in pixel-scaled units while the fits are in
    # meters; measure_curvature_meter scales y by ym_per_pix here --
    # confirm which is intended.
    y_eval = np.max(ploty)
    # R = (1 + (2Ay + B)^2)^(3/2) / |2A|
    left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval + left_fit_cr[1]) ** 2)
                     ** (3 / 2)) / abs(2 * left_fit_cr[0])
    right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval + right_fit_cr[1]) ** 2)
                      ** (3 / 2)) / abs(2 * right_fit_cr[0])
    return left_curverad, right_curverad
def get_offset(self):
    """
    Return the car's lateral offset from the lane center, in meters.

    Positive when the lane center sits to the right of the image center.
    """
    image_center = self.caller.xsize / 2
    # Lane center from the bottom-most fitted x of each line.
    lane_center = (self.right_fitx[-1] + self.left_fitx[-1]) / 2
    return float((lane_center - image_center) * self.xm_per_pix)
def fit_poly(self, img_shape, leftx, lefty, rightx, righty):
    """
    Fit a 2nd-order polynomial x = f(y) to each lane line.

    Falls back to the previously cached fit for a side that has no
    activated pixels.  The new fits are stored on self.left_fit /
    self.right_fit.

    Returns
    -------
    tuple
        (left_fitx, right_fitx, ploty) -- fitted x values for each line,
        sampled at every row of the image.
    """
    left_fit = np.polyfit(lefty, leftx, 2) if leftx.any() else self.left_fit
    right_fit = np.polyfit(righty, rightx, 2) if rightx.any() else self.right_fit
    # Sample both polynomials over the full image height.
    ploty = np.linspace(0, img_shape[0] - 1, img_shape[0])
    left_fitx = np.polyval(left_fit, ploty)
    right_fitx = np.polyval(right_fit, ploty)
    self.left_fit = left_fit
    self.right_fit = right_fit
    return left_fitx, right_fitx, ploty
def search_around_poly(self, binary_warped):
    """
    Refine the lane-line fits by searching only within +/- ``margin``
    pixels of the polynomials fitted on the previous frame.

    Updates self.left_fit(x)/right_fit(x) and self.leftx/rightx, and
    returns a visualization image with the search corridors blended in.
    """
    # Width of the search corridor around each previous polynomial.
    margin = 50
    # Grab activated pixels.
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Sample the previous-frame polynomials; 10 points is enough to build
    # the region-of-interest polygons below.
    ploty = np.linspace(0, binary_warped.shape[0] - 1, 10)
    left_fitx = self.left_fit[0] * ploty ** 2 + self.left_fit[1] * ploty + self.left_fit[2]
    right_fitx = self.right_fit[0] * ploty ** 2 + self.right_fit[1] * ploty + self.right_fit[2]
    left_fitx_low = left_fitx - margin    # left corridor, inner boundary
    left_fitx_high = left_fitx + margin   # left corridor, outer boundary
    right_fitx_low = right_fitx - margin  # right corridor, inner boundary
    right_fitx_high = right_fitx + margin # right corridor, outer boundary
    # BUGFIX: the activated-pixel masks used for coloring were left as
    # None (and then used as fancy indices).  Compute them as boolean
    # masks of pixels inside each corridor.
    left_poly_x = (self.left_fit[0] * nonzeroy ** 2
                   + self.left_fit[1] * nonzeroy + self.left_fit[2])
    right_poly_x = (self.right_fit[0] * nonzeroy ** 2
                    + self.right_fit[1] * nonzeroy + self.right_fit[2])
    left_lane_inds = ((nonzerox > left_poly_x - margin)
                      & (nonzerox < left_poly_x + margin))
    right_lane_inds = ((nonzerox > right_poly_x - margin)
                       & (nonzerox < right_poly_x + margin))
    # Build closed polygons for each corridor and mask the warped image.
    plotyy = np.concatenate((ploty, ploty[::-1]), axis=None)
    plotxx = np.concatenate((left_fitx_low, left_fitx_high[::-1]), axis=None)
    zip_xx_yy = [list(zip(plotxx, plotyy))]
    vertices_left = np.array(zip_xx_yy, dtype=np.int32)
    image_interest_left = self.caller.region_of_interest(binary_warped, vertices_left)
    plotxx = np.concatenate((right_fitx_low, right_fitx_high[::-1]), axis=None)
    zip_xx_yy = [list(zip(plotxx, plotyy))]
    vertices_right = np.array(zip_xx_yy, dtype=np.int32)
    image_interest_right = self.caller.region_of_interest(binary_warped, vertices_right)
    if TEST_MODE[0] == "IMAGE":
        plt.imshow(binary_warped, 'gray')
        draw_points(*zip_xx_yy)
        plt.show()
        # Explicit mapping instead of the previous eval() over variable
        # names -- same plot, no dynamic evaluation.
        img_dic = OrderedDict([
            ("binary_warped", binary_warped),
            ("image_interest_left", image_interest_left),
            ("image_interest_right", image_interest_right),
        ])
        draw_sub_plots(img_dic, col_no=3, raw_no=1, title="hh")
    # Extract left and right line pixel positions from the masked images.
    leftx = image_interest_left.nonzero()[1]
    lefty = image_interest_left.nonzero()[0]
    rightx = image_interest_right.nonzero()[1]
    righty = image_interest_right.nonzero()[0]
    # Fit new polynomials (also caches them on self).
    left_fitx, right_fitx, ploty = self.fit_poly(binary_warped.shape, leftx, lefty, rightx, righty)
    ## Visualization ##
    out_img = np.dstack((binary_warped, binary_warped, binary_warped)) * 255
    window_img = np.zeros_like(out_img)
    # Color the activated pixels inside each corridor.
    out_img[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
    out_img[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
    # Shade the search corridors around the new fits.
    left_line_window1 = np.array([np.transpose(np.vstack([left_fitx - margin, ploty]))])
    left_line_window2 = np.array([np.flipud(np.transpose(np.vstack([left_fitx + margin,
                                                                    ploty])))])
    left_line_pts = np.hstack((left_line_window1, left_line_window2))
    right_line_window1 = np.array([np.transpose(np.vstack([right_fitx - margin, ploty]))])
    right_line_window2 = np.array([np.flipud(np.transpose(np.vstack([right_fitx + margin,
                                                                     ploty])))])
    right_line_pts = np.hstack((right_line_window1, right_line_window2))
    cv2.fillPoly(window_img, np.int_([left_line_pts]), (0, 255, 0))
    cv2.fillPoly(window_img, np.int_([right_line_pts]), (0, 255, 0))
    result = cv2.addWeighted(out_img, 1, window_img, 0.3, 0)
    if DEBUG:
        plt.plot(left_fitx, ploty, color='yellow')
        plt.plot(right_fitx, ploty, color='yellow')
    # Cache results for downstream consumers (offset / curvature).
    self.left_fitx = left_fitx
    self.right_fitx = right_fitx
    self.leftx = left_fitx
    self.rightx = right_fitx
    return result
| 44.010929 | 159 | 0.604544 |
df6e2c004d5ce86908ac2fc9d1fa6a525944c0ff | 8,016 | py | Python | Scweet/scweet.py | pydxflwb/Scweet | 4d6892edf3b76ff9ec50cda5ee570b80b064b46a | [
"MIT"
] | null | null | null | Scweet/scweet.py | pydxflwb/Scweet | 4d6892edf3b76ff9ec50cda5ee570b80b064b46a | [
"MIT"
] | null | null | null | Scweet/scweet.py | pydxflwb/Scweet | 4d6892edf3b76ff9ec50cda5ee570b80b064b46a | [
"MIT"
] | null | null | null | import csv
import os
import datetime
import argparse
import pandas as pd
from utils import init_driver, get_last_date_from_csv, log_search_page, keep_scroling, dowload_images
# class Scweet():
def scrap(start_date, max_date, words=None, to_account=None, from_account=None,
          interval=5, lang=None, headless=True, limit=float("inf"),
          display_type="Top", resume=False, proxy=None, hashtag=None,
          show_images=False, save_images=False):
    """
    Scrape tweets between start_date and max_date, one Twitter search per
    ``interval``-day window, until max_date is reached.

    Exactly one of ``words``, ``from_account``, ``to_account`` or
    ``hashtag`` must be given; it also names the output CSV under
    ``outputs/``.

    Returns
    -------
    pandas.DataFrame
        All scraped tweets with their features (the same rows are
        streamed to the CSV while scraping).

    Raises
    ------
    ValueError
        If no search criterion is given (previously this crashed later
        with UnboundLocalError on the output path).
    """
    if save_images:
        # Images must be visible in the browser to be downloadable.
        show_images = True
    driver = init_driver(headless, proxy, show_images)
    data = []
    tweet_ids = set()
    save_dir = "outputs"
    write_mode = 'w'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    init_date = start_date  # kept only for naming the output file
    if words:
        if isinstance(words, str):
            words = words.split("//")
        file_tag = words[0]
    elif from_account:
        file_tag = from_account
    elif to_account:
        file_tag = to_account
    elif hashtag:
        file_tag = hashtag
    else:
        raise ValueError(
            "one of words, from_account, to_account or hashtag is required")
    path = save_dir + "/" + file_tag + '_' + str(init_date).split(' ')[0] + \
        '_' + str(max_date).split(' ')[0] + '.csv'
    if resume:
        # Pick up where the previous run's CSV left off.
        start_date = str(get_last_date_from_csv(path))[:10]
        write_mode = 'a'
    end_date = datetime.datetime.strptime(start_date, '%Y-%m-%d') + \
        datetime.timedelta(days=interval)
    refresh = 0
    try:
        with open(path, write_mode, newline='', encoding='utf-8') as f:
            header = ['UserScreenName', 'UserName', 'Timestamp', 'Text',
                      'Emojis', 'Comments', 'Likes', 'Retweets',
                      'Image link', 'Tweet URL']
            writer = csv.writer(f)
            if write_mode == 'w':
                writer.writerow(header)
            while end_date <= datetime.datetime.strptime(max_date, '%Y-%m-%d'):
                scroll = 0
                # start_date is a str on the first pass (or after resume)
                # and a datetime on every later pass.
                if isinstance(start_date, str):
                    start_str = start_date
                else:
                    start_str = datetime.datetime.strftime(start_date, '%Y-%m-%d')
                log_search_page(
                    driver=driver, words=words, start_date=start_str,
                    end_date=datetime.datetime.strftime(end_date, '%Y-%m-%d'),
                    to_account=to_account, from_account=from_account,
                    lang=lang, display_type=display_type, hashtag=hashtag)
                refresh += 1
                # Remember where the page is so keep_scroling can tell when
                # the bottom has been reached.
                last_position = driver.execute_script("return window.pageYOffset;")
                scrolling = True
                print("looking for tweets between " + str(start_date) +
                      " and " + str(end_date) + " ...")
                tweet_parsed = 0
                driver, data, writer, tweet_ids, scrolling, tweet_parsed, \
                    scroll, last_position = keep_scroling(
                        driver, data, writer, tweet_ids, scrolling,
                        tweet_parsed, limit, scroll, last_position)
                # Advance the search window by <interval> days.
                if isinstance(start_date, str):
                    start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d')
                start_date = start_date + datetime.timedelta(days=interval)
                end_date = end_date + datetime.timedelta(days=interval)
    finally:
        # BUGFIX: always release the browser, even if scraping raises.
        driver.close()
    data = pd.DataFrame(data, columns=['UserScreenName', 'UserName', 'Timestamp',
                                       'Text', 'Emojis', 'Comments', 'Likes',
                                       'Retweets', 'Image link', 'Tweet URL'])
    if save_images:
        print("Saving images ...")
        save_images_dir = "images"
        if not os.path.exists(save_images_dir):
            os.makedirs(save_images_dir)
        dowload_images(data["Image link"], save_images_dir)
    return data
def _str2bool(value):
    """Parse a CLI boolean flag.

    BUGFIX: argparse's ``type=bool`` treats ANY non-empty string
    (including "False") as True, silently breaking --headless/--resume.
    """
    return str(value).strip().lower() in ('1', 'true', 't', 'yes', 'y')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Scrap tweets.')
    parser.add_argument('--words', type=str,
                        help='Queries. they should be devided by "//" : Cat//Dog.', default=None)
    parser.add_argument('--from_account', type=str,
                        help='Tweets from this account (axample : @Tesla).', default=None)
    parser.add_argument('--to_account', type=str,
                        help='Tweets replyed to this account (axample : @Tesla).', default=None)
    parser.add_argument('--max_date', type=str,
                        help='Max date for search query. example : %%Y-%%m-%%d.', required=True)
    parser.add_argument('--start_date', type=str,
                        help='Start date for search query. example : %%Y-%%m-%%d.', required=True)
    parser.add_argument('--interval', type=int,
                        help='Interval days between each start date and end date for search queries. example : 5.',
                        default=1)
    parser.add_argument('--lang', type=str,
                        help='Tweets language. example : "en" for english and "fr" for french.', default=None)
    # BUGFIX: was type=bool (any non-empty value parsed as True).
    parser.add_argument('--headless', type=_str2bool,
                        help='Headless webdrives or not. True or False', default=False)
    parser.add_argument('--limit', type=int,
                        help='Limit tweets per <interval>', default=float("inf"))
    parser.add_argument('--display_type', type=str,
                        help='Display type of twitter page : Latest or Top', default="Top")
    # BUGFIX: was type=bool (see --headless).
    parser.add_argument('--resume', type=_str2bool,
                        help='Resume the last scraping. specify the csv file path.', default=False)
    parser.add_argument('--proxy', type=str,
                        help='Proxy server', default=None)
    args = parser.parse_args()

    data = scrap(args.start_date, args.max_date, args.words, args.to_account,
                 args.from_account, args.interval, args.lang, args.headless,
                 args.limit, args.display_type, args.resume, args.proxy)
| 45.288136 | 143 | 0.5998 |
6574126d051a5bc633c98cb61ab9b8c66e26089c | 7,063 | py | Python | displayAndPrompt.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | displayAndPrompt.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | displayAndPrompt.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | from checkState import *
from Player import *
from Action import *
from random import shuffle
# Role classes that can be assigned when building a team
# (presented to the host by getListOfTeamRoles).
availableRoles = [Citizen, Mafian]
### GENERIC PROMPTS ###
# Prompt repeatedly until the user answers "y" or "n"; return the
# matching boolean.
def getYesOrNo(message):
    while True:
        answer = input(message)
        if answer == 'y':
            return True
        if answer == 'n':
            return False
        print('Invalid input!')
# Prompt for an integer within [minimum, maximum]; return it, or False
# when the input is not an integer or is out of range.  An omitted
# maximum (the default, False) means "no upper bound".
def getIntOrFalse(message, minimum=0, maximum=False):
    value = input(message)
    try:
        value = int(value)
    except ValueError:
        # BUGFIX: was a bare except, which also swallowed
        # KeyboardInterrupt / SystemExit.
        return False
    # BUGFIX: `if not maximum` also fired for an explicit maximum of 0,
    # accepting any value; only an omitted maximum should be unbounded.
    if maximum is False:
        maximum = value
    if minimum <= value <= maximum:
        return value
    print('Invalid input!')
    return False
# Print a 1-based numbered list of player names.
def displayPlayers(playerList):
    for number, player in enumerate(playerList, start=1):
        print('(%s) %s' % (number, player.name))
# Join a list of items (or objects exposing `.name`) into one string.
# Two or more items are separated by `deliminator` with `finalDelim`
# before the last one; a single item is returned unchanged; an empty
# list yields ''.
def buildStringFromList(inputList, deliminator=', ', finalDelim=', and '):
    names = [getattr(item, 'name', item) for item in inputList]
    if not names:
        return ''
    if len(names) == 1:
        return names[0]
    return deliminator.join(names[:-1]) + finalDelim + names[-1]
# Print every team with an indented, numbered list of its members.
def displayTeams(teams):
    for teamNo, team in enumerate(teams, start=1):
        print('(#%s) %s' % (teamNo, team.name))
        for memberNo, player in enumerate(team.members, start=1):
            print('\t(#%s) %s' % (memberNo, player.name))
### GAME INITIALIZATION PROMPTS ###
# Interactively collect team definitions.  Returns a list of
# (team name, isThreat, isCollaborative, killsPerTurn) tuples; entry of
# a blank name ends team creation.
def promptTeams():
    teamInfoList = []
    while True:
        newTeamName = input("What is team %s's name? Leave blank to finish team creation. " % (len(teamInfoList) + 1))
        if not newTeamName:
            return teamInfoList
        isThreat = getYesOrNo('Is this team a threat? (y/n): ')
        killsPerTurn = getIntOrFalse('How many kills per night are available to this team? ')
        isCollaborative = getYesOrNo('Can this team collaborate? (y/n): ')
        teamInfoList.append((newTeamName, isThreat, isCollaborative, killsPerTurn))
# Interactively choose roles (and a quantity of each) for `team`.
# Returns a list of (role, team) tuples, one entry per role instance;
# a blank role selection ends the loop.
def getListOfTeamRoles(team):
    usedRoles = {}
    while True:
        print('Select roles for team "%s".' % team.name)
        displayRolesDesc(availableRoles)
        selection = getIntOrFalse('(Leave blank to start game): ')
        if not selection:
            return rolesDictToList(usedRoles, team)
        selectedRole = availableRoles[selection - 1]
        qty = getIntOrFalse('How many %ss would you like?\r\n(Leave blank to pick a different role): ' % selectedRole.title)
        if not qty:
            continue
        # Accumulate (role : quantity); repeated picks add up.
        usedRoles[selectedRole] = usedRoles.get(selectedRole, 0) + qty
        print('Current Roles:')
        displayRolesQty(usedRoles)
# Shuffle the (role, team) assignments, then ask each player for a name.
# Returns a list of (role, team, player name) tuples.
def getPlayerNames(roleList):
    shuffle(roleList)
    playerInfoList = []
    for seat, (role, team) in enumerate(roleList, start=1):
        name = input('Player %s, what is your name? ' % seat)
        playerInfoList.append((role, team, name))
    return playerInfoList
# Print each role's title and description as a numbered list.
def displayRolesDesc(roles):
    for number, role in enumerate(roles, start=1):
        print('(%s) %s - \r\n\t%s' % (number, role.title, role.description))
# Flatten a {role: quantity} dict into a list of (role, team) tuples,
# repeating each role `quantity` times.
def rolesDictToList(usedRoles, team):
    listRoles = []
    for role, qty in usedRoles.items():
        listRoles.extend([(role, team)] * qty)
    return listRoles
# Print the roles (and quantity of each) in the 'usedRoles' dict.
def displayRolesQty(usedRoles):
for role in usedRoles:
print('%s: %s' % (role.title, usedRoles[role]))
### VOTING PROMPTS ###
# Collect one vote per player; abstentions (falsy votee) are skipped.
# Returns a list of (voter, votee) tuples.
def getVotes(playerList):
    voteInfoList = []
    for voter in playerList:
        votee = promptForVote(voter, playerList)
        if votee:
            voteInfoList.append((voter, votee))
    return voteInfoList
# Show the ballot to `player` and return the chosen player object, or
# False to abstain.
def promptForVote(player, playerList):
    # BUGFIX: the prompt printed the literal text '%s, who are you voting
    # for?' because the format argument was missing.
    print('%s, who are you voting for?' % player.name)
    print('(0) Do Not Vote')
    displayPlayers(playerList)
    voteIndex = getIntOrFalse('Choose a player: ', maximum=len(playerList))
    if voteIndex:
        return playerList[voteIndex - 1]
    return False
# Print each vote recipient with their vote total and the names of the
# players who voted for them (reads the global player traits).
def displayVotes():
    print('Voting results:')
    for player in Globals.globalPlayerTraits:
        try:
            voters = player['voters']
        except:
            voters = False
        if not voters:
            continue
        print('%s (%s): %s' % (player['name'], len(voters),
                               buildStringFromList(voters)))
# Announce the lynch result; `killedOff` is a player object or falsy.
def displayKilledOff(killedOff):
    if not killedOff:
        print('No one was voted off...')
    else:
        print('%s has been voted off!' % killedOff.name)
### NIGHT ACTION PROMPTS ###
# Ask every player for a night action.  Returns a list of
# (player, action, targets, priority) tuples.
def getActions(players):
    actionInfoList = []
    for player in players:
        ### Still need to define valid targets here. How do I do that when targets is action dependent? ###
        targets, action, priority = promptPlayerForAction(player, players)
        actionInfoList.append((player, action, targets, priority))
    return actionInfoList
# Walk `player` through choosing an action and its target(s).
# Returns (targets, action, priority); a blank action choice passes the
# turn (first action, priority 0, no targets).
def promptPlayerForAction(player, validTargets):
    targets = []
    print('---You belong to %s---' % player.team.name)
    displayTeammates(player)
    displayLog(player)
    displayActions(player)
    indexAction = getIntOrFalse('Which Action will you perform? (Leave blank to pass this turn)', maximum=len(player.actions))
    if not indexAction:
        return targets, player.actions[0], 0
    action = player.actions[indexAction - 1]
    priority = action.priority
    for targetNo in range(1, action.totalTargets + 1):
        displayPlayers(validTargets)
        indexTarget = getIntOrFalse('Who is target # %s? ' % targetNo, maximum=len(validTargets))
        targets.append(validTargets[indexTarget - 1])
    return targets, action, priority
# For collaborative teams only, list the player's teammates and flag
# the dead ones.
def displayTeammates(player):
    if not player.team.collaborative:
        return
    print('Your teammates are:')
    for number, teammate in enumerate(player.getTeammates(), start=1):
        status = '' if teammate.traits['alive'] else ' (DEAD)'
        print('(%s) %s%s' % (number, teammate.name, status))
# Print the player's private log, one "--Night N--" heading per entry
# (numbering starts at 0).
def displayLog(player):
    for night, entry in enumerate(player.log):
        print('--Night %s--' % night)
        print(entry)
# Print the numbered list of actions available to the player.
def displayActions(player):
    print('Actions Available for %s:' % player.name)
    for number, action in enumerate(player.actions, start=1):
        print('(%s) %s - \r\n\t%s' % (number, action.name, action.description))
7cd01f517a7304fa0a8c9caab5b9c1caa904db3a | 133 | py | Python | tests/view_tests/templatetags/debugtags.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/view_tests/templatetags/debugtags.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/view_tests/templatetags/debugtags.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | from django import template
# Tag registry for this test-only template-tag library.
register = template.Library()
# Simple tag that raises inside template rendering, used by view tests
# to exercise Django's debug error reporting.
@register.simple_tag
def go_boom():
    raise Exception('boom')
| 14.777778 | 30 | 0.699248 |
ad7f70c31b6ba5d6b612979d8dc2d085e35eef55 | 1,378 | py | Python | tests/test_registry.py | bhgomes/oeis | 27555acdcdeec936431789c052ea097b1aa19d69 | [
"Unlicense"
] | 6 | 2019-03-29T05:01:35.000Z | 2021-12-21T08:13:38.000Z | tests/test_registry.py | bhgomes/oeis | 27555acdcdeec936431789c052ea097b1aa19d69 | [
"Unlicense"
] | 4 | 2019-04-05T08:45:23.000Z | 2019-04-15T06:48:40.000Z | tests/test_registry.py | bhgomes/oeis | 27555acdcdeec936431789c052ea097b1aa19d69 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*- #
#
# tests/test_registry.py
#
#
# MIT License
#
# Copyright (c) 2019 Brandon Gomes
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
Test OEIS Registry.
"""
# -------------- External Library -------------- #
import pytest
# ---------------- oeis Library ---------------- #
from oeis.sequence import *
class TestRegistry:
    """Placeholder suite for OEIS registry tests; cases not yet written."""
| 30.622222 | 80 | 0.714804 |
a44e8df964bb28cf15294cf513db0a2fc9f59d23 | 31,459 | py | Python | autoprotocol/container.py | kevin-ss-kim/autoprotocol-python | f55818e31b5c49bc093291f3ecc452f2b061e0a9 | [
"BSD-3-Clause"
] | null | null | null | autoprotocol/container.py | kevin-ss-kim/autoprotocol-python | f55818e31b5c49bc093291f3ecc452f2b061e0a9 | [
"BSD-3-Clause"
] | null | null | null | autoprotocol/container.py | kevin-ss-kim/autoprotocol-python | f55818e31b5c49bc093291f3ecc452f2b061e0a9 | [
"BSD-3-Clause"
] | null | null | null | """
Container, Well, WellGroup objects and associated functions
:copyright: 2020 by The Autoprotocol Development Team, see AUTHORS
for more details.
:license: BSD, see LICENSE for more details
"""
import json
import warnings
from .constants import SBS_FORMAT_SHAPES
from .unit import Unit
# Accepted consumable types for sealing and covering a container.
SEAL_TYPES = ["ultra-clear", "foil", "breathable"]
COVER_TYPES = ["standard", "low_evaporation", "universal"]
class Well(object):
    """
    A single addressable location within a Container.

    Wells should not be constructed directly -- retrieve them from the
    Container they belong to.

    Parameters
    ----------
    container : Container
        The Container this well belongs to.
    index : int
        The index of this well within the container.
    """

    def __init__(self, container, index):
        self.container = container
        self.index = index
        self.volume = None
        self.name = None
        self.properties = {}

    @staticmethod
    def validate_properties(properties):
        """Raise TypeError unless `properties` is a dict whose keys are
        strings and whose values are JSON-serializable."""
        if not isinstance(properties, dict):
            raise TypeError(
                f"Aliquot properties {properties} are of type "
                f"{type(properties)}, they should be a `dict`.")
        for key, value in properties.items():
            if not isinstance(key, str):
                raise TypeError(
                    f"Aliquot property {key} : {value} has a key of type "
                    f"{type(key)}, it should be a 'str'.")
            try:
                json.dumps(value)
            except TypeError:
                raise TypeError(
                    f"Aliquot property {key} : {value} has a value of type "
                    f"{type(value)}, that isn't JSON serializable.")

    def set_properties(self, properties):
        """
        Replace this well's custom properties wholesale.

        Parameters
        ----------
        properties : dict
            New property dictionary; validated, then shallow-copied.

        Returns
        -------
        Well
            This well, for chaining.
        """
        self.validate_properties(properties)
        self.properties = dict(properties)
        return self

    def add_properties(self, properties):
        """
        Merge `properties` into the existing property dict.

        On a key collision: append when both old and new values are
        lists, otherwise overwrite the old value (with a warning).

        Parameters
        ----------
        properties : dict
            Properties to merge in; validated first.

        Returns
        -------
        Well
            This well, for chaining.
        """
        self.validate_properties(properties)
        for key, value in properties.items():
            if key not in self.properties:
                self.properties[key] = value
                continue
            existing = self.properties[key]
            if isinstance(existing, list) and isinstance(value, list):
                existing.extend(value)
            else:
                warnings.warn(
                    message=f"Overwriting existing property {key} for {self}")
                self.properties[key] = value
        return self

    def set_volume(self, vol):
        """
        Record the theoretical volume of liquid in this well.

        Parameters
        ----------
        vol : str or Unit
            Theoretical volume to record.

        Returns
        -------
        Well
            This well, for chaining.

        Raises
        ------
        TypeError
            If `vol` is neither a str nor a Unit.
        ValueError
            If `vol` exceeds the container type's true maximum volume.
        """
        if not isinstance(vol, (str, Unit)):
            raise TypeError(
                "Volume {} is of type {}, it should be either 'str' or "
                "'Unit'.".format(vol, type(vol)))
        v = Unit(vol)
        max_vol = self.container.container_type.true_max_vol_ul
        if v > max_vol:
            raise ValueError(
                "Theoretical volume {} to be set exceeds "
                "maximum well volume {}.".format(v, max_vol))
        self.volume = v
        return self

    def set_name(self, name):
        """Name this well so it is included in a protocol's "outs"
        section; returns this well for chaining."""
        self.name = name
        return self

    def humanize(self):
        """Return this well's index in human-readable form; delegates to
        the parent container's ContainerType.humanize()."""
        return self.container.humanize(self.index)

    def available_volume(self):
        """
        Return the usable volume of this well: the recorded volume minus
        the container type's dead volume.

        Raises
        ------
        RuntimeError
            If no volume has been recorded for this well.
        """
        if self.volume is None:
            raise RuntimeError("well {} has no volume".format(self))
        return self.volume - self.container.container_type.dead_volume_ul

    def __repr__(self):
        """Debug representation: Well(<container>, <index>, <volume>)."""
        return "Well(%s, %s, %s)" % (self.container, self.index, self.volume)
class WellGroup(object):
"""
A logical grouping of Wells.
Wells in a WellGroup do not necessarily need to be in the same container.
Parameters
----------
wells : list
List of Well objects contained in this WellGroup.
Raises
------
TypeError
Wells is not of the right input type
"""
def __init__(self, wells):
    # Normalize input to a plain list of Well objects; the wells need
    # not share a container.
    if isinstance(wells, Well):
        wells = [wells]
    elif isinstance(wells, WellGroup):
        wells = wells.wells
    elif isinstance(wells, list):
        if any(not isinstance(w, Well) for w in wells):
            raise TypeError("All elements in list must be wells")
    else:
        raise TypeError("Wells must be Well, list of wells, WellGroup.")
    self.wells = wells
    self.name = None
def __eq__(self, other):
    # Two WellGroups are equal when their attribute dicts match.
    if not isinstance(other, self.__class__):
        return False
    return self.__dict__ == other.__dict__
def set_properties(self, properties):
    """
    Apply the same property dict to every Well in the group (delegates
    to Well.set_properties).

    Parameters
    ----------
    properties : dict
        Properties to set on each well.

    Returns
    -------
    WellGroup
        This group, for chaining.
    """
    for well in self.wells:
        well.set_properties(properties)
    return self
def add_properties(self, properties):
    """
    Merge the same property dict into every Well in the group
    (delegates to Well.add_properties).

    Parameters
    ----------
    properties : dict
        Properties to merge into each well.

    Returns
    -------
    WellGroup
        This group, for chaining.
    """
    for well in self.wells:
        well.add_properties(properties)
    return self
def set_volume(self, vol):
    """
    Record the same theoretical volume on every well in the group
    (delegates to Well.set_volume).

    Parameters
    ----------
    vol : Unit or str
        Theoretical volume for each well.

    Returns
    -------
    WellGroup
        This group, for chaining.
    """
    for well in self.wells:
        well.set_volume(vol)
    return self
def indices(self):
    """
    Return the humanized indices of all member wells.

    All wells must belong to the same container (asserted against the
    first well's container).

    Returns
    -------
    list(str)
        Human-readable index of each well.
    """
    first_container = self.wells[0].container if self.wells else None
    humanized = []
    for well in self.wells:
        assert well.container == first_container, (
            "All wells in WellGroup must belong to the same container to "
            "get their indices.")
        humanized.append(well.humanize())
    return humanized
def append(self, other):
    """
    Append a single Well to this group.

    Parameters
    ----------
    other : Well
        Well to append.

    Returns
    -------
    None
        (list.append semantics.)

    Raises
    ------
    TypeError
        If `other` is not a Well.
    """
    if not isinstance(other, Well):
        raise TypeError("Input given is not of type 'Well'.")
    return self.wells.append(other)
def extend(self, other):
    """
    Extend this WellGroup with the wells of another WellGroup or list.
    Parameters
    ----------
    other : WellGroup or list of Wells
        Wells to add to this WellGroup.
    Returns
    -------
    None
        The result of the underlying ``list.extend`` call.
    Raises
    ------
    TypeError
        Input is not a WellGroup/list, or contains non-Well elements.
    """
    if not isinstance(other, (WellGroup, list)):
        raise TypeError("Input given is not of type 'WellGroup' or "
                        "'list'.")
    if not all(isinstance(item, Well) for item in other):
        raise TypeError("Input given is not of type 'Well'.")
    return self.wells.extend(WellGroup(other).wells)
def set_group_name(self, name):
    """
    Assign a display name to this WellGroup.
    Parameters
    ----------
    name: str
        WellGroup name
    Returns
    -------
    WellGroup
        This WellGroup, with its ``name`` attribute updated.
    """
    self.name = name
    return self
def wells_with(self, prop, val=None):
    """
    Return a WellGroup of member wells that carry the specified property,
    optionally restricted to wells whose property value equals ``val``.
    Parameters
    ----------
    prop: str
        the property you are searching for
    val: str, optional
        the value assigned to the property
    Returns
    -------
    WellGroup
        WellGroup of wells that matched
    Raises
    ------
    TypeError
        property or value defined does not have right input type
    """
    if not isinstance(prop, str):
        raise TypeError("property is not a string: %r" % prop)
    if val is not None:
        # Bug fix: compare with value equality (==), not identity (is).
        # Identity fails for equal-but-distinct objects such as most
        # strings and non-interned ints, silently dropping matches.
        return WellGroup([w for w in self.wells if prop in w.properties and
                          w.properties[prop] == val])
    else:
        return WellGroup([w for w in self.wells if prop in w.properties])
def pop(self, index=-1):
    """
    Remove and return the well at ``index`` (the last well by default).
    Parameters
    ----------
    index: int, optional
        the index of the well you want to remove and return
    Returns
    -------
    Well
        The well removed from this WellGroup.
    """
    return self.wells.pop(index)
def insert(self, i, well):
    """
    Insert a well at a given position.
    Parameters
    ----------
    i : int
        index to insert the well at
    well : Well
        insert this well at the index
    Returns
    -------
    list or None
        The backing well list, or None when the well was appended because
        the index was past the end.
    Raises
    ------
    TypeError
        index or well defined does not have right input type
    """
    if not isinstance(i, int):
        raise TypeError("Input given is not of type 'Int'")
    if not isinstance(well, Well):
        raise TypeError("Input given is not of type 'Well'")
    # Past-the-end indices degrade to a plain append.
    if i >= len(self.wells):
        return self.wells.append(well)
    self.wells = self.wells[:i] + [well] + self.wells[i:]
    return self.wells
def __setitem__(self, key, item):
    """
    Replace the Well stored at position ``key``.
    Parameters
    ----------
    key : int
        Position in a WellGroup in robotized form.
    item: Well
        Well to store at that position.
    Raises
    ------
    TypeError
        Item specified is not of type `Well`
    """
    if not isinstance(item, Well):
        raise TypeError("Input given is not of type 'Well'.")
    self.wells[key] = item
def __getitem__(self, key):
    """
    Fetch the Well stored at position ``key``.
    Parameters
    ----------
    key : int
        Position in a WellGroup in robotized form.
    Returns
    -------
    Well
        The well stored at that position.
    """
    return self.wells[key]
def __len__(self):
    """Number of Wells currently held by this WellGroup."""
    return len(self.wells)
def __repr__(self):
    """Debug representation: ``WellGroup([...])`` with the member list."""
    return "WellGroup(%s)" % str(self.wells)
def __add__(self, other):
    """
    Build a new WellGroup containing this group's wells followed by
    ``other`` (a single Well or the wells of another WellGroup).
    Parameters
    ----------
    other : Well, WellGroup.
    Returns
    -------
    WellGroup
        New WellGroup with the combined wells.
    Raises
    ------
    TypeError
        Input given is not of type Well or WellGroup
    """
    if not isinstance(other, (Well, WellGroup)):
        raise TypeError("You can only add a Well or WellGroups "
                        "together.")
    extra = [other] if isinstance(other, Well) else other.wells
    return WellGroup(self.wells + extra)
# pylint: disable=redefined-builtin
class Container(object):
    """
    A reference to a specific physical container (e.g. a tube or 96-well
    microplate).
    Every Container has an associated ContainerType, which defines the well
    count and arrangement, amongst other properties.
    There are several methods on Container which present a convenient interface
    for defining subsets of wells on which to operate. These methods return a
    WellGroup.
    Containers are usually declared using the Protocol.ref method.
    Parameters
    ----------
    id : str, optional
        Alphanumerical identifier for a Container.
    container_type : ContainerType
        ContainerType associated with a Container.
    name : str, optional
        name of the container/ref being created.
    storage : str, optional
        name of the storage condition.
    cover : str, optional
        name of the cover on the container.
    Raises
    ------
    AttributeError
        Invalid cover-type given
    """
    def __init__(self, id, container_type, name=None, storage=None,
                 cover=None):
        self.name = name
        self.id = id
        self.container_type = container_type
        self.storage = storage
        self.cover = cover
        # Eagerly build one Well per index declared by the container type.
        self._wells = [Well(self, idx)
                       for idx in range(container_type.well_count)]
        # A cover string must match a known seal or cover type.
        if self.cover and not (self.is_covered() or self.is_sealed()):
            raise AttributeError("%s is not a valid seal or cover "
                                 "type." % cover)
    def well(self, i):
        """
        Return a Well object representing the well at the index specified of
        this Container.
        Parameters
        ----------
        i : int, str
            Well reference in the form of an integer (ex: 0) or human-readable
            string (ex: "A1").
        Returns
        -------
        Well
            Well for given reference
        Raises
        ------
        TypeError
            index given is not of the right type
        """
        if not isinstance(i, (int, str)):
            raise TypeError("Well reference given is not of type 'int' or "
                            "'str'.")
        return self._wells[self.robotize(i)]
    def well_from_coordinates(self, row, column):
        """
        Gets the well at 0-indexed position (row, column) within the container.
        The origin is in the top left corner.
        Parameters
        ----------
        row : int
            The 0-indexed row index of the well to be fetched
        column : int
            The 0-indexed column index of the well to be fetched
        Returns
        -------
        Well
            The well at position (row, column)
        """
        return self.well(
            self.container_type.well_from_coordinates(row=row, column=column)
        )
    def tube(self):
        """
        Checks if container is tube and returns a Well representing the zeroth
        well.
        Returns
        -------
        Well
            Zeroth well of tube
        Raises
        -------
        AttributeError
            If container is not tube
        """
        if self.container_type.is_tube:
            return self.well(0)
        else:
            raise AttributeError("%s is a %s and is not a tube" %
                                 (self, self.container_type.shortname))
    def wells(self, *args):
        """
        Return a WellGroup containing references to wells corresponding to the
        index or indices given.
        Parameters
        ----------
        args : str, int, list
            Reference or list of references to a well index either as an
            integer or a string.
        Returns
        -------
        WellGroup
            Wells from specified references
        Raises
        ------
        TypeError
            Well reference is not of a valid input type
        """
        # Flatten the positional arguments (scalars and lists) into a single
        # flat list of well references before validating them.
        if isinstance(args[0], list):
            wells = args[0]
        else:
            wells = [args[0]]
        for a in args[1:]:
            if isinstance(a, list):
                wells.extend(a)
            else:
                wells.extend([a])
        for w in wells:
            if not isinstance(w, (str, int, list)):
                raise TypeError("Well reference given is not of type"
                                " 'int', 'str' or 'list'.")
        return WellGroup([self.well(w) for w in wells])
    def robotize(self, well_ref):
        """
        Return the integer representation of the well index given, based on
        the ContainerType of the Container.
        Uses the robotize function from the ContainerType class. Refer to
        `ContainerType.robotize()` for more information.
        """
        if not isinstance(well_ref, (str, int, Well, list)):
            raise TypeError("Well reference given is not of type 'str' "
                            "'int', 'Well' or 'list'.")
        return self.container_type.robotize(well_ref)
    def humanize(self, well_ref):
        """
        Return the human readable representation of the integer well index
        given based on the ContainerType of the Container.
        Uses the humanize function from the ContainerType class. Refer to
        `ContainerType.humanize()` for more information.
        """
        if not isinstance(well_ref, (int, str, list)):
            raise TypeError("Well reference given is not of type 'int',"
                            "'str' or 'list'.")
        return self.container_type.humanize(well_ref)
    def decompose(self, well_ref):
        """
        Return a tuple representing the column and row number of the well
        index given based on the ContainerType of the Container.
        Uses the decompose function from the ContainerType class. Refer to
        `ContainerType.decompose()` for more information.
        """
        if not isinstance(well_ref, (int, str, Well)):
            raise TypeError("Well reference given is not of type 'int', "
                            "'str' or Well.")
        return self.container_type.decompose(well_ref)
    def all_wells(self, columnwise=False):
        """
        Return a WellGroup representing all Wells belonging to this Container.
        Parameters
        ----------
        columnwise : bool, optional
            returns the WellGroup columnwise instead of rowwise (ordered by
            well index).
        Returns
        -------
        WellGroup
            WellGroup of all Wells in Container
        """
        if columnwise:
            # _wells is stored row-major; re-index it into column-major order.
            num_cols = self.container_type.col_count
            num_rows = self.container_type.well_count // num_cols
            return WellGroup([self._wells[row * num_cols + col]
                              for col in range(num_cols)
                              for row in range(num_rows)])
        else:
            return WellGroup(self._wells)
    def inner_wells(self, columnwise=False):
        """
        Return a WellGroup of all wells on a plate excluding wells in the top
        and bottom rows and in the first and last columns.
        Parameters
        ----------
        columnwise : bool, optional
            returns the WellGroup columnwise instead of rowwise (ordered by
            well index).
        Returns
        -------
        WellGroup
            WellGroup of inner wells
        """
        num_cols = self.container_type.col_count
        num_rows = self.container_type.row_count()
        inner_wells = []
        if columnwise:
            # Walk interior columns, collecting interior rows of each.
            for c in range(1, num_cols - 1):
                wells = []
                for r in range(1, num_rows - 1):
                    wells.append((r * num_cols) + c)
                inner_wells.extend(wells)
        else:
            # Skip the first row, then take each interior row without its
            # first and last column.
            well = num_cols
            for _ in range(1, num_rows - 1):
                inner_wells.extend(range(well + 1, well + (num_cols - 1)))
                well += num_cols
        inner_wells = [self._wells[x] for x in inner_wells]
        return WellGroup(inner_wells)
    def wells_from(self, start, num, columnwise=False):
        """
        Return a WellGroup of Wells belonging to this Container starting from
        the index indicated (in integer or string form) and including the
        number of proceeding wells specified. Wells are counted from the
        starting well rowwise unless columnwise is True.
        Parameters
        ----------
        start : Well or int or str
            Starting well specified as a Well object, a human-readable well
            index or an integer well index.
        num : int
            Number of wells to include in the Wellgroup.
        columnwise : bool, optional
            Specifies whether the wells included should be counted columnwise
            instead of the default rowwise.
        Returns
        -------
        WellGroup
            WellGroup of selected wells
        Raises
        ------
        TypeError
            Incorrect input types, e.g. `num` has to be of type int
        """
        if not isinstance(start, (str, int, Well)):
            raise TypeError("Well reference given is not of type 'str',"
                            "'int', or 'Well'.")
        if not isinstance(num, int):
            raise TypeError("Number of wells given is not of type 'int'.")
        start = self.robotize(start)
        if columnwise:
            # Convert the row-major start index into its column-major ordinal
            # so the slice below counts wells down the columns.
            row, col = self.decompose(start)
            num_rows = self.container_type.row_count()
            start = col * num_rows + row
        return WellGroup(self.all_wells(columnwise).wells[start:start + num])
    def is_sealed(self):
        """
        Check if Container is sealed.
        """
        return self.cover in SEAL_TYPES
    def is_covered(self):
        """
        Check if Container is covered.
        """
        return self.cover in COVER_TYPES
    def quadrant(self, quad):
        """
        Return a WellGroup of Wells corresponding to the selected quadrant of
        this Container.
        Parameters
        ----------
        quad : int or str
            Specifies the quadrant number of the well (ex. 2)
        Returns
        -------
        WellGroup
            WellGroup of wells for the specified quadrant
        Raises
        ------
        ValueError
            Invalid quadrant specified for this Container type
        """
        # Quadrant 0/"A1", 1/"A2", 2/"B1", 3/"B2" name the four interleaved
        # 96-well subsets of a 384-well plate.
        if isinstance(quad, str):
            quad = quad.lower()
            if quad == "a1":
                quad = 0
            elif quad == "a2":
                quad = 1
            elif quad == "b1":
                quad = 2
            elif quad == "b2":
                quad = 3
            else:
                raise ValueError("Invalid quadrant index.")
        # n_wells: n_cols
        allowed_layouts = {96: 12, 384: 24}
        n_wells = self.container_type.well_count
        if (n_wells not in allowed_layouts or
                self.container_type.col_count != allowed_layouts[n_wells]):
            raise ValueError("Quadrant is only defined for standard 96 and "
                             "384-well plates")
        if n_wells == 96:
            if quad == 0:
                return WellGroup(self._wells)
            else:
                raise ValueError(
                    "0 or 'A1' is the only valid quadrant for a 96-well "
                    "plate.")
        if quad not in [0, 1, 2, 3]:
            raise ValueError("Invalid quadrant {} for plate type {}".format(
                quad, str(self.name)))
        # Robotized index of the top-left well of each quadrant on a
        # 24-column plate: A1=0, A2=1, B1=24, B2=25.  Every quadrant well is
        # then reached by striding two rows (48) and two columns (2) at a
        # time from that origin.
        start_well = [0, 1, 24, 25]
        wells = []
        for row_offset in range(start_well[quad], 384, 48):
            for col_offset in range(0, 24, 2):
                wells.append(row_offset + col_offset)
        return self.wells(wells)
    def set_storage(self, storage):
        """
        Set the storage condition of a container, will overwrite
        an existing storage condition, will remove discard True.
        Parameters
        ----------
        storage : str
            Storage condition.
        Returns
        -------
        Container
            Container with modified storage condition
        Raises
        ----------
        TypeError
            If storage condition not of type str.
        """
        if not isinstance(storage, str):
            raise TypeError("Storage condition given ({0}) is not of type "
                            "str. {1}.".format(storage, type(storage)))
        self.storage = storage
        return self
    def discard(self):
        """
        Set the storage condition of a container to None and
        container to be discarded if ref in protocol.
        Example
        ----------
        .. code-block:: python
            p = Protocol()
            container = p.ref("new_container", cont_type="96-pcr",
                              storage="cold_20")
            p.incubate(c, "warm_37", "30:minute")
            container.discard()
            Autoprotocol generated:
        .. code-block:: json
            "refs": {
                "new_container": {
                  "new": "96-pcr",
                  "discard": true
                }
              }
        """
        self.storage = None
        return self
    # pylint: disable=too-many-locals
    def wells_from_shape(self, origin, shape):
        """
        Gets a WellGroup that originates from the `origin` and is distributed
        across the container in `shape`. This group has a Well for each index
        in `range(shape["rows"] * shape["columns"])`.
        In cases where the container dimensions are smaller than the shape
        format's dimensions the returned WellGroup will reference some wells
        multiple times. This is analogous to an SBS96-formatted liquid handler
        acting with multiple tips in each well of an SBS24-formatted plate.
        Parameters
        ----------
        origin : int or str
            The index of the top left corner origin of the shape
        shape : dict
            See Also Instruction.builders.shape
        Returns
        -------
        WellGroup
            The group of wells distributed in `shape` from the `origin`
        Raises
        ------
        ValueError
            if the shape exceeds the extents of the container
        """
        # Imported locally to avoid a circular import at module load time.
        from .instruction import Instruction
        shape = Instruction.builders.shape(**shape)
        origin = self.well(origin)
        # unpacking container and shape format properties
        container_rows = self.container_type.row_count()
        container_cols = self.container_type.col_count
        format_rows = SBS_FORMAT_SHAPES[shape["format"]]["rows"]
        format_cols = SBS_FORMAT_SHAPES[shape["format"]]["columns"]
        # getting the row and column values for the origin
        origin_row, origin_col = self.decompose(origin)
        # ratios of container shape to format shape
        row_scaling = container_rows / format_rows
        col_scaling = container_cols / format_cols
        # the 0-indexed coordinates of all wells in origin plate to be included
        well_rows = []
        well_cols = []
        for idx in range(shape["rows"]):
            well_row = int(origin_row + idx * row_scaling)
            well_rows.append(well_row)
        for idx in range(shape["columns"]):
            well_col = int(origin_col + idx * col_scaling)
            well_cols.append(well_col)
        # coordinates of the tail (bottom right well) should not exceed bounds
        tail_row = well_rows[-1]
        tail_col = well_cols[-1]
        # tail_row and tail_col are 0-indexed based
        # container_rows and container_cols are 1-indexed based
        if tail_row + 1 > container_rows or tail_col + 1 > container_cols:
            raise ValueError(
                "origin: {} with shape: {} exceeds the bounds of "
                "container: {}".format(origin, shape, self)
            )
        return WellGroup([
            self.well_from_coordinates(x, y)
            for x in well_rows for y in well_cols
        ])
    def __repr__(self):
        """
        Return a string representation of a Container using the specified name.
        (ex. Container('my_plate'))
        """
        return "Container(%s%s)" % (str(self.name), ", cover=" +
                                    self.cover if self.cover else "")
| 29.318733 | 80 | 0.539909 |
c45b3beaa25f1c783fae4e0d022528bdb94bb53d | 3,597 | py | Python | course_getter/middlewares.py | chtmp223/course-search | 5c7e28edb8281be8c20f73ab2a7a6336dee90aa7 | [
"MIT"
] | null | null | null | course_getter/middlewares.py | chtmp223/course-search | 5c7e28edb8281be8c20f73ab2a7a6336dee90aa7 | [
"MIT"
] | 1 | 2022-03-02T15:02:38.000Z | 2022-03-02T15:02:38.000Z | course_getter/middlewares.py | chtmp223/course-search | 5c7e28edb8281be8c20f73ab2a7a6336dee90aa7 | [
"MIT"
] | null | null | null | # !/usr/bin/python
# -*- coding: utf-8 -*-
# Created on 2020-03-22
# Class: Course Getter Spider Middleware
# Comment: Define the model for spider middleware
from scrapy import signals
class CourseGetterSpiderMiddleware(object):
    """Spider middleware whose hooks all keep scrapy's default behaviour:
    responses, results and start requests pass through unchanged."""
    @classmethod
    def from_crawler(cls, crawler):
        # Instantiate the middleware and subscribe to the spider_opened
        # signal so we can log when a spider starts.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened,
                                signal=signals.spider_opened)
        return middleware
    def process_spider_input(self, response, spider):
        """Returning None lets every response continue into the spider."""
        return None
    def process_spider_output(self, response, result, spider):
        """Forward each item/request produced by the spider unchanged."""
        for entry in result:
            yield entry
    def process_spider_exception(self, response, exception, spider):
        """Do not handle spider exceptions; defer to default behaviour."""
        pass
    def process_start_requests(self, start_requests, spider):
        """Yield the spider's start requests unchanged."""
        for request in start_requests:
            yield request
    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class CourseGetterDownloaderMiddleware(object):
    """Downloader middleware whose hooks all keep scrapy's default
    behaviour: requests, responses and exceptions pass through unchanged."""
    @classmethod
    def from_crawler(cls, crawler):
        # Instantiate the middleware and subscribe to the spider_opened
        # signal so we can log when a spider starts.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened,
                                signal=signals.spider_opened)
        return middleware
    def process_request(self, request, spider):
        """Returning None lets the request continue down the chain."""
        return None
    def process_response(self, request, response, spider):
        """Hand the downloaded response back unchanged."""
        return response
    def process_exception(self, request, exception, spider):
        """Leave download exception handling to the remaining middlewares."""
        pass
    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| 34.586538 | 78 | 0.666111 |
785853728edc164b4b6d422a318e431019054f53 | 451 | py | Python | frontier/info/migrations/0018_auto_20180125_1933.py | mcmaxwell/frontier | d1f59154108566c652965a43c4b999de33c05c58 | [
"MIT"
] | null | null | null | frontier/info/migrations/0018_auto_20180125_1933.py | mcmaxwell/frontier | d1f59154108566c652965a43c4b999de33c05c58 | [
"MIT"
] | null | null | null | frontier/info/migrations/0018_auto_20180125_1933.py | mcmaxwell/frontier | d1f59154108566c652965a43c4b999de33c05c58 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2018-01-25 19:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Must be applied after migration 0017 of the ``info`` app.
    dependencies = [
        ('info', '0017_auto_20180125_1923'),
    ]
    operations = [
        # Redefine ``info.logo_footer`` as a plain FileField with an empty
        # ``upload_to``, i.e. files land directly under MEDIA_ROOT.
        migrations.AlterField(
            model_name='info',
            name='logo_footer',
            field=models.FileField(upload_to=b''),
        ),
    ]
| 21.47619 | 50 | 0.611973 |
ffad8163470873de0b6dd783eeacbbb14a82ea5f | 27,046 | py | Python | autotest/gdrivers/mrf.py | Esri/gdal | 8d9af5086ddb96f707ed281786a1cd278066a7f2 | [
"MIT"
] | 9 | 2019-05-30T17:01:56.000Z | 2021-01-30T01:06:41.000Z | autotest/gdrivers/mrf.py | Esri/gdal | 8d9af5086ddb96f707ed281786a1cd278066a7f2 | [
"MIT"
] | 4 | 2018-10-23T18:43:35.000Z | 2019-07-01T19:29:49.000Z | autotest/gdrivers/mrf.py | Esri/gdal | 8d9af5086ddb96f707ed281786a1cd278066a7f2 | [
"MIT"
] | 6 | 2019-02-03T14:19:32.000Z | 2021-12-19T06:36:49.000Z | #!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read/write functionality for MRF driver.
# Author: Even Rouault, <even.rouault at spatialys.com>
#
###############################################################################
# Copyright (c) 2016, Even Rouault, <even.rouault at spatialys.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
import glob
sys.path.append('../pymod')
from osgeo import gdal
import gdaltest
# Each entry drives one MRF create-copy test:
# (source file, band to checksum, expected checksum or [checksum,
#  alternative checksums], creation options).
init_list = [
    ('byte.tif', 1, 4672, None),
    ('byte.tif', 1, 4672, ['COMPRESS=DEFLATE']),
    ('byte.tif', 1, 4672, ['COMPRESS=NONE']),
    ('byte.tif', 1, 4672, ['COMPRESS=LERC']),
    ('byte.tif', 1, [4672, 5015], ['COMPRESS=LERC', 'OPTIONS:LERC_PREC=10']),
    ('byte.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('int16.tif', 1, 4672, None),
    ('int16.tif', 1, 4672, ['COMPRESS=LERC']),
    ('int16.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/uint16.tif', 1, 4672, None),
    ('../../gcore/data/uint16.tif', 1, 4672, ['COMPRESS=LERC']),
    ('../../gcore/data/uint16.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/int32.tif', 1, 4672, ['COMPRESS=TIF']),
    ('../../gcore/data/int32.tif', 1, 4672, ['COMPRESS=LERC']),
    ('../../gcore/data/int32.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/uint32.tif', 1, 4672, ['COMPRESS=TIF']),
    ('../../gcore/data/uint32.tif', 1, 4672, ['COMPRESS=LERC']),
    ('../../gcore/data/uint32.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/float32.tif', 1, 4672, ['COMPRESS=TIF']),
    ('../../gcore/data/float32.tif', 1, 4672, ['COMPRESS=LERC']),
    ('../../gcore/data/float32.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/float64.tif', 1, 4672, ['COMPRESS=TIF']),
    ('../../gcore/data/float64.tif', 1, 4672, ['COMPRESS=LERC']),
    ('../../gcore/data/float64.tif', 1, [4672, 5015], ['COMPRESS=LERC', 'OPTIONS:LERC_PREC=10']),
    ('../../gcore/data/float64.tif', 1, 4672, ['COMPRESS=LERC', 'OPTIONS=V1:YES']),
    ('../../gcore/data/utmsmall.tif', 1, 50054, None),
    ('small_world_pct.tif', 1, 14890, ['COMPRESS=PPNG']),
    ('byte.tif', 1, [4672, [4603, 4652]], ['COMPRESS=JPEG', 'QUALITY=99']),
    # following expected checksums are for: gcc 4.4 debug, mingw/vc9 32-bit, mingw-w64/vc12 64bit, MacOSX
    ('rgbsmall.tif', 1, [21212, [21162, 21110, 21155, 21116]], ['COMPRESS=JPEG', 'QUALITY=99']),
    ('rgbsmall.tif', 1, [21212, [21266, 21369, 21256, 21495]], ['INTERLEAVE=PIXEL', 'COMPRESS=JPEG', 'QUALITY=99']),
    ('rgbsmall.tif', 1, [21212, [21261, 21209, 21254, 21215]], ['INTERLEAVE=PIXEL', 'COMPRESS=JPEG', 'QUALITY=99', 'PHOTOMETRIC=RGB']),
    ('rgbsmall.tif', 1, [21212, [21283, 21127, 21278, 21124]], ['INTERLEAVE=PIXEL', 'COMPRESS=JPEG', 'QUALITY=99', 'PHOTOMETRIC=YCC']),
    ('12bit_rose_extract.jpg', 1, [30075, [29650, 29680, 29680, 29650]], ['COMPRESS=JPEG']),
]
def mrf_zen_test():
    """Zen (JPEG mask) test: a LUT-mapped VRT over tmp/masked.mrf must
    produce the reference checksum for both interleave modes."""
    status = 'success'
    want_cs = 770
    testvrt = '''
<VRTDataset rasterXSize="512" rasterYSize="512">
<VRTRasterBand dataType="Byte" band="1">
<ColorInterp>Gray</ColorInterp>
<ComplexSource>
<SourceFilename relativeToVRT="0">tmp/masked.mrf</SourceFilename>
<SourceBand>1</SourceBand>
<SourceProperties RasterXSize="512" RasterYSize="512" DataType="Byte" BlockXSize="512" BlockYSize="512" />
<SrcRect xOff="0" yOff="0" xSize="512" ySize="512" />
<DstRect xOff="0" yOff="0" xSize="512" ySize="512" />
<LUT>0:0,1:255,255:255</LUT>
</ComplexSource>
</VRTRasterBand>
</VRTDataset>
'''
    for mode in ('PIXEL', 'BAND'):
        options = ['COMPRESS=JPEG', 'INTERLEAVE=' + mode]
        gdal.Translate('tmp/masked.mrf', 'data/masked.jpg', format='MRF',
                       creationOptions=options)
        vrt_ds = gdal.Open(testvrt)
        got_cs = vrt_ds.GetRasterBand(1).Checksum()
        if got_cs != want_cs:
            gdaltest.post_reason('Interleave=' + mode +
                                 ' expected checksum ' + str(want_cs) +
                                 ' got ' + str(got_cs))
            status = 'fail'
        # Clean up the generated MRF side-car files before the next mode.
        for leftover in glob.glob('tmp/masked.*'):
            gdal.Unlink(leftover)
        if status != 'success':
            return status
    return status
def mrf_overview_near_fact_2():
    """NEAR overview (factor 2) on MRF must match the GTiff reference for
    every supported data type."""
    # Build the reference checksum once from a plain GeoTIFF overview.
    ref = gdal.Translate('/vsimem/out.tif', 'data/byte.tif')
    ref.BuildOverviews('NEAR', [2])
    expected_cs = ref.GetRasterBand(1).GetOverview(0).Checksum()
    ref = None
    gdal.Unlink('/vsimem/out.tif')
    data_types = [gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_UInt16,
                  gdal.GDT_Int32, gdal.GDT_UInt32,
                  gdal.GDT_Float32, gdal.GDT_Float64]
    for dt in data_types:
        mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                             format='MRF',
                             creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'],
                             outputType=dt)
        mrf.BuildOverviews('NEAR', [2])
        mrf = None
        ds = gdal.Open('/vsimem/out.mrf')
        got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if got_cs != expected_cs:
            gdaltest.post_reason('fail')
            print(dt)
            print(got_cs)
            print(expected_cs)
            return 'fail'
        ds = None
        for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg',
                     '/vsimem/out.til'):
            gdal.Unlink(path)
    return 'success'
def mrf_overview_near_with_nodata_fact_2():
    """NEAR overview (factor 2) with nodata=107 must yield checksum 1117
    for every supported data type."""
    expected_cs = 1117
    data_types = [gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_UInt16,
                  gdal.GDT_Int32, gdal.GDT_UInt32,
                  gdal.GDT_Float32, gdal.GDT_Float64]
    for dt in data_types:
        mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                             format='MRF',
                             creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'],
                             outputType=dt,
                             noData=107)
        mrf.BuildOverviews('NEAR', [2])
        mrf = None
        ds = gdal.Open('/vsimem/out.mrf')
        got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if got_cs != expected_cs:
            gdaltest.post_reason('fail')
            print(dt)
            print(got_cs)
            print(expected_cs)
            return 'fail'
        ds = None
        for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg',
                     '/vsimem/out.til'):
            gdal.Unlink(path)
    return 'success'
def mrf_overview_avg_fact_2():
    """AVG overview (factor 2) must yield checksum 1152 for every supported
    data type."""
    expected_cs = 1152
    data_types = [gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_UInt16,
                  gdal.GDT_Int32, gdal.GDT_UInt32,
                  gdal.GDT_Float32, gdal.GDT_Float64]
    for dt in data_types:
        mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                             format='MRF',
                             creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'],
                             outputType=dt)
        mrf.BuildOverviews('AVG', [2])
        mrf = None
        ds = gdal.Open('/vsimem/out.mrf')
        got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if got_cs != expected_cs:
            gdaltest.post_reason('fail')
            print(dt)
            print(got_cs)
            print(expected_cs)
            return 'fail'
        ds = None
        for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg',
                     '/vsimem/out.til'):
            gdal.Unlink(path)
    return 'success'
def mrf_overview_avg_with_nodata_fact_2():
    """AVG overview (factor 2) with nodata=107 must yield checksum 1164
    for every supported data type."""
    expected_cs = 1164
    data_types = [gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_UInt16,
                  gdal.GDT_Int32, gdal.GDT_UInt32,
                  gdal.GDT_Float32, gdal.GDT_Float64]
    for dt in data_types:
        mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                             format='MRF',
                             creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'],
                             outputType=dt,
                             noData=107)
        mrf.BuildOverviews('AVG', [2])
        mrf = None
        ds = gdal.Open('/vsimem/out.mrf')
        got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if got_cs != expected_cs:
            gdaltest.post_reason('fail')
            print(dt)
            print(got_cs)
            print(expected_cs)
            return 'fail'
        ds = None
        for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg',
                     '/vsimem/out.til'):
            gdal.Unlink(path)
    return 'success'
def mrf_overview_near_fact_3():
    """NEAR overview with the non power-of-two factor 3 must yield
    checksum 478."""
    mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                         format='MRF',
                         creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'])
    mrf.BuildOverviews('NEAR', [3])
    mrf = None
    ds = gdal.Open('/vsimem/out.mrf')
    got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    expected_cs = 478
    if got_cs != expected_cs:
        gdaltest.post_reason('fail')
        print(got_cs)
        print(expected_cs)
        return 'fail'
    ds = None
    for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                 '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(path)
    return 'success'
def mrf_overview_avg_fact_3():
    """AVG overview with the non power-of-two factor 3 must yield
    checksum 658."""
    mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                         format='MRF',
                         creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'])
    mrf.BuildOverviews('AVG', [3])
    mrf = None
    ds = gdal.Open('/vsimem/out.mrf')
    got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    expected_cs = 658
    if got_cs != expected_cs:
        gdaltest.post_reason('fail')
        print(got_cs)
        print(expected_cs)
        return 'fail'
    ds = None
    for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                 '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(path)
    return 'success'
def mrf_overview_avg_with_nodata_fact_3():
    """AVG overview (factor 3) with nodata=107 must yield checksum 531
    for every supported data type."""
    expected_cs = 531
    data_types = [gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_UInt16,
                  gdal.GDT_Int32, gdal.GDT_UInt32,
                  gdal.GDT_Float32, gdal.GDT_Float64]
    for dt in data_types:
        mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif',
                             format='MRF',
                             creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=10'],
                             outputType=dt,
                             noData=107)
        mrf.BuildOverviews('AVG', [3])
        mrf = None
        ds = gdal.Open('/vsimem/out.mrf')
        got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if got_cs != expected_cs:
            gdaltest.post_reason('fail')
            print(dt)
            print(got_cs)
            print(expected_cs)
            return 'fail'
        ds = None
        for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg',
                     '/vsimem/out.til'):
            gdal.Unlink(path)
    return 'success'
def mrf_overview_partial_block():
    """Overview building must handle partial edge blocks (BLOCKSIZE=8 on a
    20x20 raster); expected checksum is 1087."""
    mrf = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF',
                         creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=8'])
    mrf.BuildOverviews('NEAR', [2])
    mrf = None
    ds = gdal.Open('/vsimem/out.mrf')
    got_cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    if got_cs != 1087:
        gdaltest.post_reason('fail')
        print(got_cs)
        return 'fail'
    ds = None
    for path in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                 '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(path)
    return 'success'
def mrf_overview_near_implicit_level():
    """Requesting only level 2 implicitly creates level 4; check it is initialized."""
    # With BLOCKSIZE=5 the MRF driver automatically creates overview levels
    # 2 and 4 even though only level 2 is requested.
    source = gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF',
                            creationOptions=['COMPRESS=NONE', 'BLOCKSIZE=5'])
    source.BuildOverviews('NEAR', [2])
    source = None
    reopened = gdal.Open('/vsimem/out.mrf')
    checksum = reopened.GetRasterBand(1).GetOverview(1).Checksum()
    if checksum != 328:
        gdaltest.post_reason('fail')
        print(checksum)
        return 'fail'
    reopened = None
    # Opening a non-existent L2 sub-dataset must fail...
    with gdaltest.error_handler():
        reopened = gdal.Open('/vsimem/out.mrf:MRF:L2')
    if reopened is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    # ...while L1 exists and matches the implicit level checksum.
    reopened = gdal.Open('/vsimem/out.mrf:MRF:L1')
    checksum = reopened.GetRasterBand(1).Checksum()
    if checksum != 328:
        gdaltest.post_reason('fail')
        print(checksum)
        return 'fail'
    reopened = None
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_overview_external():
    """Overviews built on a reopened MRF (external .ovr cleanup included)."""
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF')
    dataset = gdal.Open('/vsimem/out.mrf')
    dataset.BuildOverviews('NEAR', [2])
    dataset = None
    dataset = gdal.Open('/vsimem/out.mrf')
    checksum = dataset.GetRasterBand(1).GetOverview(0).Checksum()
    if checksum != 1087:
        gdaltest.post_reason('fail')
        print(checksum)
        print(1087)
        return 'fail'
    dataset = None
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.mrf.ovr', '/vsimem/out.idx',
                     '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_lerc_nodata():
    """LERC compression must preserve both the nodata value and pixel data."""
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF',
                   noData=107, creationOptions=['COMPRESS=LERC'])
    dataset = gdal.Open('/vsimem/out.mrf')
    nodata = dataset.GetRasterBand(1).GetNoDataValue()
    if nodata != 107:
        gdaltest.post_reason('fail')
        print(nodata)
        return 'fail'
    checksum = dataset.GetRasterBand(1).Checksum()
    if checksum != 4672:
        gdaltest.post_reason('fail')
        print(checksum)
        print(4672)
        return 'fail'
    dataset = None
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.lrc', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_lerc_with_huffman():
    """LERC on an upscaled dataset (exercises the Huffman coding path)."""
    gdal.Translate('/vsimem/out.mrf', 'data/small_world.tif', format='MRF',
                   width=5000, height=5000, creationOptions=['COMPRESS=LERC'])
    dataset = gdal.Open('/vsimem/out.mrf')
    checksum = dataset.GetRasterBand(1).Checksum()
    if checksum != 31204:
        gdaltest.post_reason('fail')
        print(checksum)
        print(31204)
        return 'fail'
    dataset = None
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.lrc', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_cached_source():
    """Exercise MRF CachedSource behaviour.

    Covers four scenarios: a source that cannot be opened, regular file
    caching (data must survive removal of the source), mp_safe caching,
    and clone caching.  Returns 'success' or 'fail' (old gdaltest style).
    """

    def _copy(src, dst):
        # Context managers close both handles deterministically; the
        # original open(dst, 'wb').write(open(src, 'rb').read()) leaked
        # both file objects to the garbage collector.
        with open(src, 'rb') as src_file:
            payload = src_file.read()
        with open(dst, 'wb') as dst_file:
            dst_file.write(payload)

    def _write_text(path, text):
        # Same unclosed-handle fix for the hand-written MRF metadata files.
        with open(path, 'wt') as out_file:
            out_file.write(text)

    def _checksum_ok(path, expected_cs):
        # Open `path`, compare band 1 checksum with `expected_cs`, and
        # register a gdaltest failure on mismatch.
        ds = gdal.Open(path)
        cs = ds.GetRasterBand(1).Checksum()
        ds = None
        if cs != expected_cs:
            gdaltest.post_reason('fail')
            print(cs)
            print(expected_cs)
            return False
        return True

    # Caching MRF whose source cannot be opened: reading must fail
    # gracefully with an all-zero checksum.
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF',
                   creationOptions=['CACHEDSOURCE=invalid_source', 'NOCOPY=TRUE'])
    ds = gdal.Open('/vsimem/out.mrf')
    with gdaltest.error_handler():
        cs = ds.GetRasterBand(1).Checksum()
    expected_cs = 0
    if cs != expected_cs:
        gdaltest.post_reason('fail')
        print(cs)
        print(expected_cs)
        return 'fail'
    ds = None
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(leftover)

    # Caching MRF with a valid relative source: the first read populates
    # the cache, so the checksum must survive removal of the source file.
    gdal.Unlink('tmp/byte.idx')
    gdal.Unlink('tmp/byte.ppg')
    _copy('data/byte.tif', 'tmp/byte.tif')
    gdal.Translate('tmp/out.mrf', 'tmp/byte.tif', format='MRF',
                   creationOptions=['CACHEDSOURCE=byte.tif', 'NOCOPY=TRUE'])
    if not _checksum_ok('tmp/out.mrf', 4672):
        return 'fail'
    gdal.Unlink('tmp/byte.tif')
    if not _checksum_ok('tmp/out.mrf', 4672):
        return 'fail'

    # Caching MRF in mp_safe mode.
    for leftover in ('tmp/out.mrf', 'tmp/out.mrf.aux.xml', 'tmp/out.idx',
                     'tmp/out.ppg', 'tmp/out.til'):
        gdal.Unlink(leftover)
    _copy('data/byte.tif', 'tmp/byte.tif')
    _write_text('tmp/out.mrf',
                """<MRF_META>
<CachedSource>
<Source>byte.tif</Source>
</CachedSource>
<Raster mp_safe="on">
<Size x="20" y="20" c="1" />
<PageSize x="512" y="512" c="1" />
</Raster>
<GeoTags>
<BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
<Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
</GeoTags>
</MRF_META>""")
    if not _checksum_ok('tmp/out.mrf', 4672):
        return 'fail'
    gdal.Unlink('tmp/byte.tif')
    if not _checksum_ok('tmp/out.mrf', 4672):
        return 'fail'

    # Cloning MRF: the clone must be readable while the cloned MRF exists
    # and after its removal (the data has been cached locally).
    _write_text('tmp/cloning.mrf',
                """<MRF_META>
<CachedSource>
<Source clone="true">out.mrf</Source>
</CachedSource>
<Raster>
<Size x="20" y="20" c="1" />
<PageSize x="512" y="512" c="1" />
</Raster>
<GeoTags>
<BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
<Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
</GeoTags>
</MRF_META>""")
    if not _checksum_ok('tmp/cloning.mrf', 4672):
        return 'fail'
    for leftover in ('tmp/out.mrf', 'tmp/out.mrf.aux.xml', 'tmp/out.idx',
                     'tmp/out.ppg', 'tmp/out.til'):
        gdal.Unlink(leftover)
    if not _checksum_ok('tmp/cloning.mrf', 4672):
        return 'fail'
    for leftover in ('tmp/cloning.mrf', 'tmp/cloning.mrf.aux.xml',
                     'tmp/cloning.idx', 'tmp/cloning.ppg', 'tmp/cloning.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_versioned():
    """A versioned MRF keeps the previous raster state as sub-dataset V1."""
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    # Caching MRF
    gdal.Translate('/vsimem/out.mrf', 'data/byte.tif', format='MRF')
    gdal.FileFromMemBuffer('/vsimem/out.mrf',
                           """<MRF_META>
<Raster versioned="on">
<Size x="20" y="20" c="1" />
<PageSize x="512" y="512" c="1" />
</Raster>
<GeoTags>
<BoundingBox minx="440720.00000000" miny="3750120.00000000" maxx="441920.00000000" maxy="3751320.00000000" />
<Projection>PROJCS["NAD27 / UTM zone 11N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26711"]]</Projection>
</GeoTags>
</MRF_META>""")
    # Overwrite the raster with zeros; the versioned file retains the
    # original content as version V1.
    ds = gdal.Open('/vsimem/out.mrf', gdal.GA_Update)
    ds.GetRasterBand(1).Fill(0)
    ds = None
    for name, expected in (('/vsimem/out.mrf', 0),
                           ('/vsimem/out.mrf:MRF:V0', 0),
                           ('/vsimem/out.mrf:MRF:V1', 4672)):
        ds = gdal.Open(name)
        cs = ds.GetRasterBand(1).Checksum()
        if cs != expected:
            gdaltest.post_reason('fail')
            print(cs)
            print(expected)
            return 'fail'
        ds = None
    # There is no version V2, so opening it must fail.
    with gdaltest.error_handler():
        ds = gdal.Open('/vsimem/out.mrf:MRF:V2')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    for leftover in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                     '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(leftover)
    return 'success'
def mrf_cleanup():
    """Remove every temporary file left behind by the MRF test suite."""
    patterns = [
        '12bit_rose_extract.jpg.*',
        'byte.tif.*',
        'int16.tif.*',
        'out.idx',
        'out.mrf',
        'out.mrf.aux.xml',
        'out.ppg',
        'rgbsmall.tif.*',
        'small_world_pct.tif.*',
        'float32.tif.*',
        'float64.tif.*',
        'int32.tif.*',
        'uint16.tif.*',
        'uint32.tif.*',
        'utmsmall.tif.*',
        'cloning.*']
    leftovers = [name
                 for pattern in patterns
                 for name in glob.glob('tmp/' + pattern)]
    for name in leftovers:
        gdal.Unlink(name)
    for name in ('/vsimem/out.mrf', '/vsimem/out.mrf.aux.xml',
                 '/vsimem/out.idx', '/vsimem/out.ppg', '/vsimem/out.til'):
        gdal.Unlink(name)
    return 'success'
# Accumulates the test entries run below: both (callable, description)
# tuples from the CreateCopy generation loop and bare test functions.
gdaltest_list = []
class myTestCreateCopyWrapper:
    """Wrap a GDALTest so min/max checking is skipped for lossy configurations."""

    def __init__(self, ut):
        self.ut = ut

    def myTestCreateCopy(self):
        # JPEG compression and reduced LERC precision are lossy, so exact
        # min/max statistics cannot be expected to survive the copy.
        lossy = 'COMPRESS=JPEG' in self.ut.options
        if not lossy:
            lossy = any(option.find('OPTIONS:LERC_PREC=') >= 0
                        for option in self.ut.options)
        return self.ut.testCreateCopy(check_minmax=not lossy)
# Generate one CreateCopy test per entry of init_list (defined earlier in
# the file).  Item layout as used below: item[0] source file name,
# item[1] is passed straight to gdaltest.GDALTest (presumably the band
# number -- confirm against gdaltest), item[2] checksum or
# [checksum, checksum_after_reopening], item[3] creation options.
for item in init_list:
    src_filename = item[0]
    if src_filename == '12bit_rose_extract.jpg':
        # jpeg.jpeg_1() presumably probes the JPEG library and records its
        # version; libjpeg 9b builds skip the 12-bit sample.
        import jpeg
        jpeg.jpeg_1()
        if gdaltest.jpeg_version == '9b':
            continue
    # Skip sources the current GDAL build cannot open at all.
    with gdaltest.error_handler():
        ds = gdal.Open('data/' + src_filename)
    if ds is None:
        continue
    ds = None
    options = []
    if item[3]:
        options = item[3]
    chksum_param = item[2]
    if isinstance(chksum_param, list):
        # Separate checksums for the fresh copy and for the reopened copy.
        chksum = chksum_param[0]
        chksum_after_reopening = chksum_param[1]
    else:
        chksum = chksum_param
        chksum_after_reopening = chksum_param
    ut = gdaltest.GDALTest('MRF', src_filename, item[1], chksum, options=options, chksum_after_reopening=chksum_after_reopening)
    if ut is None:
        # NOTE(review): a constructor call cannot return None, so this
        # branch looks unreachable -- kept as-is.
        print('MRF tests skipped')
    ut = myTestCreateCopyWrapper(ut)
    gdaltest_list.append((ut.myTestCreateCopy, item[0] + ' ' + str(options)))
# Register the hand-written MRF tests, in execution order (cleanup last).
gdaltest_list.extend([
    mrf_overview_near_fact_2,
    mrf_overview_near_with_nodata_fact_2,
    mrf_overview_avg_fact_2,
    mrf_overview_avg_with_nodata_fact_2,
    mrf_overview_near_fact_3,
    mrf_overview_avg_fact_3,
    mrf_overview_avg_with_nodata_fact_3,
    mrf_overview_partial_block,
    mrf_overview_near_implicit_level,
    mrf_overview_external,
    mrf_lerc_nodata,
    mrf_lerc_with_huffman,
    mrf_cached_source,
    mrf_versioned,
    mrf_zen_test,
    mrf_cleanup,
])
# Run the MRF test suite when this file is executed directly.
if __name__ == '__main__':
    gdaltest.setup_run('mrf')
    gdaltest.run_tests(gdaltest_list)
    gdaltest.summarize()
| 33.681196 | 654 | 0.599423 |
f21dc15c750fae3790acf4bb4eeb7c48cd795e8e | 5,017 | py | Python | exot/channel/_base.py | ETHZ-TEC/exot_eengine | 7b7ce6cb949e1b0a02e716b03f2f9af751713b29 | [
"BSD-3-Clause"
] | null | null | null | exot/channel/_base.py | ETHZ-TEC/exot_eengine | 7b7ce6cb949e1b0a02e716b03f2f9af751713b29 | [
"BSD-3-Clause"
] | null | null | null | exot/channel/_base.py | ETHZ-TEC/exot_eengine | 7b7ce6cb949e1b0a02e716b03f2f9af751713b29 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2015-2020, Swiss Federal Institute of Technology (ETH Zurich)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Base class for experiment analyses."""
import abc
import logging
import typing as t
import numpy as np
from numpy import inf
from exot.util.attributedict import AttributeDict
from exot.util.factory import GenericFactory
from exot.util.file import move_action
from exot.util.logging import Loggable, get_root_logger, long_log_formatter
from exot.util.mixins import SubclassTracker
class Channel(SubclassTracker, metaclass=abc.ABCMeta):
    """Abstract base for channels that can spawn analyses from the parent config."""

    @property
    def parent(self) -> object:
        """Owning experiment object, or None when none has been attached."""
        try:
            return self._parent
        except AttributeError:
            return None

    @parent.setter
    def parent(self, value):
        self._parent = value

    @property
    def hasparent(self):
        """True once a parent has been assigned."""
        return self.parent is not None

    @property
    def analyses(self):
        """Lazily created mapping of analysis name -> analysis instance."""
        try:
            return self._analyses
        except AttributeError:
            self._analyses = dict()
            return self._analyses

    @property
    @abc.abstractmethod
    def analyses_classes(self):
        """Mapping of analysis type name -> class; provided by subclasses."""

    def bootstrap_analyses(self):
        """Instantiate every analysis listed in the parent configuration."""
        if "ANALYSES" not in self.parent.config:
            get_root_logger().info("No analyses specified")
            return
        for analysis in self.parent.config.ANALYSES:
            self.bootstrap_analysis(analysis)

    def bootstrap_analysis(self, analysis_name: str):
        """Instantiate one configured analysis and register it by name."""
        if "ANALYSES" not in self.parent.config:
            # Mirrors the original behaviour: silently ignore when the
            # configuration carries no ANALYSES section at all.
            return
        if analysis_name not in self.parent.config.ANALYSES:
            get_root_logger().critical(
                f"Analysis {analysis_name} not specified in configuration!"
            )
            return
        kwargs = self.parent.config.ANALYSES[analysis_name].copy()
        kwargs.update(name=analysis_name)
        args = [self.parent]
        if "type" not in kwargs:
            get_root_logger().critical(
                f"type not specified in config for analysis {analysis_name}"
            )
            return
        if kwargs["type"] not in self.analyses_classes:
            get_root_logger().critical(
                f"Analysis of type %s not available for channel {type(self)}"
                % (kwargs["type"])
            )
            return
        new_analysis = self.analyses_classes[kwargs["type"]](*args, **kwargs)
        self.analyses[new_analysis.name] = new_analysis
class ChannelFactory(GenericFactory, klass=Channel):
    """Factory over Channel subclasses.

    All behaviour comes from GenericFactory; presumably it registers
    Channel subclasses for lookup/instantiation by name -- see
    exot.util.factory for the actual contract.
    """
    pass
class Analysis(SubclassTracker, metaclass=abc.ABCMeta):
    """Abstract base class for analyses executed on an experiment."""

    def __init__(self, *args, **kwargs):
        self.config = AttributeDict(kwargs)
        self.experiment = args[0]

    def execute(self, *args, **kwargs):
        """Prepare the output directory and logging, then run the analysis."""
        # An existing output directory is moved aside, not overwritten.
        if self.path.exists():
            self.experiment.logger.info(move_action(self.path))
        self.path.mkdir(exist_ok=True)
        # Mirror every log record into the analysis directory as well.
        handler = logging.FileHandler(self.log_path)
        handler.setFormatter(long_log_formatter())
        handler.setLevel(logging.NOTSET)
        self.experiment.logger.addHandler(handler)
        # NOTE(review): `self` is passed explicitly to the bound method, so
        # handlers receive it twice -- preserved to match existing handlers.
        self._execute_handler(self, *args, **kwargs)

    @abc.abstractmethod
    def _execute_handler(self, *args, **kwargs):
        """Concrete analyses implement the actual computation here."""

    @property
    def name(self):
        """Analysis name as given in the configuration."""
        return self.config.name

    @property
    def path(self):
        """Output directory of this analysis inside the experiment folder."""
        return self.experiment.path.joinpath(self.name)

    @property
    def log_path(self):
        """Location of the per-analysis debug log file."""
        return self.path.joinpath("debug.log")
208c63f6f2b2cca07c6147b7b4800e2208e64d2b | 1,335 | py | Python | ie/pe.py | nicolasmota/pyIE | 9e942dd692d0eab8a1e7717ad7ff3fe90d590ffb | [
"MIT"
] | null | null | null | ie/pe.py | nicolasmota/pyIE | 9e942dd692d0eab8a1e7717ad7ff3fe90d590ffb | [
"MIT"
] | null | null | null | ie/pe.py | nicolasmota/pyIE | 9e942dd692d0eab8a1e7717ad7ff3fe90d590ffb | [
"MIT"
] | null | null | null | # *-* coding:utf-8 *-*
"""Module states Pernanbuco"""
def start(st_reg_number):
    """Validate a state registration (IE) number for the Pernambuco state.

    Expects a 9-character digit string: 7 base digits followed by two
    modulo-11 check digits.  Returns True when both check digits match,
    False for wrong length, non-digit input, or mismatching digits.
    """
    divisor = 11
    # A Pernambuco IE has exactly 9 digits.
    if len(st_reg_number) != 9:
        return False
    # Reject non-numeric input instead of raising ValueError from int().
    if not st_reg_number.isdigit():
        return False
    # First check digit: weights 8..2 over the 7 base digits, modulo 11.
    sum_total = 0
    weight = 8
    for i in range(len(st_reg_number) - 2):
        sum_total += int(st_reg_number[i]) * weight
        weight -= 1
    rest_division = sum_total % divisor
    digit_first = 0 if rest_division < 2 else divisor - rest_division
    # Rebuild the 8-digit prefix (7 base digits + first check digit) as an
    # integer.  Integer division (//) is required here: the original true
    # division produced a float under Python 3, so str(num) contained ".0"
    # and the loop below crashed on int(".").
    num = 0
    mult = 10000000
    for i in range(len(st_reg_number) - 2):
        num += int(st_reg_number[i]) * mult
        mult //= 10
    num += digit_first
    new_st = str(num)
    # Second check digit: weights 9..3 over the rebuilt prefix.
    # NOTE(review): this loop stops before the first check digit (weight 2
    # is never applied) and str(num) drops leading zeros, which looks like
    # a deviation from the published SEFAZ-PE weighting.  Preserved on
    # purpose to keep existing accept/reject behaviour; worth confirming
    # against the official specification.
    sum_total = 0
    weight = 9
    for i in range(len(new_st) - 1):
        sum_total += int(new_st[i]) * weight
        weight -= 1
    rest_division = sum_total % divisor
    digit_secund = 0 if rest_division < 2 else divisor - rest_division
    return (digit_secund == int(st_reg_number[-1])
            and digit_first == int(st_reg_number[-2]))
| 23.017241 | 124 | 0.601498 |
6376557343f86e1d9c15a874dceea437e1016376 | 4,377 | py | Python | Helper/tap_executor.py | EVOLVED-5G/ELCM | 07d07a114b667e8c6915ee3ef125dd4864dd2247 | [
"Apache-2.0"
] | null | null | null | Helper/tap_executor.py | EVOLVED-5G/ELCM | 07d07a114b667e8c6915ee3ef125dd4864dd2247 | [
"Apache-2.0"
] | null | null | null | Helper/tap_executor.py | EVOLVED-5G/ELCM | 07d07a114b667e8c6915ee3ef125dd4864dd2247 | [
"Apache-2.0"
] | null | null | null | import subprocess
import psutil
import re
from Helper import Level
from Settings import Config, TapConfig
from typing import Dict, Optional, Callable
from time import sleep
class Tap:
    """Runs an OpenTAP/Keysight TAP test plan as a subprocess and relays
    its output to a logger, force-closing the process tree when the CLI
    hangs during shutdown."""

    # Class-level state shared by all instances; filled in by Initialize().
    initialized = False
    tapConfig: TapConfig = None
    # Matches TAP log lines announcing that an instrument resource closed.
    closingRegex = None
    @classmethod
    def Initialize(cls):
        """Load TAP settings and pre-compile the resource-closed pattern."""
        cls.tapConfig = Config().Tap
        cls.closingRegex = re.compile(r'.*Resource ".*" closed.*')
        cls.initialized = True
    def __init__(self, tapPlan: str, externals: Dict[str, str], logger: Callable):
        # Lazy one-time class initialization on first instantiation.
        if not self.initialized:
            self.Initialize()
        self.tapPlan = tapPlan
        self.externals = externals  # external parameters passed via -e
        self.args = self.getArgs(tapPlan, externals)
        self.logger = logger  # callable taking (Level, message)
        self.closedInstruments = 0
        self.closeStarted = False
        self.process: Optional[psutil.Process] = None
    @staticmethod
    def getArgs(tapPlan: str, externals: Dict[str, str]):
        """Build the TAP command line: executable, externals, then the plan."""
        # OpenTap CLI uses a `run` sub-command; the legacy TAP CLI does not.
        if Tap.tapConfig.OpenTap:
            args = [Tap.tapConfig.Path, 'run', '-v']
        else:
            args = [Tap.tapConfig.Path, '-v']
        for key, value in externals.items():
            args.extend(['-e', f'{key}={value}'])
        args.append(tapPlan)
        return args
    def notify(self):
        """Log the plan about to run and its external parameters."""
        self.logger(Level.INFO, f'Executing TapPlan: {self.tapPlan}')
        if len(self.externals) != 0:
            for key, value in self.externals.items():
                self.logger(Level.INFO, f'  {key}={value}')
    def Execute(self) -> int:
        """Run the plan to completion, streaming output; returns the exit code."""
        self.closedInstruments = 0
        self.closeStarted = False
        self.notify()
        process = subprocess.Popen(self.args, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT, cwd=Tap.tapConfig.Folder)
        sleep(0.5)  # Give some time to ensure that psutil finds the process
        self.process = psutil.Process(process.pid)
        # Blocks until TAP closes its stdout.
        self.tap_stdout(process)
        exitCode = process.wait()
        return exitCode
    def tap_stdout(self, process: subprocess.Popen):
        """Relay TAP stdout line by line, inferring log levels, and trigger
        forced shutdown when the configured closing conditions appear."""
        _levels = [('Debug', Level.DEBUG), ('Information', Level.INFO),
                   ('Warning', Level.WARNING), ('Error', Level.ERROR)]
        def _inferLevel(l: str) -> Level:
            # Map TAP's ": <Severity> :" markers to our log levels.
            for level in _levels:
                string, res = level
                if re.match(f'.*:\s*{string}\s*:.*', l): return res
            return Level.INFO
        pipe = process.stdout
        for line in iter(pipe.readline, b''):
            try: line = line.decode('utf-8').rstrip()
            except Exception as e: line = f"DECODING EXCEPTION: {e}"
            level = _inferLevel(line)
            self.logger(level, f"[TAP]{line}")
            if self.tapConfig.EnsureClosed:
                # TAP announced it is aborting: schedule a forced close.
                if 'Unable to continue. Now exiting TAP CLI' in line:
                    self.closeStarted = True
                    Tap.ensureTapClosed(self.process, self.logger, self.tapConfig.EnsureAdbClosed)
                if Tap.closingRegex.match(line):
                    # Heuristic: after 3 "Resource ... closed" messages the
                    # plan is assumed to be winding down -- TODO confirm the
                    # threshold against the TAP plans in use.
                    self.closedInstruments += 1
                    if self.closedInstruments >= 3 and not self.closeStarted:
                        self.closeStarted = True
                        Tap.ensureTapClosed(self.process, self.logger, self.tapConfig.EnsureAdbClosed)
    @staticmethod
    def ensureTapClosed(tapProcess: psutil.Process, logger, closeAdb):
        """Give TAP 15 seconds to exit on its own; then kill its process tree
        and optionally any stray adb.exe processes."""
        logger(Level.INFO, 'Ensuring that TAP is correctly closed (in 15 seconds).')
        sleep(15)
        if tapProcess.is_running():
            logger(Level.WARNING, f'TAP still running, stopping child processes '
                                  f'({len(tapProcess.children(recursive=True))})...')
            Tap.endProcessTree(tapProcess)
            logger(Level.INFO, 'Process tree closed')
        else:
            logger(Level.INFO, 'TAP closed correctly')
        if closeAdb:
            # adb.exe can outlive TAP and keep devices locked (Windows name).
            for p in psutil.process_iter():
                if p.name() == 'adb.exe':
                    logger(Level.WARNING, f"Closing rogue adb process with PID: {p.pid}")
                    p.kill()
    @classmethod
    def endProcessTree(cls, process: psutil.Process):
        """Terminate all children of `process` (depth-first), then the process."""
        def safeTerminate(p: psutil.Process):
            # The process may already be gone by the time we terminate it.
            try: p.terminate()
            except psutil.NoSuchProcess: pass
        for child in process.children(recursive=True): # type: psutil.Process
            safeTerminate(child)
        safeTerminate(process)
| 34.738095 | 102 | 0.587617 |
44735e4d81cdc90254897537f132e74ab8c9ba1c | 163 | py | Python | PoseEstimation/Script/Preprocessing/MotionBVH/PreproScript/__init__.py | AtsushiHashimoto/KinectOnTheCeiling | 116448e706da8b4e87e5402310747f46821beb4a | [
"MIT"
] | null | null | null | PoseEstimation/Script/Preprocessing/MotionBVH/PreproScript/__init__.py | AtsushiHashimoto/KinectOnTheCeiling | 116448e706da8b4e87e5402310747f46821beb4a | [
"MIT"
] | null | null | null | PoseEstimation/Script/Preprocessing/MotionBVH/PreproScript/__init__.py | AtsushiHashimoto/KinectOnTheCeiling | 116448e706da8b4e87e5402310747f46821beb4a | [
"MIT"
] | null | null | null |
from .bvh2wc import make_wc
from .normalize_bvh import normalize_bvh
from .reduce_bvh import reduce_bvh

# Fix: __all__ previously listed "make_convert", a name that is neither
# imported nor defined in this package module (the imported helper is
# make_wc), so `from PreproScript import *` raised AttributeError.
__all__ = ["make_wc", "normalize_bvh", "reduce_bvh"]
9874d8fe072575b9de82413c0e4e2c91d076174d | 1,702 | py | Python | setup.py | frague59/django-yamlfield | c7eb6f313ffcb6d5128b4b5069a2d7fa5e9f865d | [
"MIT"
] | null | null | null | setup.py | frague59/django-yamlfield | c7eb6f313ffcb6d5128b4b5069a2d7fa5e9f865d | [
"MIT"
] | null | null | null | setup.py | frague59/django-yamlfield | c7eb6f313ffcb6d5128b4b5069a2d7fa5e9f865d | [
"MIT"
] | 2 | 2020-01-31T20:18:37.000Z | 2020-06-07T03:02:40.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from distutils.core import Command
class TestCommand(Command):
    """`setup.py test` command: configure a throwaway Django environment
    and run the yamlfield test suite."""

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        from django.conf import settings

        # An in-memory SQLite database keeps the run self-contained.
        database_config = {
            'default': {
                'NAME': ':memory:',
                'ENGINE': 'django.db.backends.sqlite3'
            }
        }
        settings.configure(
            DATABASES=database_config,
            MIDDLEWARE_CLASSES=(),
            INSTALLED_APPS=('yamlfield',)
        )
        from django.core.management import call_command
        import django
        django.setup()
        call_command('test', 'yamlfield')
# Distribution metadata for the django-yamlfield package.
setup(
    name='django-yamlfield',
    version='1.0.3',
    description='A Django database field for storing YAML data',
    author='The Los Angeles Times Data Desk',
    author_email='datadesk@latimes.com',
    url="http://django-yamlfield.readthedocs.io/",
    packages=find_packages(),
    include_package_data=True,
    license="MIT",
    # Runtime requirements: YAML parsing plus py2/py3 compatibility shims.
    install_requires=(
        'PyYAML>=3.10',
        'six>=1.4.1'
    ),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Framework :: Django',
        'Framework :: Django :: 1.9',
        'Framework :: Django :: 1.10',
        'License :: OSI Approved :: MIT License',
    ],
    # Wires the custom TestCommand above into `setup.py test`.
    cmdclass={'test': TestCommand,}
)
| 27.451613 | 64 | 0.568743 |
12769af402a8dd3ee42e119166728eb08b7cadda | 2,226 | py | Python | data/cirq_new/cirq_program/startCirq_noisy621.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/cirq_new/cirq_program/startCirq_noisy621.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/cirq_new/cirq_program/startCirq_noisy621.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=19
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
def make_circuit(n: int, input_qubit):
    """Assemble the fixed 4-qubit demo circuit and a final measurement.

    `n` is accepted for interface compatibility but unused; the gate
    sequence is hard-coded over `input_qubit`.
    """
    prog = cirq.Circuit()
    operations = [
        cirq.H.on(input_qubit[0]),
        cirq.H.on(input_qubit[1]),
        cirq.H.on(input_qubit[2]),
        cirq.H.on(input_qubit[3]),
        cirq.SWAP.on(input_qubit[2], input_qubit[0]),
        cirq.CNOT.on(input_qubit[0], input_qubit[1]),
        cirq.X.on(input_qubit[1]),
        cirq.CNOT.on(input_qubit[0], input_qubit[1]),
        cirq.SWAP.on(input_qubit[2], input_qubit[0]),
        cirq.rx(-1.0053096491487337).on(input_qubit[2]),
        cirq.CNOT.on(input_qubit[0], input_qubit[3]),
        cirq.X.on(input_qubit[3]),
        cirq.CNOT.on(input_qubit[0], input_qubit[3]),
        cirq.X.on(input_qubit[3]),
        cirq.X.on(input_qubit[2]),
        cirq.X.on(input_qubit[2]),
    ]
    for operation in operations:
        # Append one op at a time so the moment packing matches the
        # original per-call append sequence exactly.
        prog.append(operation)
    prog.append(cirq.measure(*input_qubit, key='result'))
    return prog
def bitstring(bits):
    """Join a sequence of measurement values into a digit string."""
    digits = [str(int(bit)) for bit in bits]
    return ''.join(digits)
if __name__ == '__main__':
    # Build the demo circuit on a 4-qubit grid column.
    qubit_count = 4
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Recompile for the Sycamore gate set.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count =2000
    # Add a 1% depolarizing noise channel to every operation.
    circuit = circuit.with_noise(cirq.depolarize(p=0.01))
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)
    # Histogram of measurement outcomes keyed by their bitstring form.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq_noisy621.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
writefile.close() | 32.735294 | 77 | 0.688679 |
9c22b5f7fe43753d8468a4da38fe90d37d306be1 | 10,591 | py | Python | tools/gen-magic-strings.py | zhanzr/jerryscript | 130288749884afe853caeddee129c031a17c5ac7 | [
"Apache-2.0"
] | 1,144 | 2018-12-18T09:46:47.000Z | 2022-03-07T14:51:46.000Z | tools/gen-magic-strings.py | zhanzr/jerryscript | 130288749884afe853caeddee129c031a17c5ac7 | [
"Apache-2.0"
] | 281 | 2018-02-13T18:50:54.000Z | 2019-01-26T06:17:56.000Z | tools/gen-magic-strings.py | zhanzr/jerryscript | 130288749884afe853caeddee129c031a17c5ac7 | [
"Apache-2.0"
] | 129 | 2018-12-18T09:46:50.000Z | 2022-03-30T07:30:13.000Z | #!/usr/bin/env python
# Copyright JS Foundation and other contributors, http://js.foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import argparse
import fileinput
import json
import os
import re
from settings import PROJECT_DIR
MAGIC_STRINGS_INI = os.path.join(PROJECT_DIR, 'jerry-core', 'lit', 'lit-magic-strings.ini')
MAGIC_STRINGS_INC_H = os.path.join(PROJECT_DIR, 'jerry-core', 'lit', 'lit-magic-strings.inc.h')
def debug_dump(obj):
    """Render *obj* as pretty-printed JSON, coercing non-JSON types first.

    Tuples become lists, set members and dict keys are replaced by their
    repr() so arbitrary hashables survive serialization.
    """
    def _jsonable(value):
        if isinstance(value, (list, tuple)):
            return [_jsonable(item) for item in value]
        if isinstance(value, set):
            return [repr(item) for item in value]
        if isinstance(value, dict):
            return {repr(key): _jsonable(item) for key, item in value.items()}
        return value
    return json.dumps(_jsonable(obj), indent=4)
def read_magic_string_defs(debug=False):
    """Load magic string definitions from lit-magic-strings.ini.

    Entries of the [LIT_MAGIC_STRINGS] section, e.g.
    LIT_MAGIC_STRING_xxx = "vvv", are returned as a list of
    ('LIT_MAGIC_STRING_xxx', 'vvv') tuples sorted by value length, then
    by value.
    """
    parser = ConfigParser()
    parser.optionxform = str  # keep magic string identifiers case sensitive
    parser.read(MAGIC_STRINGS_INI)
    definitions = []
    for identifier, raw_value in parser.items('LIT_MAGIC_STRINGS'):
        value = json.loads(raw_value) if raw_value != '' else ''
        definitions.append((identifier, value))
    definitions.sort(key=lambda entry: (len(entry[1]), entry[1]))
    if debug:
        print('debug: magic string definitions: {dump}'
              .format(dump=debug_dump(definitions)))
    return definitions
def extract_magic_string_refs(debug=False):
    """Scan jerry-core sources for LIT_MAGIC_STRING_* references.

    Returns a dict mapping each referenced identifier to a dict of
    preprocessor-guard tuples, each holding the (file, line) locations
    where the identifier appears under those guards.
    """
    results = {}
    def process_line(fname, lnum, line, guard_stack):
        # Build `results` dictionary as
        # results['LIT_MAGIC_STRING_xxx'][('!defined (CONFIG_DISABLE_yyy_BUILTIN)', ...)]
        # = [('zzz.c', 123), ...]
        # meaning that the given literal is referenced under the given guards at
        # the listed (file, line number) locations.
        for str_ref in re.findall('LIT_MAGIC_STRING_[a-zA-Z0-9_]+', line):
            # Helper macros/sentinels, not actual magic string identifiers.
            if str_ref in ['LIT_MAGIC_STRING_DEF',
                           'LIT_MAGIC_STRING_FIRST_STRING_WITH_SIZE',
                           'LIT_MAGIC_STRING_LENGTH_LIMIT',
                           'LIT_MAGIC_STRING__COUNT']:
                continue
            # Flatten the stack of nested guards into one sorted tuple so
            # it can serve as a dictionary key.
            guard_set = set()
            for guards in guard_stack:
                guard_set.update(guards)
            guard_tuple = tuple(sorted(guard_set))
            if str_ref not in results:
                results[str_ref] = {}
            str_guards = results[str_ref]
            if guard_tuple not in str_guards:
                str_guards[guard_tuple] = []
            file_list = str_guards[guard_tuple]
            file_list.append((fname, lnum))
    def process_guard(guard):
        # Transform `#ifndef MACRO` to `#if !defined (MACRO)` and
        # `#ifdef MACRO` to `#if defined (MACRO)` to enable or-ing/and-ing the
        # conditions later on.
        if guard.startswith('ndef '):
            guard = guard.replace('ndef ', '!defined (', 1) + ')'
        elif guard.startswith('def '):
            guard = guard.replace('def ', 'defined (', 1) + ')'
        return guard
    def process_file(fname):
        # Builds `guard_stack` list for each line of a file as
        # [['!defined (CONFIG_DISABLE_yyy_BUILTIN)', ...], ...]
        # meaning that all the listed guards (conditionals) have to hold for the
        # line to be kept by the preprocessor.
        guard_stack = []
        for line in fileinput.input(fname):
            if_match = re.match('^ *# *if(.*)', line)
            elif_match = re.match('^ *# *elif(.*)', line)
            else_match = re.match('^ *# *else', line)
            endif_match = re.match('^ *# *endif', line)
            if if_match is not None:
                # New nesting level with a single condition.
                guard_stack.append([process_guard(if_match.group(1))])
            elif elif_match is not None:
                # Previous branch must now be false; add the new condition.
                guards = guard_stack[-1]
                guards[-1] = '!(%s)' % guards[-1]
                guards.append(process_guard(elif_match.group(1)))
            elif else_match is not None:
                # #else: negate the last branch condition.
                guards = guard_stack[-1]
                guards[-1] = '!(%s)' % guards[-1]
            elif endif_match is not None:
                guard_stack.pop()
            lnum = fileinput.filelineno()
            process_line(fname, lnum, line, guard_stack)
        if guard_stack:
            print('warning: {fname}: unbalanced preprocessor conditional '
                  'directives (analysis finished with no closing `#endif` '
                  'for {guard_stack})'
                  .format(fname=fname, guard_stack=guard_stack))
    # Walk all C sources/headers except the generated include file itself.
    for root, _, files in os.walk(os.path.join(PROJECT_DIR, 'jerry-core')):
        for fname in files:
            if (fname.endswith('.c') or fname.endswith('.h')) \
               and fname != 'lit-magic-strings.inc.h':
                process_file(os.path.join(root, fname))
    if debug:
        print('debug: magic string references: {dump}'
              .format(dump=debug_dump(results)))
    return results
def calculate_magic_string_guards(defs, uses, debug=False):
    """Attach the minimal set of preprocessor guards to each definition.

    `defs` is a list of (identifier, value) pairs and `uses` maps
    identifiers to {guard-tuple: [(file, line), ...]} dictionaries, as
    produced by extract_magic_string_refs().  Returns a list of
    (identifier, value, guards) triplets; definitions that are never
    referenced are dropped with a warning.
    """
    extended_defs = []
    for str_ref, str_value in defs:
        if str_ref not in uses:
            print('warning: unused magic string {str_ref}'
                  .format(str_ref=str_ref))
            continue
        # Keep only the most generic guards: if a magic string is referenced
        # under guard sets A and B with A a proper subset of B, then
        # "A or B == A", so B is redundant and can be discarded.  All
        # candidate sets are distinct (they come from distinct sorted
        # tuples), so a set survives exactly when no other candidate is a
        # proper subset of it.
        candidates = [set(guard_tuple) for guard_tuple in uses[str_ref].keys()]
        guards = {
            tuple(sorted(candidate))
            for candidate in candidates
            if not any(other < candidate
                       for other in candidates if other is not candidate)
        }
        extended_defs.append((str_ref, str_value, guards))
    if debug:
        print('debug: magic string definitions (with guards): {dump}'
              .format(dump=debug_dump(extended_defs)))
    return extended_defs
def guards_to_str(guards):
    """Render a set of guard tuples as a C preprocessor condition.

    The guards within one tuple are and-ed together, the tuples themselves
    are or-ed, one per (continued) line, in deterministic sorted order.
    """
    clauses = []
    for guard in sorted(guards):
        clauses.append(' && '.join(sorted(guard)))
    return ' \\\n|| '.join(clauses)
def generate_header(gen_file):
    """Write the license banner and auto-generation notice to gen_file."""
    script_name = os.path.basename(__file__)
    ini_name = os.path.basename(MAGIC_STRINGS_INI)
    header = \
"""/* Copyright JS Foundation and other contributors, http://js.foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* This file is automatically generated by the %s script
 * from %s. Do not edit! */
""" % (script_name, ini_name)
    print(header, file=gen_file)
def generate_magic_string_defs(gen_file, defs):
    """Emit the LIT_MAGIC_STRING_DEF list, wrapped in #if/#endif guards.

    `defs` is a list of (identifier, value, guards) triplets; consecutive
    definitions sharing the same guard set are grouped under a single #if.
    """
    print(file=gen_file) # empty line separator
    # An empty tuple in a guard set means "unconditional"; start from that
    # state so the first guarded run opens its own #if.
    last_guards = set([()])
    for str_ref, str_value, guards in defs:
        if last_guards != guards:
            # Guard set changed: close the previous conditional block (if
            # any) and open a new one unless the run is unconditional.
            if () not in last_guards:
                print('#endif', file=gen_file)
            if () not in guards:
                print('#if {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
        print('LIT_MAGIC_STRING_DEF ({str_ref}, {str_value})'
              .format(str_ref=str_ref, str_value=json.dumps(str_value)), file=gen_file)
        last_guards = guards
    # Close a still-open conditional block at the end of the list.
    if () not in last_guards:
        print('#endif', file=gen_file)
def generate_first_magic_strings(gen_file, defs):
    """Emit LIT_MAGIC_STRING_FIRST_STRING_WITH_SIZE entries for every size.

    For each string length `size` from 0 up to the longest definition, emit
    the first definition of at least that length, once per distinct guard
    set, chained together with #if/#elif/#else so that exactly one entry
    survives preprocessing whichever configuration is active.
    """
    print(file=gen_file) # empty line separator
    # `defs` is sorted by length, so the last entry holds the maximum size.
    max_size = len(defs[-1][1])
    for size in range(max_size + 1):
        last_guards = set([()])
        for str_ref, str_value, guards in defs:
            if len(str_value) >= size:
                # Transition between guard states drives the directive kind:
                # unconditional -> guarded opens an #if, guarded -> guarded
                # continues with #elif, guarded -> unconditional falls back
                # via #else.
                if () not in guards and () in last_guards:
                    print('#if {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
                elif () not in guards and () not in last_guards:
                    # Same guard set as the previous candidate: it is already
                    # covered by the entry emitted for that branch.
                    if guards == last_guards:
                        continue
                    print('#elif {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
                elif () in guards and () not in last_guards:
                    print('#else', file=gen_file)
                print('LIT_MAGIC_STRING_FIRST_STRING_WITH_SIZE ({size}, {str_ref})'
                      .format(size=size, str_ref=str_ref), file=gen_file)
                # An unconditional entry terminates the chain for this size.
                if () in guards:
                    break
                last_guards = guards
        if () not in last_guards:
            print('#endif', file=gen_file)
def main():
    """Regenerate lit-magic-strings.inc.h from lit-magic-strings.ini."""
    parser = argparse.ArgumentParser(description='lit-magic-strings.inc.h generator')
    parser.add_argument('--debug', action='store_true', help='enable debug output')
    args = parser.parse_args()
    # Pipeline: read definitions, find their reference sites, compute the
    # minimal guard set per definition, then emit the generated header.
    defs = read_magic_string_defs(debug=args.debug)
    uses = extract_magic_string_refs(debug=args.debug)
    extended_defs = calculate_magic_string_guards(defs, uses, debug=args.debug)
    with open(MAGIC_STRINGS_INC_H, 'w') as gen_file:
        generate_header(gen_file)
        generate_magic_string_defs(gen_file, extended_defs)
        generate_first_magic_strings(gen_file, extended_defs)
if __name__ == '__main__':
    main()
| 36.52069 | 95 | 0.612879 |
9166c463a5a09c4e4db758f9a5b23734cf446ea5 | 2,194 | py | Python | netbox/extras/webhooks.py | esljaz/netbox | e7f64334c06748b4b85c54d881f5e2b03b9463b5 | [
"Apache-2.0"
] | null | null | null | netbox/extras/webhooks.py | esljaz/netbox | e7f64334c06748b4b85c54d881f5e2b03b9463b5 | [
"Apache-2.0"
] | null | null | null | netbox/extras/webhooks.py | esljaz/netbox | e7f64334c06748b4b85c54d881f5e2b03b9463b5 | [
"Apache-2.0"
] | null | null | null | import hashlib
import hmac
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from django_rq import get_queue
from utilities.api import get_serializer_for_model
from .choices import *
from .models import Webhook
from .registry import registry
def generate_signature(request_body, secret):
    """
    Return a cryptographic signature that can be used to verify the authenticity of webhook data.
    """
    # HMAC-SHA512 keyed with the webhook's shared secret; the receiver can
    # recompute the digest over the raw request body to authenticate it.
    digest = hmac.new(secret.encode('utf8'), request_body, hashlib.sha512)
    return digest.hexdigest()
def enqueue_webhooks(instance, user, request_id, action):
    """
    Find Webhook(s) assigned to this instance + action and enqueue them
    to be processed
    """
    # Determine whether this type of object supports webhooks
    app_label = instance._meta.app_label
    model_name = instance._meta.model_name
    if model_name not in registry['model_features']['webhooks'].get(app_label, []):
        # Model does not declare webhook support; nothing to enqueue.
        return
    # Retrieve any applicable Webhooks
    obj_type = ContentType.objects.get_for_model(instance)
    # Map the change action to the Webhook model field that enables it.
    action_flag = {
        ObjectChangeActionChoices.ACTION_CREATE: 'type_create',
        ObjectChangeActionChoices.ACTION_UPDATE: 'type_update',
        ObjectChangeActionChoices.ACTION_DELETE: 'type_delete',
    }[action]
    webhooks = Webhook.objects.filter(obj_type=obj_type, enabled=True, **{action_flag: True})
    if webhooks.exists():
        # Get the Model's API serializer class and serialize the object
        serializer_class = get_serializer_for_model(instance.__class__)
        # No HTTP request is available in this context; serializers must
        # tolerate request=None (e.g. no absolute URL building).
        serializer_context = {
            'request': None,
        }
        serializer = serializer_class(instance, context=serializer_context)
        # Enqueue the webhooks
        webhook_queue = get_queue('default')
        for webhook in webhooks:
            # The worker (extras.webhooks_worker.process_webhook) receives
            # the serialized snapshot plus metadata identifying who changed
            # what and when.
            webhook_queue.enqueue(
                "extras.webhooks_worker.process_webhook",
                webhook,
                serializer.data,
                instance._meta.model_name,
                action,
                str(timezone.now()),
                user.username,
                request_id
            )
| 32.746269 | 97 | 0.674111 |
cd302faef649da130a32c345862657d50462e98c | 1,186 | py | Python | Test/getSystemVisitCode.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | 1 | 2015-08-11T17:39:26.000Z | 2015-08-11T17:39:26.000Z | Test/getSystemVisitCode.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | null | null | null | Test/getSystemVisitCode.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | 2 | 2015-11-13T18:05:48.000Z | 2020-01-17T17:28:08.000Z | __author__ = 'sulantha'
import datetime
from Utils.DbUtils import DbUtils
# Script: cross-check AV45 scan entries from a CSV export against the
# Conversion table and report rows that are missing from the database.
csvFile = '/data/data03/sulantha/Downloads/av45_list.csv'
# Two connections: one to the ADNI study metadata, one to the default DB.
MatchDBClient = DbUtils(database='Study_Data.ADNI')
DBClient = DbUtils()
with open(csvFile, 'r') as csv:
    next(csv)  # skip the CSV header row
    for line in csv:
        # Expected columns: RID, scan date (remaining columns ignored).
        row = line.split(',')
        rid = row[0].strip()
        date = row[1].strip()
        dateT = datetime.datetime.strptime(date, '%m/%d/%Y')
        #dateT = datetime.datetime.strptime(date, '%Y-%m-%d')
        dateS = dateT.strftime('%Y-%m-%d')
        # NOTE(review): SQL is built via str.format with CSV-derived values —
        # fine for a trusted local export, but not injection-safe; consider
        # parameterized queries if inputs can vary.
        sql = "SELECT DISTINCT subject, visit, seriesid, imageid FROM PET_META_LIST WHERE subject like '%_%_{0}' and scandate = '{1}' and origproc = 'Original'".format(rid, dateS)
        result = MatchDBClient.executeAllResults(sql)
        # NOTE(review): result[0] raises IndexError if no PET_META_LIST row
        # matches — presumably every CSV row has a match; confirm.
        checkDBSQL = "SELECT * FROM Conversion WHERE RID = '{0}' AND S_IDENTIFIER = '{1}' AND I_IDENTIFIER = '{2}'".format(rid, 'S{0}'.format(result[0][2]), 'I{0}'.format(result[0][3]))
        #print(checkDBSQL)
        resultN = DBClient.executeAllResults(checkDBSQL)
        if len(resultN) == 0:
            # No Conversion entry for this (RID, series, image): report it.
            print('########################### Not in DB - {0} - {1}'.format(rid, date))
        else:
            pass
3e944a7dbf85b36eb6232aa0c346f40beb6488f8 | 1,291 | py | Python | Curso_Em_Video_Python/ex056.py | ThallesTorres/Curso_Em_Video_Python | 95ffbff5a03f11fee41df746604dfe435f385a3b | [
"MIT"
] | null | null | null | Curso_Em_Video_Python/ex056.py | ThallesTorres/Curso_Em_Video_Python | 95ffbff5a03f11fee41df746604dfe435f385a3b | [
"MIT"
] | null | null | null | Curso_Em_Video_Python/ex056.py | ThallesTorres/Curso_Em_Video_Python | 95ffbff5a03f11fee41df746604dfe435f385a3b | [
"MIT"
] | null | null | null | # Ex: 056 - Desenvolva um programa que leia o nome, idade e sexo de 4 pessoas.
# No final do programa, mostre: A média de idade do grupo; Qual é o nome do
# homem mais velho; Quantas mulheres têm menos de 20 anos.
print('''
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
--Seja bem-vindo!
--Exercício 056
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
''')
soma_idade, mais_velho, cont = 0, 0, 0
nome_mais_velho = ''
print('--Preencha os Dados: ')
for c in range(1, 5):
print(f'--{c}° Pessoa')
nome = input(f'Nome: ').capitalize().strip()
idade = int(input(f'Idade: '))
sexo = input(f'Sexo [M/F]: ').upper().strip()
soma_idade += idade
if sexo != 'M' and sexo != 'F':
print('Sexo Inválido. Tente Novamente...')
elif sexo == 'M' and idade >= mais_velho:
mais_velho = idade
nome_mais_velho = nome
elif sexo == 'F' and idade < 20:
cont += 1
print(f'''\n--Dados Finais:
A Média de Idade do grupo é de {soma_idade / 4:.1f} anos.
O nome do Homem mais velho {nome_mais_velho} e ele tem {mais_velho} anos.
No grupo tem {cont} Mulheres com menos de 20 anos.''')
print('''
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
--Obrigado pelo uso!
--Desenvolvido por Thalles Torres
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-''')
| 28.688889 | 79 | 0.538342 |
3e7b523637a1ba659b9a7ec725858b6521d5017b | 4,047 | py | Python | scripts/tests/make_echolib_api.py | diregoblin/muscle3 | 33761235eebd31984bb470161f259437dba73627 | [
"Apache-2.0"
] | null | null | null | scripts/tests/make_echolib_api.py | diregoblin/muscle3 | 33761235eebd31984bb470161f259437dba73627 | [
"Apache-2.0"
] | null | null | null | scripts/tests/make_echolib_api.py | diregoblin/muscle3 | 33761235eebd31984bb470161f259437dba73627 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import argparse
import api_generator
from api_generator import (
API, Bool, Bytes, Class, Constructor, Destructor, Double, Enum,
EnumVal, Int, Int64t, MemFun, MemFunTmpl, Namespace, Obj, OverloadSet,
String, T, VecDbl, Vec2Dbl, Void)
color_desc = Enum('Color', [('RED', 1), ('GREEN', 2), ('BLUE', 3)])
cmdlineargs_desc = Class('CmdLineArgs', [
Constructor([Int('count')]),
Destructor(),
MemFun(Void(), 'set_arg', [Int('i'), String('arg')]),
])
echo_desc = Class('Echo', [
Constructor([Obj('CmdLineArgs', 'cla')], fc_override=(
'std::intptr_t ECHO_Echo_create_(std::intptr_t cla) {\n'
' CmdLineArgs * cla_p = reinterpret_cast<CmdLineArgs *>(\n'
' cla);\n'
' Echo * result = new Echo(cla_p->argc(), cla_p->argv());\n'
' return reinterpret_cast<std::intptr_t>(result);\n'
'}\n\n'),
f_override=(
'type(ECHO_Echo) function ECHO_Echo_create()\n'
' implicit none\n'
'\n'
' integer :: num_args, i, arg_len\n'
' integer (c_intptr_t) :: cla\n'
' character (kind=c_char, len=:), allocatable :: cur_arg\n'
'\n'
' num_args = command_argument_count()\n'
' cla = ECHO_impl_CmdLineArgs_create_(num_args + 1)\n'
' do i = 0, num_args\n'
' call get_command_argument(i, length=arg_len)\n'
' allocate (character(arg_len+1) :: cur_arg)\n'
' call get_command_argument(i, value=cur_arg)\n'
' cur_arg(arg_len+1:arg_len+1) = c_null_char\n'
' call ECHO_impl_CmdLineArgs_set_arg_('
'cla, i, cur_arg, int(len(cur_arg), c_size_t))\n'
' deallocate(cur_arg)\n'
' end do\n'
' ECHO_Echo_create%ptr = ECHO_Echo_create_(cla)\n'
' call ECHO_impl_CmdLineArgs_free_(cla)\n'
'end function ECHO_Echo_create\n'
'\n'
)),
Destructor(),
MemFun(Void(), 'echo_nothing'),
MemFun(Int(), 'echo_int', [Int('value')]),
MemFun(Int64t(), 'echo_int64_t', [Int64t('value')]),
MemFun(Double(), 'echo_double', [Double('value')]),
MemFun(Bool(), 'echo_bool', [Bool('value')]),
MemFun(EnumVal('Color'), 'echo_enum', [EnumVal('Color', 'value')]),
MemFun(String(), 'echo_string', [String('value')]),
MemFun(VecDbl('echo'), 'echo_double_vec', [VecDbl('value')]),
MemFun(Vec2Dbl('echo'), 'echo_double_vec2', [Vec2Dbl('value')]),
MemFun(Obj('Echo'), 'echo_object', [Obj('Echo', 'value')]),
MemFun(Bytes('echo'), 'echo_bytes', [Bytes('value')]),
MemFun(String(), 'echo_error', [Double('value')], True),
MemFunTmpl(
[Int(), Double(), String()],
T(), 'echo_template', [T('value')], True),
OverloadSet('echo', ['echo_int', 'echo_bool', 'echo_double']),
])
test_api_description = API(
'echolib',
[
'echolib.hpp',
'cmdlineargs.hpp',
'cstdint',
'stdexcept',
'string',
'vector'],
[
Namespace('echolib', True, 'ECHO', [color_desc], [echo_desc]),
Namespace('echolib::impl', False, 'ECHO_impl', [],
[cmdlineargs_desc])
])
if __name__ == '__main__':
    # Emit either the Fortran C-wrapper layer or the Fortran module for the
    # echolib test API, depending on the selected flag.
    parser = argparse.ArgumentParser(description='Test API Generator')
    parser.add_argument('--fortran-c-wrappers', action='store_true')
    parser.add_argument('--fortran-module', action='store_true')
    args = parser.parse_args()
    if args.fortran_c_wrappers:
        print(test_api_description.fortran_c_wrapper())
    elif args.fortran_module:
        print(test_api_description.fortran_module())
| 40.069307 | 78 | 0.527798 |
a219f6da4f5c59dccb81d5b4f910a4b98b6e8463 | 29,704 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_route_tables_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_route_tables_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_route_tables_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RouteTablesOperations(object):
"""RouteTablesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # client: pipeline client used to send HTTP requests.
        # config: service client configuration (subscription id, polling interval).
        # serializer/deserializer: msrest (de)serializers for request URL
        # parts, bodies and response payloads.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Send the initial DELETE request; long-running polling is handled
        by :meth:`begin_delete`."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202 = accepted for deletion, 204 = already gone.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified route table.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                route_table_name=route_table_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # DELETE has no body to deserialize; only apply a custom cls.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteTable"
        """Gets the specified route table.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RouteTable, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_07_01.models.RouteTable
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteTable"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is optional; only add it when the caller supplied a value.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RouteTable', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        parameters,  # type: "_models.RouteTable"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteTable"
        """Send the initial PUT request; long-running polling is handled by
        :meth:`begin_create_or_update`."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteTable"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'RouteTable')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = updated existing resource, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize('RouteTable', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('RouteTable', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        parameters,  # type: "_models.RouteTable"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteTable"]
        """Create or updates a route table in a specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param parameters: Parameters supplied to the create or update route table operation.
        :type parameters: ~azure.mgmt.network.v2018_07_01.models.RouteTable
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteTable or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.RouteTable]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteTable"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                route_table_name=route_table_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Final response carries the resulting RouteTable resource.
            deserialized = self._deserialize('RouteTable', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def _update_tags_initial(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteTable"
        """Send the initial PATCH request; long-running polling is handled by
        :meth:`begin_update_tags`."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteTable"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._update_tags_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        # Only the tags are sent; other route table properties are untouched.
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RouteTable', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
    def begin_update_tags(
        self,
        resource_group_name,  # type: str
        route_table_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteTable"]
        """Updates a route table tags.

        Long-running operation: issues the initial PATCH through
        ``_update_tags_initial`` and returns an :class:`LROPoller` that polls
        the service until the operation completes (or resumes a saved poller
        when ``continuation_token`` is supplied).

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param parameters: Parameters supplied to update route table tags.
        :type parameters: ~azure.mgmt.network.v2018_07_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteTable or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.RouteTable]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteTable"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PATCH when not resuming from a saved poller
        # state. `cls=lambda x,y,z: x` makes the initial call hand back the raw
        # pipeline response so get_long_running_output can deserialize it later.
        if cont_token is None:
            raw_result = self._update_tags_initial(
                resource_group_name=resource_group_name,
                route_table_name=route_table_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were consumed by the initial call above; remove them so
        # they are not forwarded again to the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response; honor a custom `cls` callback
            # when the caller supplied one.
            deserialized = self._deserialize('RouteTable', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # Select the polling strategy: ARM polling by default, NoPolling when
        # polling=False, or a caller-provided PollingMethod instance.
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an existing LRO from its saved continuation token.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'}  # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.RouteTableListResult"]
"""Gets all route tables in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RouteTableListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.RouteTableListResult"]
"""Gets all route tables in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RouteTableListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeTables'} # type: ignore
| 48.299187 | 191 | 0.657386 |
8834332fd55b3e949eb969e6302ec0a51c490cac | 452 | py | Python | notebooks/cnn_tester.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | null | null | null | notebooks/cnn_tester.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | 8 | 2020-03-24T15:29:05.000Z | 2022-02-10T00:14:06.000Z | notebooks/cnn_tester.py | MichoelSnow/data_science | 7f6c054624268308ec4126a601c9fa8bc5de157c | [
"MIT"
] | null | null | null | from fastai.groups.default_cnn import *
PATH = '/data/msnow/nih_cxr/'
arch = resnet34
sz = 64
bs = 64
# data = get_data(sz,bs)
# learn = ConvLearner.pretrained(arch, data)
# def get_data(sz, bs):
# tfms = tfms_from_model(arch, sz, aug_tfms=transforms_basic, max_zoom=1.05)
# return ImageClassifierData.from_csv(PATH, 'trn', f'{PATH}data_trn.csv', tfms=tfms,
# val_idxs=val_idx, test_name='tst', bs=bs, cat_separator='|') | 30.133333 | 88 | 0.676991 |
1c13afafb5256f28cd44050a05cff996f20c0f8f | 2,458 | py | Python | sdk/python/pulumi_azure_native/containerregistry/v20190401/_enums.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_native/containerregistry/v20190401/_enums.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_native/containerregistry/v20190401/_enums.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'Architecture',
'BaseImageTriggerType',
'OS',
'ResourceIdentityType',
'SecretObjectType',
'SourceControlType',
'SourceRegistryLoginMode',
'SourceTriggerEvent',
'TaskStatus',
'TokenType',
'TriggerStatus',
'Variant',
]
class Architecture(str, Enum):
"""
The OS architecture.
"""
AMD64 = "amd64"
X86 = "x86"
ARM = "arm"
class BaseImageTriggerType(str, Enum):
"""
The type of the auto trigger for base image dependency updates.
"""
ALL = "All"
RUNTIME = "Runtime"
class OS(str, Enum):
"""
The operating system type required for the run.
"""
WINDOWS = "Windows"
LINUX = "Linux"
class ResourceIdentityType(str, Enum):
"""
The identity type.
"""
SYSTEM_ASSIGNED = "SystemAssigned"
USER_ASSIGNED = "UserAssigned"
SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
NONE = "None"
class SecretObjectType(str, Enum):
"""
The type of the secret object which determines how the value of the secret object has to be
interpreted.
"""
OPAQUE = "Opaque"
VAULTSECRET = "Vaultsecret"
class SourceControlType(str, Enum):
"""
The type of source control service.
"""
GITHUB = "Github"
VISUAL_STUDIO_TEAM_SERVICE = "VisualStudioTeamService"
class SourceRegistryLoginMode(str, Enum):
"""
The authentication mode which determines the source registry login scope. The credentials for the source registry
will be generated using the given scope. These credentials will be used to login to
the source registry during the run.
"""
NONE = "None"
DEFAULT = "Default"
class SourceTriggerEvent(str, Enum):
COMMIT = "commit"
PULLREQUEST = "pullrequest"
class TaskStatus(str, Enum):
"""
The current status of task.
"""
DISABLED = "Disabled"
ENABLED = "Enabled"
class TokenType(str, Enum):
"""
The type of Auth token.
"""
PAT = "PAT"
O_AUTH = "OAuth"
class TriggerStatus(str, Enum):
"""
The current status of trigger.
"""
DISABLED = "Disabled"
ENABLED = "Enabled"
class Variant(str, Enum):
"""
Variant of the CPU.
"""
V6 = "v6"
V7 = "v7"
V8 = "v8"
| 20.31405 | 118 | 0.631408 |
9e2585d54d10eb09a30f4f0ba788101a27d886aa | 937 | py | Python | octavia-cli/octavia_cli/apply/yaml_loaders.py | Danucas/airbyte | 9e77879a7a3b1a5a559a3df9fa85056365b6fbef | [
"MIT"
] | 1 | 2022-03-29T01:08:58.000Z | 2022-03-29T01:08:58.000Z | octavia-cli/octavia_cli/apply/yaml_loaders.py | Danucas/airbyte | 9e77879a7a3b1a5a559a3df9fa85056365b6fbef | [
"MIT"
] | 5 | 2022-02-22T14:49:48.000Z | 2022-03-19T10:43:08.000Z | octavia-cli/octavia_cli/apply/yaml_loaders.py | Danucas/airbyte | 9e77879a7a3b1a5a559a3df9fa85056365b6fbef | [
"MIT"
] | 1 | 2022-03-11T06:21:24.000Z | 2022-03-11T06:21:24.000Z | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
import os
import re
from typing import Any
import yaml
ENV_VAR_MATCHER_PATTERN = re.compile(r".*\$\{([^}^{]+)\}.*")
def env_var_replacer(loader: yaml.Loader, node: yaml.Node) -> Any:
"""Convert a YAML node to a Python object, expanding variable.
Args:
loader (yaml.Loader): Not used
node (yaml.Node): Yaml node to convert to python object
Returns:
Any: Python object with expanded vars.
"""
return os.path.expandvars(node.value)
class EnvVarLoader(yaml.SafeLoader):
pass
# All yaml nodes matching the regex will be tagged as !environment_variable.
EnvVarLoader.add_implicit_resolver("!environment_variable", ENV_VAR_MATCHER_PATTERN, None)
# All yaml nodes tagged as !environment_variable will be constructed with the env_var_replacer callback.
EnvVarLoader.add_constructor("!environment_variable", env_var_replacer)
| 26.027778 | 104 | 0.729989 |
0c8cf962db429406b999ef073977feb88a082f1f | 1,153 | py | Python | pySUMMA/plot.py | ahsen1402/pySUMMA | 3412e1282bc2cc9b9076f38bf64ef1674d69d770 | [
"Apache-2.0"
] | 1 | 2020-01-11T19:56:05.000Z | 2020-01-11T19:56:05.000Z | pySUMMA/plot.py | learn-ensemble/PY-SUMMA | 1b82237be5dc33af57b5c2e2c16aee4d568ddb77 | [
"Apache-2.0"
] | null | null | null | pySUMMA/plot.py | learn-ensemble/PY-SUMMA | 1b82237be5dc33af57b5c2e2c16aee4d568ddb77 | [
"Apache-2.0"
] | null | null | null | import matplotlib.pyplot as plt
def performance(x, y, ens, metric='AUC', savename=None):
"""Plot the performance of base classifiers and ensembles.
Plot the true vs. the inferred performance of each base classifier. Along
the y-axis plot the performance of each inputted ensemble.
Args:
x : (M,) ndarray
True performance
y : (M, )ndarray
Inferred performance
ens : python dict
{ensemble name : ensemble performance, ...}
metric : str
Either AUC or BA
"""
plt.figure(figsize=(4.5, 3))
plt.plot(x, y, 'o', mfc='none', mew=2, ms=7.5, label='Base Classifiers')
ax = plt.gca()
xlims = ax.get_xlim()
for w in ens:
plt.plot(xlims, [ens[w], ens[w]], ':', label=w)
plt.plot(xlims, xlims, ':', color='k', alpha=0.5)
ax.set_position([0.16, 0.175, 0.45, 0.8])
plt.xlabel('True {}'.format(metric), fontsize=15)
plt.ylabel('Inferred {}'.format(metric), fontsize=15)
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
if savename is not None:
plt.savefig(savename, fmt=savename.split('.')[-1])
plt.show(block=False)
| 32.942857 | 77 | 0.61752 |
480001cddef662e569e41d6f07ee8c4539423e62 | 3,137 | py | Python | pyleecan/GUI/Dialog/DMachineSetup/SWPole/PWSlot60/Gen_PWSlot60.py | helene-t/pyleecan | 8362de9b0e32b346051b38192e07f3a6974ea9aa | [
"Apache-2.0"
] | 1 | 2021-02-26T12:28:45.000Z | 2021-02-26T12:28:45.000Z | GUI/Dialog/DMachineSetup/SWPole/PWSlot60/Gen_PWSlot60.py | magnetron/pyleecan | 2a3338f4ab080ad6488b5ab8746c3fea1f36f177 | [
"Apache-2.0"
] | null | null | null | GUI/Dialog/DMachineSetup/SWPole/PWSlot60/Gen_PWSlot60.py | magnetron/pyleecan | 2a3338f4ab080ad6488b5ab8746c3fea1f36f177 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""File generated according to PWSlot60/gen_list.json
WARNING! All changes made in this file will be lost!
"""
from pyleecan.GUI.Dialog.DMachineSetup.SWPole.PWSlot60.Ui_PWSlot60 import Ui_PWSlot60
class Gen_PWSlot60(Ui_PWSlot60):
def setupUi(self, PWSlot60):
"""Abstract class to update the widget according to the csv doc
"""
Ui_PWSlot60.setupUi(self, PWSlot60)
# Setup of in_R1
txt = self.tr(u"""Pole top radius""")
self.in_R1.setWhatsThis(txt)
self.in_R1.setToolTip(txt)
# Setup of lf_R1
self.lf_R1.validator().setBottom(0)
txt = self.tr(u"""Pole top radius""")
self.lf_R1.setWhatsThis(txt)
self.lf_R1.setToolTip(txt)
# Setup of in_W1
txt = self.tr(u"""Pole top width""")
self.in_W1.setWhatsThis(txt)
self.in_W1.setToolTip(txt)
# Setup of lf_W1
self.lf_W1.validator().setBottom(0)
txt = self.tr(u"""Pole top width""")
self.lf_W1.setWhatsThis(txt)
self.lf_W1.setToolTip(txt)
# Setup of in_W2
txt = self.tr(u"""Pole bottom width""")
self.in_W2.setWhatsThis(txt)
self.in_W2.setToolTip(txt)
# Setup of lf_W2
self.lf_W2.validator().setBottom(0)
txt = self.tr(u"""Pole bottom width""")
self.lf_W2.setWhatsThis(txt)
self.lf_W2.setToolTip(txt)
# Setup of in_H1
txt = self.tr(u"""Pole top height""")
self.in_H1.setWhatsThis(txt)
self.in_H1.setToolTip(txt)
# Setup of lf_H1
self.lf_H1.validator().setBottom(0)
txt = self.tr(u"""Pole top height""")
self.lf_H1.setWhatsThis(txt)
self.lf_H1.setToolTip(txt)
# Setup of in_H2
txt = self.tr(u"""Pole bottom height""")
self.in_H2.setWhatsThis(txt)
self.in_H2.setToolTip(txt)
# Setup of lf_H2
self.lf_H2.validator().setBottom(0)
txt = self.tr(u"""Pole bottom height""")
self.lf_H2.setWhatsThis(txt)
self.lf_H2.setToolTip(txt)
# Setup of in_W3
txt = self.tr(u"""Edge Distance Ploe-coil """)
self.in_W3.setWhatsThis(txt)
self.in_W3.setToolTip(txt)
# Setup of lf_W3
self.lf_W3.validator().setBottom(0)
txt = self.tr(u"""Edge Distance Ploe-coil """)
self.lf_W3.setWhatsThis(txt)
self.lf_W3.setToolTip(txt)
# Setup of in_H3
txt = self.tr(u"""Top Distance Ploe-coil """)
self.in_H3.setWhatsThis(txt)
self.in_H3.setToolTip(txt)
# Setup of lf_H3
self.lf_H3.validator().setBottom(0)
txt = self.tr(u"""Top Distance Ploe-coil """)
self.lf_H3.setWhatsThis(txt)
self.lf_H3.setToolTip(txt)
# Setup of in_H4
txt = self.tr(u"""Bottom Distance Ploe-coil """)
self.in_H4.setWhatsThis(txt)
self.in_H4.setToolTip(txt)
# Setup of lf_H4
self.lf_H4.validator().setBottom(0)
txt = self.tr(u"""Bottom Distance Ploe-coil """)
self.lf_H4.setWhatsThis(txt)
self.lf_H4.setToolTip(txt)
| 31.059406 | 85 | 0.597067 |
520b3a7caaad05f11ab28588b3997364884e6ed2 | 703 | py | Python | examples/color.py | damani42/plumbum | 55a5058ccc20745bae77db7b29ec7f1c1f9daef5 | [
"MIT"
] | null | null | null | examples/color.py | damani42/plumbum | 55a5058ccc20745bae77db7b29ec7f1c1f9daef5 | [
"MIT"
] | null | null | null | examples/color.py | damani42/plumbum | 55a5058ccc20745bae77db7b29ec7f1c1f9daef5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from plumbum import colors
with colors.fg.red:
print("This is in red")
print("This is completely restored, even if an exception is thrown!")
print("The library will restore color on exiting automatically.")
print(colors.bold["This is bold and exciting!"])
print(colors.bg.cyan | "This is on a cyan background.")
print(colors.fg[42] | "If your terminal supports 256 colors, this is colorful!")
print()
for c in colors:
print(c + "\u2588", end="")
colors.reset()
print()
print("Colors can be reset " + colors.underline["Too!"])
for c in colors[:16]:
print(c["This is in color!"])
colors.red()
print("This should clean up the color automatically on program exit...")
| 28.12 | 80 | 0.705548 |
cc571995e6a2663390d909d9d1342c2cec237a92 | 798 | py | Python | src/utils/list_images.py | Andrew-Chen-Wang/static-image-age-detection | 7dd0b353a7efb580f0120386c8db623d03533f90 | [
"Apache-2.0"
] | null | null | null | src/utils/list_images.py | Andrew-Chen-Wang/static-image-age-detection | 7dd0b353a7efb580f0120386c8db623d03533f90 | [
"Apache-2.0"
] | 1 | 2020-06-14T04:05:02.000Z | 2020-06-14T04:05:02.000Z | src/utils/list_images.py | Andrew-Chen-Wang/static-image-age-detection | 7dd0b353a7efb580f0120386c8db623d03533f90 | [
"Apache-2.0"
] | null | null | null | import os
from typing import List
def list_images(base_path: str) -> List[str]:
    """
    Lists all image paths in specified directory.
    Taken mostly from imutils with some adds/subtracts:
    https://github.com/jrosebr1/imutils/blob/master/imutils/paths.py
    """
    # Recognized image extensions (matched case-insensitively).
    valid_exts = (".jpg", ".jpeg", ".png", ".bmp", ".tif", ".tiff")
    for root, _, names in os.walk(base_path):
        # Yield the full path of every file whose extension is an image type.
        for name in names:
            if os.path.splitext(name)[1].lower() in valid_exts:
                yield os.path.join(root, name)
5379ed38d45e6b74ee4567a5d8ffd20dfccf9f47 | 3,583 | py | Python | ScidbLoader/scidb_load/test/test_scidb_loader.py | dllahr/scidb_python_utils | a0801c32959c44dc7acc19128f950f8702785d88 | [
"MIT"
] | null | null | null | ScidbLoader/scidb_load/test/test_scidb_loader.py | dllahr/scidb_python_utils | a0801c32959c44dc7acc19128f950f8702785d88 | [
"MIT"
] | null | null | null | ScidbLoader/scidb_load/test/test_scidb_loader.py | dllahr/scidb_python_utils | a0801c32959c44dc7acc19128f950f8702785d88 | [
"MIT"
] | null | null | null | """Copyright (C) 2012 David L. Lahr
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in the
Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE."""
import sys
sys.path.append('/opt/scidb/12.10/lib/')
import scidbapi
import scidb_load.scidb_loader as scidb_loader
import unittest
import scidb_load.scidb_load_column as scidb_load_column
import commands
import scidb_read.scidb_reader as scidb_reader
import scidb_load._utils_scidb_load as _utils_scidb_load
class TestScidbLoader(unittest.TestCase):
def test_load(self):
array_name = "testScidbLoaderArray"
scidb = scidbapi.connect("localhost", 1239)
utils = _utils_scidb_load.UtilsScidbLoad(scidb)
utils.remove_array_if_present(array_name)
first_dimension = "a"
second_dimension = "b"
attribute = "c"
#create the list of columns to be loaded
scidb_load_column_list = [scidb_load_column.ScidbLoadColumn(first_dimension, "int64", False),
scidb_load_column.ScidbLoadColumn(second_dimension, "int64", False),
scidb_load_column.ScidbLoadColumn(attribute, "double", True)]
loader = scidb_loader.ScidbLoader(scidb)
#get the path to the csv holding the test data
csv_file = "".join([commands.getoutput("pwd"), "/test_data.csv"])
#load the data from the csv file into the database
loader.load(scidb_load_column_list, csv_file, array_name)
#compare what was loaded to what we expected to be loaded
#first check format of created array
reader = scidb_reader.ScidbReader(scidb)
reader.read("show({})".format(array_name))
result = reader.next()[1][0]
reader.complete_query()
self.assertTrue(result.find("[{}=".format(first_dimension)) >= 0)
self.assertTrue(result.find(",{}=".format(second_dimension)) >= 0)
self.assertTrue(result.find("<{}:double>".format(attribute)) >= 0)
#second check values stored in array
attribute_dimension_list = [first_dimension, second_dimension, attribute]
#expected values from test_data.csv
expected = {first_dimension:(1,13), second_dimension:(2,17), attribute:(3,19)}
result = utils.get_min_and_max(attribute_dimension_list, array_name)
for attr_dim in attribute_dimension_list:
min_max_expected = expected[attr_dim]
min_max_found = result[attr_dim]
self.assertTupleEqual(min_max_expected, min_max_found)
scidb.disconnect()
if __name__ == '__main__':
unittest.main()
| 43.168675 | 102 | 0.704996 |
e8c24d60df64c9701a727ea2777eefb2614d44b2 | 382 | py | Python | python-src/logger.py | DEWMRAX/rbot | 2be7cf0757a07e5cf3ed1bd4e288d5d8646eca4a | [
"Unlicense",
"MIT"
] | 3 | 2021-03-18T21:18:32.000Z | 2022-01-19T19:23:24.000Z | python-src/logger.py | DEWMRAX/rbot | 2be7cf0757a07e5cf3ed1bd4e288d5d8646eca4a | [
"Unlicense",
"MIT"
] | null | null | null | python-src/logger.py | DEWMRAX/rbot | 2be7cf0757a07e5cf3ed1bd4e288d5d8646eca4a | [
"Unlicense",
"MIT"
] | 3 | 2020-09-13T21:21:04.000Z | 2021-11-15T17:59:23.000Z | import datetime
def record_event(s):
    """Append event `s`, prefixed with a month-day/time stamp, to events.txt."""
    stamp = '{:%m-%d,%H:%M:%S}'.format(datetime.datetime.now())
    with open("events.txt", "a") as log_file:
        log_file.write("%s,%s\n" % (stamp, s))
def record_trade(s):
    """Append trade `s`, prefixed with a month-day/time stamp, to trades.txt."""
    stamp = '{:%m-%d,%H:%M:%S}'.format(datetime.datetime.now())
    with open("trades.txt", "a") as log_file:
        log_file.write("%s,%s\n" % (stamp, s))
| 31.833333 | 67 | 0.586387 |
e02e473813e0f29062e8c8486e4e223b01fba724 | 159 | py | Python | offsite/accounts/models.py | wh1te909/backup-offsite | 694f773583eb825b44ff20c51598ac9e1106cd32 | [
"MIT"
] | 4 | 2021-01-20T15:45:35.000Z | 2021-07-09T02:15:31.000Z | offsite/accounts/models.py | wh1te909/backup-offsite | 694f773583eb825b44ff20c51598ac9e1106cd32 | [
"MIT"
] | 6 | 2020-08-02T23:31:07.000Z | 2021-09-22T19:19:50.000Z | offsite/accounts/models.py | wh1te909/backup-offsite | 694f773583eb825b44ff20c51598ac9e1106cd32 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
is_active = models.BooleanField(default=True)
| 22.714286 | 51 | 0.805031 |
c17e14ec0d186660f4c79f41bd8b097c8e8e088e | 21,455 | py | Python | meta_reward_learning/semantic_parsing/nsm/tf_utils.py | jrmendeshurb/google-research | f9fa8cdd2fb77975b524371fd29df008b9dc6cf4 | [
"Apache-2.0"
] | 6 | 2019-12-16T04:23:57.000Z | 2021-12-09T14:17:14.000Z | meta_reward_learning/semantic_parsing/nsm/tf_utils.py | jrmendeshurb/google-research | f9fa8cdd2fb77975b524371fd29df008b9dc6cf4 | [
"Apache-2.0"
] | 13 | 2020-01-28T22:19:53.000Z | 2022-02-10T00:39:26.000Z | meta_reward_learning/semantic_parsing/nsm/tf_utils.py | jrmendeshurb/google-research | f9fa8cdd2fb77975b524371fd29df008b9dc6cf4 | [
"Apache-2.0"
] | 1 | 2019-12-19T09:44:55.000Z | 2019-12-19T09:44:55.000Z | # coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TensorFlow utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import tensorflow as tf
from tensorflow.contrib import rnn as contrib_rnn
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
rnn = contrib_rnn
_BIAS_VARIABLE_NAME = 'biases'
_WEIGHTS_VARIABLE_NAME = 'weights'
ACTIVATION_DICT = dict(sigmoid=tf.nn.sigmoid, tanh=tf.nn.tanh)
def dice(x):
  """DiCE: The Infinitely Differentiable Monte-Carlo Estimator.

  Computes exp(x - stop_gradient(x)), which evaluates to 1 in the forward
  pass but whose gradient with respect to `x` is exp(0) * dx = dx, so
  multiplying an objective by dice(x) injects x's gradient without changing
  the objective's value (Foerster et al., arXiv:1802.05098).

  Args:
    x: a differentiable Tensor (typically log-probabilities).

  Returns:
    A Tensor of the same shape as `x`, equal to 1 in the forward pass.
  """
  return tf.exp(x - tf.stop_gradient(x))
def st_estimator(f, g):
  """Straight-through estimator: acts as f forward and as g backward.

  stop_gradient(f - g) + g evaluates to f in the forward pass, while the
  gradient flows only through the trailing `+ g` term, so backpropagation
  sees g's gradient.

  Args:
    f: Tensor used for the forward value.
    g: Tensor whose gradient is used in the backward pass; must be
      shape-compatible with `f`.

  Returns:
    A Tensor equal to `f` in value, with `g`'s gradient.
  """
  return tf.stop_gradient(f - g) + g
def create_var_and_placeholder(name,
                               shape,
                               dtype,
                               trainable=False,
                               initializer=None,
                               default=None):
  """Creates a variable and a corresponding initializer op with placeholder.

  The returned `init_op` assigns the placeholder's value into the variable,
  letting callers (re)initialize the variable by feeding the placeholder at
  session run time.

  Args:
    name: string, name of the variable.
    shape: shape of both the variable and the placeholder.
    dtype: dtype of both the variable and the placeholder.
    trainable: whether the variable is added to the trainable collection.
    initializer: optional initializer passed to `tf.get_variable`.
    default: optional constant; when given, the placeholder falls back to
      this value if nothing is fed.

  Returns:
    A (variable, placeholder, init_op) tuple.
  """
  if default is not None:
    # Placeholder that yields `default` when no value is fed.
    placeholder = tf.placeholder_with_default(
        tf.constant(default, dtype), shape=shape)
  else:
    placeholder = tf.placeholder(
        dtype, shape=shape, name='{}_init_pc'.format(name))
  variable = tf.get_variable(
      name,
      dtype=dtype,
      shape=shape,
      initializer=initializer,
      trainable=trainable)
  # Running init_op copies the fed placeholder value into the variable.
  init_op = variable.assign(placeholder)
  return variable, placeholder, init_op
def tensormul(t1, t2):
  """Matmul where `t1` may carry extra leading dimensions.

  Flattens `t1` of shape [..., dim1] to [-1, dim1], multiplies by `t2` of
  shape [dim1, dim2], and reshapes the product back to [..., dim2] using
  t1's dynamic shape.

  Args:
    t1: Tensor whose last (static) dimension is dim1.
    t2: rank-2 Tensor of shape [dim1, dim2].

  Returns:
    A Tensor of shape t1.shape[:-1] + [dim2].
  """
  dim1 = t1.get_shape().as_list()[-1]
  dim2 = t2.get_shape().as_list()[-1]
  # Dynamic result shape: t1's leading dims with the last dim replaced by dim2.
  result_shape_tensors = tf.unstack(tf.shape(t1))
  result_shape_tensors[-1] = dim2
  result_shape_tensor = tf.stack(result_shape_tensors)
  flat_t1 = tf.reshape(t1, [-1, dim1])
  result = tf.matmul(flat_t1, t2)
  # Reshape with the stacked shape tensor (previously computed but unused;
  # the original passed the raw Python list instead).
  return tf.reshape(result, result_shape_tensor)
@contextlib.contextmanager
def _checked_scope(cell, scope, reuse=None, **kwargs):
  """Opens `scope` while guarding against accidental RNNCell weight sharing.

  If `cell` was already bound to a variable scope, entering a different scope
  raises; if the scope already holds weights and the cell was not built with
  reuse=True, that also raises. On success the cell's `_scope` is updated and
  the scope is yielded.
  """
  if reuse is not None:
    kwargs['reuse'] = reuse
  with vs.variable_scope(scope, **kwargs) as checking_scope:
    scope_name = checking_scope.name
    if hasattr(cell, '_scope'):
      # Cell was used before: it must be re-entered under the same scope.
      cell_scope = cell._scope  # pylint: disable=protected-access
      if cell_scope.name != checking_scope.name:
        raise ValueError(
            'Attempt to reuse RNNCell %s with a different variable scope than '
            "its first use. First use of cell was with scope '%s', this "
            "attempt is with scope '%s'. Please create a new instance of the "
            'cell if you would like it to use a different set of weights. '
            'If before you were using: MultiRNNCell([%s(...)] * num_layers), '
            'change to: MultiRNNCell([%s(...) for _ in range(num_layers)]). '
            'If before you were using the same cell instance as both the '
            'forward and reverse cell of a bidirectional RNN, simply create '
            'two instances (one for forward, one for reverse). '
            "In May 2017, we will start transitioning this cell's behavior "
            'to use existing stored weights, if any, when it is called '
            'with scope=None (which can lead to silent model degradation, so '
            'this error will remain until then.)' %
            (cell, cell_scope.name, scope_name, type(cell).__name__,
             type(cell).__name__))
    else:
      # First use of this cell: probe whether the scope already has weights
      # (a reuse-lookup of the kernel variable succeeds only if they exist).
      weights_found = False
      try:
        with vs.variable_scope(checking_scope, reuse=True):
          vs.get_variable(_WEIGHTS_VARIABLE_NAME)
        weights_found = True
      except ValueError:
        pass
      if weights_found and reuse is None:
        raise ValueError(
            'Attempt to have a second RNNCell use the weights of a variable '
            "scope that already has weights: '%s'; and the cell was not "
            'constructed as %s(..., reuse=True). '
            'To share the weights of an RNNCell, simply '
            'reuse it in your second calculation, or create a new one with '
            'the argument reuse=True.' % (scope_name, type(cell).__name__))
    # Everything is OK. Update the cell's scope and yield it.
    cell._scope = checking_scope  # pylint: disable=protected-access
    yield checking_scope
class SeqAttentionCellWrapper(tf.nn.rnn_cell.RNNCell):
  """Basic attention cell wrapper.

  Wraps an inner RNNCell and, at every step, attends over a fixed set of
  `attn_inputs` (Bahdanau-style additive attention) using the inner cell's
  output as the query.  The attention context is then merged with the cell
  output either through a learned linear projection or by concatenation.

  Implementation based on https://arxiv.org/abs/1409.0473.
  """
  def __init__(self,
               cell,
               attn_inputs,
               attn_size,
               attn_vec_size,
               output_size=None,
               input_size=None,
               state_is_tuple=True,
               attn_masks=None,
               merge_output_attn='linear',
               reuse=None):
    """Create a cell with attention.

    Args:
      cell: an RNNCell, an attention is added to it.
      attn_inputs: a Tensor of keys/values to attend over; attention keys and
        contents are both derived from it via dense layers.
      attn_size: integer, the size of an attention (context) vector.
      attn_vec_size: integer, the number of convolutional features calculated
        on attention state and a size of the hidden layer built from base cell
        state. Equal to attn_size by default.
      output_size: integer, size of the merged output when
        merge_output_attn == 'linear'. Defaults to cell.output_size.
      input_size: integer, the size of a hidden linear layer, built from inputs
        and attention. Derived from the input tensor by default.
        NOTE(review): stored but never read by this class.
      state_is_tuple: If True, accepted and returned states are n-tuples, where
        `n = len(cells)`. Only True is supported (False raises
        NotImplementedError below).
      attn_masks: mask tensor (1.0 = keep, 0.0 = mask out) that should be
        applied to attention scores. If None, no masks will be applied.
      merge_output_attn: 'linear' to project [output, attention] to
        output_size, or 'concat' to concatenate them along the last axis.
      reuse: (optional) Python boolean describing whether to reuse variables in
        an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.

    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if cell returns a state tuple but the flag
        `state_is_tuple` is `False` or if attn_length is zero or less.
    """
    if not isinstance(cell, rnn.RNNCell):
      raise TypeError('The parameter cell is not RNNCell.')
    if nest.is_sequence(cell.state_size) and not state_is_tuple:
      raise ValueError(
          'Cell returns tuple of states, but the flag '
          'state_is_tuple is not set. State size is: %s' % str(cell.state_size))
    if not state_is_tuple:
      logging.warn(
          '%s: Using a concatenated state is slower and will soon be '
          'deprecated. Use state_is_tuple=True.', self)
    self._state_is_tuple = state_is_tuple
    # Concatenated (non-tuple) state handling was never implemented.
    if not state_is_tuple:
      raise NotImplementedError
    self._cell = cell
    self._input_size = input_size
    self._output_size = output_size
    if output_size is None:
      self._output_size = cell.output_size
    self._attn_size = attn_size
    self._reuse = reuse
    self._attn_inputs = attn_inputs
    self._attn_vec_size = attn_vec_size
    self.attn_masks = attn_masks
    self.merge_output_attn = merge_output_attn
  @property
  def state_size(self):
    # Attention adds no recurrent state; delegate to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._output_size
  def zero_state(self, batch_size, dtype=tf.float32):
    # Same state as the wrapped cell (see state_size above).
    zero_state = self._cell.zero_state(batch_size, dtype=dtype)
    return zero_state
  def __call__(self, inputs, state, scope=None):
    """Seq Attention wrapper.

    Runs the inner cell, attends over attn_inputs with the cell output as
    query, and merges output + attention context per merge_output_attn.
    """
    with _checked_scope(
        self, scope or 'attention_cell_wrapper', reuse=self._reuse):
      inner_output, new_state = self._cell(inputs, state)
      new_attns = self._attention(inner_output, self._attn_inputs)
      if self.merge_output_attn == 'linear':
        with vs.variable_scope('attn_output_projection'):
          output = linear([inner_output, new_attns], self._output_size, True)
      elif self.merge_output_attn == 'concat':
        output = tf.concat([inner_output, new_attns], axis=-1)
      else:
        raise ValueError(
            'Unknown method to merge output and attention: {}'.format(
                self.merge_output_attn))
      return output, new_state
  def _attention(self, query, attn_inputs):
    """Additive (Bahdanau) attention of `query` over `attn_inputs`.

    Returns the attention context: a weighted sum of projected attn_inputs,
    reduced over the time axis.
    """
    with vs.variable_scope('attention'):
      # Project query/keys into a shared attn_vec_size space; contents carry
      # the values that get averaged by the attention weights.
      attn_query = tf.layers.dense(
          inputs=query, units=self._attn_vec_size, use_bias=True)
      attn_keys = tf.layers.dense(
          inputs=attn_inputs, units=self._attn_vec_size, use_bias=True)
      attn_contents = tf.layers.dense(
          inputs=attn_inputs, units=self._attn_size, use_bias=True)
      v_attn = vs.get_variable('attn_v', [self._attn_vec_size])
      scores = attn_sum_bahdanau(v_attn, attn_keys, attn_query)
      if self.attn_masks is not None:
        # Push masked positions to float32 min so softmax assigns them ~0.
        score_masks = self.attn_masks
        scores = scores * score_masks + (1.0 - score_masks) * tf.float32.min
      attn_weights = nn_ops.softmax(scores)
      new_attns = math_ops.reduce_sum(
          tf.expand_dims(attn_weights, -1) * attn_contents, [1])
      return new_attns
def attn_sum_bahdanau(v_attn, keys, query):
  """Calculates a batch and timewise dot product with a variable."""
  # Broadcast the query over the time axis of the keys, pass the sum through
  # tanh, weight with v_attn, and reduce over the feature dimension.
  query_expanded = tf.expand_dims(query, 1)
  activated = tf.tanh(keys + query_expanded)
  return tf.reduce_sum(v_attn * activated, [2])
def attn_sum_dot(keys, query):
  """Calculates a batch and timewise dot product."""
  # Broadcast the query over the time axis, then reduce the feature axis.
  query_expanded = tf.expand_dims(query, 1)
  return tf.reduce_sum(keys * query_expanded, [2])
def linear(args, output_size, bias, bias_start=0.0):
  """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.

  Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
    output_size: int, second dimension of W[i].
    bias: boolean, whether to add a bias term or not.
    bias_start: starting value to initialize the bias; 0 by default.
  Returns:
    A 2D Tensor with shape [batch x output_size] equal to
    sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
  Raises:
    ValueError: if some of the arguments has unspecified or wrong shape.
  """
  if args is None or (nest.is_sequence(args) and not args):
    raise ValueError('`args` must be specified')
  if not nest.is_sequence(args):
    args = [args]
  # Calculate the total size of arguments on dimension 1.
  # All inputs are concatenated along dim 1, so a single weight matrix of
  # shape [sum(arg widths), output_size] implements the whole sum.
  total_arg_size = 0
  shapes = [a.get_shape() for a in args]
  for shape in shapes:
    if shape.ndims != 2:
      raise ValueError('linear is expecting 2D arguments: %s' % shapes)
    if shape[1].value is None:
      raise ValueError('linear expects shape[1] to be provided for shape %s, '
                       'but saw %s' % (shape, shape[1]))
    else:
      total_arg_size += shape[1].value
  # All args are assumed to share the first arg's dtype.
  dtype = [a.dtype for a in args][0]
  # Now the computation.
  scope = vs.get_variable_scope()
  with vs.variable_scope(scope) as outer_scope:
    weights = vs.get_variable(
        _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)
    if len(args) == 1:
      res = math_ops.matmul(args[0], weights)
    else:
      res = math_ops.matmul(array_ops.concat(args, 1), weights)
    if not bias:
      return res
    # The bias lives in a sub-scope with the partitioner disabled so that a
    # partitioned outer scope does not try to shard the 1-D bias variable.
    with vs.variable_scope(outer_scope) as inner_scope:
      inner_scope.set_partitioner(None)
      biases = vs.get_variable(
          _BIAS_VARIABLE_NAME, [output_size],
          dtype=dtype,
          initializer=init_ops.constant_initializer(bias_start, dtype=dtype))
    return nn_ops.bias_add(res, biases)
class ScoreWrapper(tf.nn.rnn_cell.RNNCell):
  """Creates a cell which outputs a scalar score value at each time step."""
  def __init__(self, cell, activation='tanh'):
    self._cell = cell
    self._activation = ACTIVATION_DICT[activation]
  @property
  def state_size(self):
    # The score head is stateless; delegate to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    # Pair of (inner output size, scalar score).
    return (self._cell.output_size, 1)
  def __call__(self, inputs, state, *args, **kwargs):
    """Run the wrapped cell, then attach a scalar score to its output."""
    cell_output, next_state = self._cell(inputs, state, *args, **kwargs)
    with vs.variable_scope('score_fn', reuse=tf.AUTO_REUSE):
      # No need for explicitly stop_gradient since the gradients contribution
      # from scores is stopped now using stop_gradients in `tf.gradients`
      # Moving to a 2 layer architechture since the one layer didn't improve
      # the meta learning loss much
      hidden = tf.layers.dense(
          cell_output,
          units=16,
          use_bias=True,
          activation=tf.nn.relu,
          kernel_initializer=tf.ones_initializer(),
          bias_initializer=tf.ones_initializer())
      score_val = tf.layers.dense(
          hidden,
          units=1,
          use_bias=True,
          activation=self._activation,
          kernel_initializer=tf.ones_initializer(),
          bias_initializer=tf.ones_initializer())
    return (cell_output, score_val), next_state
# State carried by MemoryWrapper: the external memory tensor of shape
# (batch, mem_size, embed_size) paired with the wrapped cell's own state.
MemoryStateTuple = collections.namedtuple('MemoryStateTuple',
                                          ('memory', 'inner_state'))
# Per-step input for MemoryWrapper: the slot index to read, the slot index to
# write (negative values are masked out, i.e. no write), and the padded
# candidate indices to score (padded with -1 up to max_n_valid_indices).
MemoryInputTuple = collections.namedtuple(
    'MemoryInputTuple', ('read_ind', 'write_ind', 'valid_indices'))
class MemoryWrapper(tf.nn.rnn_cell.RNNCell):
  """Augment RNNCell with a memory that the RNN can write to and read from.

  Each time step, 3 things are happening:
  1) the RNNCell reads from one memory location (read_ind)
  as input to the inner RNN.
  2) It also writes the output of the inner RNN to one
  memory location (write_ind). A negative index indicates no writing
  (write is masked out below).
  3) It use the output of the inner RNN to compute the
  logits for the valid_indices, which will be used as input
  to compute a softmax distribution over them. Note that
  valid_indices always has the dimension
  max_n_valid_indices, use -1 to pad the dimensions the
  actual number of valid indices are less.
  """
  def __init__(self,
               cell,
               mem_size,
               embed_size,
               max_n_valid_indices,
               use_score_wrapper=False,
               **kwargs):
    """Constructs a `MemoryWrapper` for `cell`.

    Args:
      cell: An instance of `RNNCell`.
      mem_size: size of the memory.
      embed_size: the size/dimension of the embedding in each memory location.
      max_n_valid_indices: maximum number of valid_indices.
      use_score_wrapper: Whether to additionally wrap `cell` in a
        ScoreWrapper so each step also emits a scalar score.
      **kwargs: Keyword arguments for score wrapper.
    """
    self._use_score_wrapper = use_score_wrapper
    if use_score_wrapper:
      self._cell = ScoreWrapper(cell, **kwargs)
    else:
      self._cell = cell
    self._mem_size = mem_size
    self._embed_size = embed_size
    self._max_n_valid_indices = max_n_valid_indices
  @property
  def state_size(self):
    # This will be used to create zero states.
    return MemoryStateTuple(
        tf.TensorShape([self._mem_size, self._embed_size]),
        self._cell.state_size)
  @property
  def output_size(self):
    # The output is the logits of the valid_indices (plus the scalar score
    # when a ScoreWrapper is in use).
    if self._use_score_wrapper:
      return (self._max_n_valid_indices, 1)
    else:
      return self._max_n_valid_indices
  def __call__(self, inputs, state, scope=None, debug=False):
    """Unroll the memory augmented cell for one step.

    Args:
      inputs: (Possibly nested tuple of) Tensor, the input at this time step.
        Expected to be a MemoryInputTuple (read_ind, write_ind,
        valid_indices).
      state: An instance of MemoryStateTuple containing tensors from the
        previous time step.
      scope: optional variable scope passed through to the inner cell.
      debug: if True, print intermediate tensors (graph-construction time).

    Returns:
      (outputs, state): `outputs` is the masked logits over valid_indices
      (paired with the scalar score when use_score_wrapper), and `state` is
      the updated MemoryStateTuple.
    """
    # B is batch size.
    # 1) Use read_ind to find memory location to read from
    # as input.
    # inputs.read_ind: (B, 1)
    # memory: (B, mem_size, embed_size)
    read_ind = tf.to_int32(inputs.read_ind)
    batch_size = tf.shape(read_ind)[0]
    if debug:
      print('batch size is', batch_size)
      print('read ind is', read_ind)
    mem_ind = tf.range(batch_size)
    # read_mem_ind: (B, 1)
    read_mem_ind = tf.expand_dims(mem_ind, axis=1)
    # read_ind: (B, 2) -- each row is [batch_index, slot_index] for gather_nd.
    read_ind = tf.concat([read_mem_ind, read_ind], axis=1)
    if debug:
      print('processed read ind is', read_ind)
    # inner_inputs: (B, embed_size)
    inner_inputs = tf.gather_nd(state.memory, read_ind)
    if debug:
      print('inner_inputs is', inner_inputs)
    inner_state = state.inner_state
    # 2) Run the inner RNNCell.
    # inner_outputs: (B, embed_size)
    cell_outputs, new_inner_state = self._cell(
        inner_inputs, inner_state, scope=scope)
    if self._use_score_wrapper:
      inner_outputs, score_val = cell_outputs
    else:
      inner_outputs = cell_outputs
    if debug:
      print('inner_outputs is', inner_outputs)
    # 3) Compute logits for valid indices (using logit_masks
    # to mask out padded valid indices (-1)).
    # valid_indices: (B, max_n_valid_indices)
    valid_indices = tf.to_int32(inputs.valid_indices)
    if debug:
      print('valid_indices is', valid_indices)
    # Logit mask: (B, max_n_valid_indices); 1.0 where index >= 0.
    logit_masks = tf.greater_equal(inputs.valid_indices, 0)
    logit_masks = tf.cast(logit_masks, tf.float32)
    if debug:
      print('logit_masks is', logit_masks)
    # Normalize indices to be at least 0 (padded -1s become 0 but are masked
    # out of the logits below).
    valid_indices = tf.maximum(valid_indices, 0)
    # valid_indices: (B, max_n_valid_indices, 1)
    valid_indices = tf.expand_dims(valid_indices, -1)
    if debug:
      print('valid_indices is', valid_indices)
      print('mem_ind is', mem_ind)
    valid_mem_ind = tf.expand_dims(mem_ind, axis=1)
    # valid_mem_ind: (B, 1, 1)
    valid_mem_ind = tf.expand_dims(valid_mem_ind, axis=2)
    if debug:
      print('valid_mem_ind is', valid_mem_ind)
    # valid_mem_ind: (B, max_n_valid_indices, 1)
    valid_mem_ind = tf.tile(valid_mem_ind, [1, self._max_n_valid_indices, 1])
    # valid_indices: (B, max_n_valid_indices, 2)
    # Third dimension of valid_indices is [b_i, valid_index] so that it can
    # index into the right memory location.
    valid_indices = tf.concat([valid_mem_ind, valid_indices], axis=2)
    if debug:
      print('valid_indices is', valid_indices)
    # select all the valid slots.
    # valid_values: (B, max_n_valid_indices, embed_size)
    valid_values = tf.gather_nd(state.memory, valid_indices)
    if debug:
      print('valid_values is', valid_values)
    # expanded_inner_outputs: (B, 1, embed_size)
    expanded_inner_outputs = tf.expand_dims(inner_outputs, 1)
    if debug:
      print('expanded_inner_outputs is', expanded_inner_outputs)
    # valid_values: (B, embed_size, max_n_valid_indices)
    valid_values = tf.transpose(valid_values, [0, 2, 1])
    if debug:
      print('valid_values is', valid_values)
    # logits: (B, 1, max_n_valid_indices) -- dot product of the inner output
    # with each candidate slot's embedding.
    logits = tf.matmul(expanded_inner_outputs, valid_values)
    if debug:
      print('logits is', logits)
    # logits: (B, max_n_valid_indices)
    logits = tf.squeeze(logits, axis=[1])
    if debug:
      print('logits is', logits)
    # Padded positions get float32.min so softmax assigns them ~0 weight.
    # masked_logits = (logits * logit_masks) - (1 - logit_masks) * 1e6
    masked_logits = logits * logit_masks + (1 - logit_masks) * tf.float32.min
    # masked_logits = tf.Print(masked_logits, [masked_logits],
    #                          message='masked_logits')
    outputs = masked_logits
    # 4) Write the output of the inner RNN to a memory
    # location (write_ind), using write_masks to mask out
    # padded write_ind (-1).
    # write_ind: (B, 1)
    write_ind = tf.cast(inputs.write_ind, tf.int32)
    if debug:
      print('write_ind is', write_ind)
    # write mask: (B, 1); 1.0 where a real write is requested (index >= 0).
    write_masks = tf.greater_equal(inputs.write_ind, 0)
    if debug:
      print('write_masks greater_equal', write_masks)
    write_masks = tf.cast(write_masks, tf.float32)
    # write mask: (B, 1, 1)
    write_masks = tf.expand_dims(write_masks, [-1])
    if debug:
      print('write_masks is', write_masks)
    # Normalize write_ind to be above 0.
    # write_ind: (B, 1)
    write_ind = tf.maximum(write_ind, 0)
    # write_mem_ind: (B, 1)
    write_mem_ind = tf.expand_dims(mem_ind, axis=1)
    # write_ind: (B, 2)
    # Second dimension is [b_i, write_index]
    write_ind = tf.concat([write_mem_ind, write_ind], axis=1)
    if debug:
      print('write_ind is', write_ind)
    if debug:
      print('masked_logits is', masked_logits)
      print('memory is', state.memory)
    # write_mat: (B, mem_size, embed_size) -- zeros everywhere except the
    # written slot, which holds inner_outputs for that batch element.
    write_mat = tf.scatter_nd(
        write_ind, inner_outputs, shape=tf.shape(state.memory))
    if debug:
      print('@' * 50)
      print('write_mat is', write_mat)
      print('write_mask is', write_masks)
      print('@' * 50)
    # Masked additive update: memory is unchanged where no write was asked.
    masked_write_mat = write_mat * write_masks
    new_memory = state.memory + masked_write_mat
    state = MemoryStateTuple(new_memory, new_inner_state)
    if debug:
      print('state is', state)
    if self._use_score_wrapper:
      outputs = (outputs, score_val)
    return (outputs, state)
| 34.942997 | 80 | 0.672431 |
3605916ede1f8cde48980442a8d5ed50d3bd314d | 1,566 | py | Python | Basic Data Structures/array/leet_682_BaseballGame.py | rush2catch/algorithms-leetcode | 38a5e6aa33d48fa14fe09c50c28a2eaabd736e55 | [
"MIT"
] | null | null | null | Basic Data Structures/array/leet_682_BaseballGame.py | rush2catch/algorithms-leetcode | 38a5e6aa33d48fa14fe09c50c28a2eaabd736e55 | [
"MIT"
] | null | null | null | Basic Data Structures/array/leet_682_BaseballGame.py | rush2catch/algorithms-leetcode | 38a5e6aa33d48fa14fe09c50c28a2eaabd736e55 | [
"MIT"
] | null | null | null | # Problem: Baseball Game
# Difficulty: Easy
# Category: Array
# Leetcode 682: https://leetcode.com/problems/baseball-game/description/
# Description:
"""
You're now a baseball game point recorder.
Given a list of strings, each string can be one of the 4 following types:
Integer (one round's score): Directly represents the number of points you get in this round.
"+" (one round's score): Represents that the points you get in this round are the sum of the last two valid round's points.
"D" (one round's score): Represents that the points you get in this round are the doubled data of the last valid round's points.
"C" (an operation, which isn't a round's score): Represents the last valid round's points you get were invalid and should be removed.
Each round's operation is permanent and could have an impact on the round before and the round after.
You need to return the sum of the points you could get in all the rounds.
Example 1:
Input: ["5","2","C","D","+"]
Output: 30
Explanation:
Round 1: You could get 5 points. The sum is: 5.
Round 2: You could get 2 points. The sum is: 7.
Operation 1: The round 2's data was invalid. The sum is: 5.
Round 3: You could get 10 points (the round 2's data has been removed). The sum is: 15.
Round 4: You could get 5 + 10 = 15 points. The sum is: 30.
"""
class Solution(object):
    def cal_points(self, ops):
        """Apply every baseball-game operation and return the total score.

        'C' removes the last valid score, 'D' doubles it, '+' adds the sum
        of the last two valid scores, and any other token is a plain integer
        score.
        """
        scores = []
        for token in ops:
            if token == 'C':
                scores.pop()
            elif token == 'D':
                scores.append(scores[-1] * 2)
            elif token == '+':
                scores.append(scores[-1] + scores[-2])
            else:
                scores.append(int(token))
        return sum(scores)
ef7cbd69748b27fb829eac970a670bd117aab68f | 9,131 | py | Python | notebooks/python/cloudsat_precip.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null | notebooks/python/cloudsat_precip.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null | notebooks/python/cloudsat_precip.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null |
# coding: utf-8
# <h1>Table of Contents<span class="tocSkip"></span></h1>
# <div class="toc"><ul class="toc-item"><li><span><a href="#Introduction" data-toc-modified-id="Introduction-1"><span class="toc-item-num">1 </span>Introduction</a></span></li><li><span><a href="#Read-in-the-height-and-reflectivity-fields" data-toc-modified-id="Read-in-the-height-and-reflectivity-fields-2"><span class="toc-item-num">2 </span>Read in the height and reflectivity fields</a></span></li><li><span><a href="#Now-go-get-the-rain-rate" data-toc-modified-id="Now-go-get-the-rain-rate-3"><span class="toc-item-num">3 </span>Now go get the rain rate</a></span></li><li><span><a href="#Make-a-masked-array-of-the-reflectivity-so-that-pcolormesh-will-plot-it" data-toc-modified-id="Make-a-masked-array-of-the-reflectivity-so-that-pcolormesh-will-plot-it-4"><span class="toc-item-num">4 </span>Make a masked array of the reflectivity so that pcolormesh will plot it</a></span></li><li><span><a href="#Find-the-part-of-the-orbiting-that-corresponds-to-the-3-minutes-containing-the-storm" data-toc-modified-id="Find-the-part-of-the-orbiting-that-corresponds-to-the-3-minutes-containing-the-storm-5"><span class="toc-item-num">5 </span>Find the part of the orbiting that corresponds to the 3 minutes containing the storm</a></span></li><li><span><a href="#convert-time-to-distance-by-using-pyproj-to-get-the-greatcircle-distance-between-shots" data-toc-modified-id="convert-time-to-distance-by-using-pyproj-to-get-the-greatcircle-distance-between-shots-6"><span class="toc-item-num">6 </span>convert time to distance by using pyproj to get the greatcircle distance between shots</a></span></li><li><span><a href="#Make-the-plot-assuming-that-height-is-the-same-for-every-shot" data-toc-modified-id="Make-the-plot-assuming-that-height-is-the-same-for-every-shot-7"><span class="toc-item-num">7 </span>Make the plot assuming that height is the same for every shot</a></span></li><li><span><a href="#Now-add-the-rain-rate" data-toc-modified-id="Now-add-the-rain-rate-8"><span class="toc-item-num">8 
</span>Now add the rain rate</a></span></li><li><span><a href="#Repeat-for-precipitatable-liquid-water-and-retrieval-uncertainty" data-toc-modified-id="Repeat-for-precipitatable-liquid-water-and-retrieval-uncertainty-9"><span class="toc-item-num">9 </span>Repeat for precipitatable liquid water and retrieval uncertainty</a></span></li></ul></div>
# In[1]:
from IPython.core.debugger import set_trace
# # Introduction
#
# This notebook uses the 2C-RAIN-PROFILE data to compare rain rate and precipitable liquid water with
# the cloudsat reflectivity.
# # Read in the height and reflectivity fields
# In[2]:
from importlib import reload
import numpy as np
import datetime as dt
from datetime import timezone as tz
from matplotlib import pyplot as plt
import pyproj
from numpy import ma
import a301
from a301.cloudsat import get_geo
from pathlib import Path
#
# new functions to read vdata and sds arrays
#
from a301.cloudsat import HDFvd_read, HDFsd_read
plt.style.use('ggplot')
# In[3]:
# Grab the first GEOPROF granule in the test directory and unpack its
# georeferencing: per-shot lat/lon, python datetimes, profile times, and the
# digital elevation model surface height.
z_file= list(a301.test_dir.glob('*GEOPROF_GRANULE*hdf'))[0]
lats,lons,date_times,prof_times,dem_elevation=get_geo(z_file)
# Reflectivity is stored as scaled integers; 'factor' and 'missing' come from
# the SDS attributes and are applied further below.
radar_reflectivity, radar_attrs = HDFsd_read(z_file,'Radar_Reflectivity')
radar_scale = radar_attrs['factor']
radar_missing = radar_attrs['missing']
radar_height, height_attrs = HDFsd_read(z_file,'Height')
meters2km=1.e3
# # Now go get the rain rate
#
# mask all values
# In[4]:
# Rain rate from the matching 2C-RAIN-PROFILE granule; -9999 flags invalid
# retrievals (set to NaN).  Remaining negative rates are kept as their
# magnitude -- presumably negative encodes a flagged/uncertain retrieval;
# TODO confirm against the 2C-RAIN-PROFILE product documentation.
r_file= list(a301.test_dir.glob('*2C-RAIN-PROFILE*hdf'))[0]
rain_rate = HDFvd_read(r_file,'rain_rate',vgroup='Data Fields')
invalid = (rain_rate == -9999.)
rain_rate[invalid] = np.nan
hit = rain_rate < 0.
rain_rate[hit] = np.abs(rain_rate[hit])
plt.plot(rain_rate);
# # Make a masked array of the reflectivity so that pcolormesh will plot it
#
# note that I need to find the missing data before I divide by factor=100 to
# convert from int16 to float
# In[5]:
# NOTE(review): np.float is removed in NumPy 1.24+; np.float64 is the modern
# spelling (left unchanged in this comment-only pass).
hit=(radar_reflectivity == radar_missing)
radar_reflectivity=radar_reflectivity.astype(np.float)
radar_reflectivity[hit]=np.nan
zvals = radar_reflectivity/radar_scale
zvals=ma.masked_invalid(zvals)
# # Find the part of the orbiting that corresponds to the 3 minutes containing the storm
#
# You need to enter the start_hour and start_minute for the start time of your cyclone in the granule
# In[ ]:
# Select the 3-minute window of the orbit that contains the storm.
# start_hour/start_minute are entered by the user for their cyclone.
first_time = date_times[0]
print(f'orbit start: {first_time}')
start_hour = 6
start_minute = 45
# Build the window start on the granule's own date, in UTC.
storm_start = dt.datetime(first_time.year, first_time.month, first_time.day,
                          start_hour, start_minute, 0, tzinfo=tz.utc)
#
# get 3 minutes of data from the storm_start
#
storm_stop = storm_start + dt.timedelta(minutes=3)
print(f'storm start: {storm_start}')
# Boolean mask selecting the shots inside (storm_start, storm_stop).
time_hit = np.logical_and(date_times > storm_start, date_times < storm_stop)
storm_lats = lats[time_hit]
storm_lons = lons[time_hit]
storm_prof_times = prof_times[time_hit]
storm_zvals = zvals[time_hit, :]
storm_height = radar_height[time_hit, :]
storm_date_times = date_times[time_hit]
rain_rate = rain_rate[time_hit]
# Removed: a leftover `set_trace()` debugger breakpoint that halted the
# script here, and the unused `starttime` alias on the storm_start line.
# # convert time to distance by using pyproj to get the greatcircle distance between shots
# In[ ]:
# Accumulate WGS84 great-circle distance between successive shots into an
# along-track distance coordinate (converted to km at the end).
great_circle=pyproj.Geod(ellps='WGS84')
distance=[0]
# NOTE(review): `start` is assigned but never used below.
start=(storm_lons[0],storm_lats[0])
for index in np.arange(1,len(storm_lons)):
    # Geod.inv returns forward azimuth, back azimuth, and distance (m);
    # only the distance step is used.
    azi12,azi21,step= great_circle.inv(storm_lons[index-1],storm_lats[index-1],
                                  storm_lons[index],storm_lats[index])
    distance.append(distance[index-1] + step)
distance=np.array(distance)/meters2km
# # Make the plot assuming that height is the same for every shot
#
# We need to customize the subplots so we can share the x axis between the radar reflectivity
# and the rain_rate, and adjust the sizes to hold a colorbar
# In[ ]:
get_ipython().run_line_magic('matplotlib', 'inline')
from matplotlib import cm
from matplotlib.colors import Normalize
from mpl_toolkits.axes_grid1 import make_axes_locatable
def plot_field2(distance,height,field,fig,cmap=None,norm=None):
    """
    Draw a 2 panel plot with different panel sizes. Put the radar
    reflectivity in the top panel with a colorbar along the bottom, and pass
    the second axis back to be filled in later.

    Parameters
    ----------
    distance, height : 1-D arrays used as the pcolormesh x/y coordinates
    field : 2-D array (len(height) x len(distance)) to color-map
    fig : matplotlib Figure to draw into
    cmap : optional colormap (defaults to cm.inferno)
    norm : optional matplotlib Normalize instance for the color scale

    Returns
    -------
    (ax1, ax2) : top (image) and bottom (line-plot) axes

    uses the sharex keyword to give both plots the same x axis (distance)
    and the gridspec class to lay out the grid
    https://stackoverflow.com/questions/10388462/matplotlib-different-size-subplots
    """
    from matplotlib import gridspec
    gs = gridspec.GridSpec(2, 1, height_ratios=[2, 1])
    ax1 = fig.add_subplot(gs[0])
    ax2 = fig.add_subplot(gs[1],sharex=ax1)
    if cmap is None:
        cmap=cm.inferno
    # Bug fix: honor the caller-supplied `norm` instead of silently reading
    # the module-level global `the_norm` (the parameter was ignored before).
    col=ax1.pcolormesh(distance,height,field,cmap=cmap,
                       norm=norm)
    #https://stackoverflow.com/questions/18195758/set-matplotlib-colorbar-size-to-match-graph
    # create an axes on the bottom side of ax1. The height of cax will be 5%
    # of ax and the padding between cax and ax will be fixed at 0.55 inch.
    divider = make_axes_locatable(ax1)
    cax = divider.append_axes("bottom", size="5%", pad=0.55)
    ax1.figure.colorbar(col,extend='both',cax=cax,orientation='horizontal')
    return ax1, ax2
# Color scale for reflectivity (dBZe): clip display to [-30, 20], cyan for
# over-range, faint blue for under-range, grey for masked/missing cells.
vmin=-30
vmax=20
the_norm=Normalize(vmin=vmin,vmax=vmax,clip=False)
cmap_ref=cm.plasma
cmap_ref.set_over('c')
cmap_ref.set_under('b',alpha=0.2)
cmap_ref.set_bad('0.75') #75% grey
# Assumes every shot shares the first shot's height column -- see the
# section heading above.
cloud_height_km=radar_height[0,:]/meters2km
fig = plt.figure(figsize=(15, 8))
ax1, ax2 = plot_field2(distance,cloud_height_km,storm_zvals.T,fig,cmap=cmap_ref,norm=the_norm)
ax1.set(ylim=[0,17],xlim=(0,1200))
ax1.set(xlabel='distance (km)',ylabel='height (km)',
        title='equivalent radar reflectivity in dbZe');
# # Now add the rain rate
#
# Use the second axis to draw the rain rate and redraw the figure
# In[ ]:
ax2.plot(distance,rain_rate)
ax2.set(xlabel='distance (km)',ylabel='rain rate (mm/hour)')
display(fig)
# # Repeat for precipitatable liquid water and retrieval uncertainty
#
# Make a new plot pair -- for variable information see the [dataset docs](http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-rain-profile?term=56)
# In[ ]:
liquid_water, lw_attributes = HDFsd_read(r_file,'precip_liquid_water')
lw_attributes
# In[ ]:
precip_uncertainty = HDFvd_read(r_file,'rain_rate_uncertainty',vgroup='Data Fields')
# Slice both retrievals down to the storm window and undo the scale factor.
storm_liquid=liquid_water[time_hit,:]/lw_attributes['factor']
precip_uncert=precip_uncertainty[time_hit]
# New color scale for liquid water content (g/m^3).
vmin=0.01
vmax=0.5
the_norm=Normalize(vmin=vmin,vmax=vmax,clip=False)
cmap_ref=cm.plasma
cmap_ref.set_over('w')
cmap_ref.set_under('b',alpha=0.02)
cmap_ref.set_bad('0.75') #75% grey
fig = plt.figure(figsize=(15, 8))
ax1, ax2 = plot_field2(distance,cloud_height_km,storm_liquid.T,fig,cmap=cmap_ref,norm=the_norm)
ax1.set(ylim=[0,6],xlim=(0,1200))
ax1.set(xlabel='distance (km)',ylabel='height (km)',
        title='precipitable liquid water content (g/m^3)')
ax2.plot(distance,precip_uncert)
ax2.set(ylim=[0,15],xlabel='distance (km)',ylabel='rain rate uncertainty (%)');
# In[ ]:
lw_attributes
| 38.690678 | 2,445 | 0.74154 |
7340c026653ed2430668da293c7f72ad4bd9ee64 | 1,326 | py | Python | fplcoin/encodings.py | vishnutalanki/fplcoin | d2ce6ce612497ae3fe24576ab5369d32f39e931a | [
"MIT"
] | null | null | null | fplcoin/encodings.py | vishnutalanki/fplcoin | d2ce6ce612497ae3fe24576ab5369d32f39e931a | [
"MIT"
] | null | null | null | fplcoin/encodings.py | vishnutalanki/fplcoin | d2ce6ce612497ae3fe24576ab5369d32f39e931a | [
"MIT"
] | null | null | null | # from https://pypi.python.org/pypi/base58/0.2.2
import fplcoin
class b58encoder(object):
    """Base58 encoder/decoder for fplcoin.

    Uses fplcoin's custom (reversed) 58-character alphabet.  ``b58encode``
    maps a byte string to text, ``b58decode`` inverts it back to bytes.
    Leading zero bytes round-trip as leading ``alphabet[0]`` characters.

    Bug fixes vs. the original (which only worked on Python 2):
    - ``iseq`` called ``map(ord, v)`` on byte strings, which raises on
      Python 3 where iterating bytes already yields ints.
    - ``b58decode`` concatenated a ``str`` (from ``bseq``) with ``b'\\0'``
      padding, a TypeError on Python 3; ``bseq`` now produces bytes.
    """
    def __init__(self):
        self.alphabet = 'zyxwvutsrqponmkjihgfedcbaZYXWVUTSRQPNMLKJHGFEDCBA987654321'
        # View any byte/character string as a sequence of small ints.
        self.iseq = lambda s: list(bytearray(s)) if isinstance(s, (bytes, bytearray)) else [ord(c) for c in s]
        # Build a byte string from a sequence of small ints (works on 2 and 3).
        self.bseq = lambda s: bytes(bytearray(s))
    def b58encode(self, v):
        '''Encode a byte string using Base58; returns a text string.'''
        if isinstance(v, str):
            v = v.encode('ascii')
        origlen = len(v)
        v = v.lstrip(b'\0')
        newlen = len(v)
        # Interpret the remaining bytes as one big-endian integer.
        acc = 0
        for c in self.iseq(v):
            acc = acc * 256 + c
        # Emit base-58 digits least-significant first, then reverse; stripped
        # leading zero bytes become leading alphabet[0] characters.
        result = ''
        while acc > 0:
            acc, mod = divmod(acc, 58)
            result += self.alphabet[mod]
        return (result + self.alphabet[0] * (origlen - newlen))[::-1]
    def b58decode(self, v):
        '''Decode a Base58 encoded string; returns bytes.'''
        if not isinstance(v, str):
            v = v.decode('ascii')
        origlen = len(v)
        v = v.lstrip(self.alphabet[0])
        newlen = len(v)
        # Interpret the digits as one big-endian base-58 integer.
        acc = 0
        for c in v:
            acc = acc * 58 + self.alphabet.index(c)
        # Emit bytes least-significant first, then reverse; stripped leading
        # alphabet[0] digits become leading zero bytes.
        result = []
        while acc > 0:
            acc, mod = divmod(acc, 256)
            result.append(mod)
        return (self.bseq(result) + b'\0' * (origlen - newlen))[::-1]
6a73d31980630393b32e22466e488de5ae2f89a5 | 5,588 | py | Python | tools/extract_changelog.py | delta-things/mos | 7ef8e79db5a169a0aafd48502087afe2ce7e70a3 | [
"Apache-2.0"
] | null | null | null | tools/extract_changelog.py | delta-things/mos | 7ef8e79db5a169a0aafd48502087afe2ce7e70a3 | [
"Apache-2.0"
] | null | null | null | tools/extract_changelog.py | delta-things/mos | 7ef8e79db5a169a0aafd48502087afe2ce7e70a3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (c) 2014-2018 Cesanta Software Limited
# All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the ""License"");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an ""AS IS"" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import multiprocessing
import os
import shutil
import subprocess
import sys
import tempfile
import github_api
# NOTE(review): `token` is assigned here but never read; the GitHub token is
# loaded via TOKEN below.
token = ""
# Command-line interface: extract CL: changelog entries between two git refs
# across all mongoose-os repos.
parser = argparse.ArgumentParser(description="")
parser.add_argument("--from", required=True, help = "Starting point (hash-like)")
parser.add_argument("--to", default="HEAD", help = "End point (hash-like)")
parser.add_argument("--no-pull", action="store_true", default=False,
                    help="Do not pull repos if exist, useful for repeated runs")
parser.add_argument("--tmpdir", type=str, default=os.path.expanduser("~/mos_release_tmp"))
parser.add_argument("--parallelism", type=int, default=32)
parser.add_argument("--token_filepath", type=str, default="/secrets/github/cesantabot/github_token")
parser.add_argument("repo", type=str, nargs="*", help="Run on specific repos. If not specified, runs on all repos.")
args = parser.parse_args()
# github_api understands the "file:<path>" form and reads the token lazily.
TOKEN = "file:%s" % args.token_filepath
def get_repos(org):
    """Return the JSON repo records for every repository under *org*.

    Walks the paginated GitHub "list user repos" API until an empty page
    is returned.
    """
    collected = []
    page = 1
    print("Listing repos under %s..." % org, file=sys.stderr)
    while True:
        # Get repos on the current "page"
        print("  %d..." % page, file=sys.stderr)
        batch, ok = github_api.CallUsersAPI(org, TOKEN, "/repos", params={"page": page})
        if len(batch) == 0:
            # No more repos, we're done
            break
        collected.extend(batch)
        page += 1
    return collected
def get_repo_names(org):
    """Return the "full_name" of every repository under *org*."""
    return [record["full_name"] for record in get_repos(org)]
def handle_repo(repo_name, from_tag, to_tag):
    """Clone/update *repo_name* and extract its CL: changelog entries.

    Parses `git log` output between from_tag and to_tag (or the whole
    history when from_tag does not exist in the repo) and collects entries
    introduced by a "CL: " line in the commit message.

    Returns a list of (timestamp, markdown-commit-link, changelog-text)
    tuples; entries whose text is "none" are skipped.
    """
    local_path = os.path.join(args.tmpdir, repo_name)
    if not os.path.isdir(local_path):
        print("%s: Cloning to %s" % (repo_name, local_path), file=sys.stderr)
        subprocess.check_call(["git", "clone", "git@github.com:%s" % repo_name, local_path])
    elif not args.no_pull:
        print("%s: Pulling %s" % (repo_name, local_path), file=sys.stderr)
        subprocess.check_call(["git", "-C", local_path, "pull", "-q"])
    # One record per commit: "<hash> <unix-ts>", subject, body, ---CUT---.
    cmd = ["git", "-C", local_path, "log", "--reverse", "--pretty=format:%H %ct%n%s%n%b%n---CUT---"]
    # See if "from" exists at all
    diff_res = subprocess.run(["git", "-C", local_path, "diff", from_tag], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # If the starting point exists, restrict the range, otherwise
    # we assume that the repo was created after from was created, so get the entire log.
    if diff_res.returncode == 0:
        cmd.append("%s...%s" % (from_tag, to_tag))
    log = subprocess.check_output(cmd)
    res = []
    # Per-commit parser state: ph 0 = expecting "<hash> <ts>" header line,
    # ph 1 = scanning message for a "CL: " line, ph 2 = collecting CL
    # continuation lines; ch/ts/cl hold the current commit's data.
    ph, ch, ts, cl = 0, None, 0, ""
    for line in log.splitlines():
        line = str(line, "utf-8")
        if line == "---CUT---":
            # End of one commit record: emit the entry (if any) and reset.
            if cl and cl != "none":
                res.append((ts, "[%s@%s](https://github.com/%s/commit/%s)" % (repo_name, ch[:7], repo_name, ch), cl))
            ph, ch, ts, cl = 0, None, 0, ""
            continue
        if ph == 0:
            ch, ts = line.split()
            ts = int(ts)
            ph = 1
        elif ph == 1:
            if line.startswith("CL: "):
                cl = line[4:]
                ph = 2
        elif ph == 2:
            # Indented lines and further "CL: " lines extend the entry.
            if line.startswith("    "):
                cl += "\n%s" % line.strip()
            elif line.startswith("CL: "):
                cl += "\n%s" % line[4:].strip()
    return res
def handle_repo_noexc(repo_name, from_tag, to_tag):
    """Exception-safe wrapper around handle_repo for use in a worker pool.

    Returns (repo_name, entries-list) on success, or (repo_name,
    error-string) on failure -- the caller distinguishes the two by type.
    """
    try:
        entries = handle_repo(repo_name, from_tag, to_tag)
    except Exception as exc:
        return (repo_name, str(exc))
    return (repo_name, entries)
# Use the repos given on the command line, or enumerate everything under the
# two GitHub orgs plus a few extra well-known repos.
repos = args.repo
if not repos:
    # Get libs and apps repos
    repos = get_repo_names("mongoose-os-libs") + get_repo_names("mongoose-os-apps")
    # Add a few more
    repos.extend(["cesanta/mongoose-os", "cesanta/mjs", "mongoose-os/mos"])
repos.sort()
print("Repos: %s" % " ".join(repos), file=sys.stderr)
# Fan the per-repo extraction out to a worker pool.
pool = multiprocessing.Pool(processes=args.parallelism)
results = []
for repo_name in repos:
    results.append((repo_name, pool.apply_async(handle_repo_noexc, [repo_name, getattr(args, "from"), args.to])))
# Wait for all tasks to complete, and collect errors
global_cl, errs = [], []
for res in results:
    repo, r = res[1].get()
    if type(r) is str:
        # handle_repo_noexc returns the exception text as a string on
        # failure.  Bug fix: store (repo, message) tuples -- previously the
        # bare string was appended and the report below printed err[0] and
        # err[1], i.e. only the first two characters of the message.
        errs.append((repo, r))
    else:
        for e in r:
            global_cl.append(e)
# Dedup entries. When a single change affect multiple repos, we'll get dups.
# Keyed by changelog text; each value keeps the earliest timestamp and the
# list of commit links that carried the same entry.
cl_map = {}
for ts, ch, cl in sorted(global_cl, key=lambda e: e[0]):
    oe = cl_map.get(cl)
    if oe:
        oe[2].append(ch)
    else:
        cl_map[cl] = (ts, cl, [ch])
# Emit the merged changelog in chronological order.
for e in sorted(cl_map.values(), key=lambda e: e[0]):
    print(" * %s (%s)" % ("\n ".join(e[1].splitlines()), " ".join(e[2])))
if len(errs) != 0:
    print("------------------------------------------------------")
    print("Errors: %d" % len(errs))
    for err in errs:
        print("ERROR in %s: %s" % (err[0], err[1]))
    print("---")
    exit(1)
2f1e42af65663bab20edab1bd239559c7e15bb63 | 5,016 | py | Python | pyVim/fm.py | xweichu/pyvmomi | 77aedef02974a63517a079c482e49fd9890c09a4 | [
"Apache-2.0"
] | null | null | null | pyVim/fm.py | xweichu/pyvmomi | 77aedef02974a63517a079c482e49fd9890c09a4 | [
"Apache-2.0"
] | null | null | null | pyVim/fm.py | xweichu/pyvmomi | 77aedef02974a63517a079c482e49fd9890c09a4 | [
"Apache-2.0"
] | null | null | null | ## @file fm.py
## @brief Synchronous FileManager functions
##
## Detailed description (for Doxygen goes here)
"""
Synchronous FileManager functions.
Detailed description (for pydoc goes here)
"""
def Move(host, srcDatacenter, srcFile, dstDatacenter, dstFile, force,
         fileType):
    """
    Move a datastore file.

    The file identified by (srcDatacenter, srcFile) is moved to
    (dstDatacenter, dstFile) through the host's FileManager, and the
    resulting task is tracked to completion.

    @type host: [in] C{L{pyVim.vimhost.Host}}
    @param host: Host to operate on.
    @type srcDatacenter: [in] str
    @param srcDatacenter: Datacenter of the source path.
    @type srcFile: [in] str
    @param srcFile: Datastore path of the source.
    @type dstDatacenter: [in] str
    @param dstDatacenter: Datacenter of the destination path.
    @type dstFile: [in] str
    @param dstFile: Datastore path of the destination.
    @type force: [in] bool
    @param force: If True, overwrite an identically named file or disk
                  at the destination.
    @type fileType: [in] str
    @param fileType: Type of file to be considered for the operation.
    """
    PrintDebugInfo(functionName="pyVim.fm.Move",
                   srcDatacenter=srcDatacenter,
                   srcFile=srcFile,
                   dstDatacenter=dstDatacenter,
                   dstFile=dstFile,
                   force=force,
                   fileType=fileType)
    fileMgr = host.GetFileManager()
    host.InvokeAndTrack(fileMgr.Move, srcDatacenter, srcFile,
                        dstDatacenter, dstFile, force, fileType)
def PrintDebugInfo(functionName, **kwargs):
    """
    Debug helper: when enabled, dump the given keyword arguments.

    Tracing is disabled by default; flip the guard below to see the calls.
    Always returns None.
    """
    debugEnabled = False
    if not debugEnabled:
        return
    for key in kwargs:
        print("*** %s: %s = '%s'" % (functionName, key, kwargs[key]))
def Copy(host, srcDatacenter, srcFile, dstDatacenter, dstFile, force,
         fileType):
    """
    Copy a datastore file.

    The file identified by (srcDatacenter, srcFile) is copied to
    (dstDatacenter, dstFile) through the host's FileManager, and the
    resulting task is tracked to completion.

    @type host: [in] C{L{pyVim.vimhost.Host}}
    @param host: Host to operate on.
    @type srcDatacenter: [in] str
    @param srcDatacenter: Datacenter of the source path.
    @type srcFile: [in] str
    @param srcFile: Datastore path of the source.
    @type dstDatacenter: [in] str
    @param dstDatacenter: Datacenter of the destination path.
    @type dstFile: [in] str
    @param dstFile: Datastore path of the destination.
    @type force: [in] bool
    @param force: If True, overwrite an identically named file or disk
                  at the destination.
    @type fileType: [in] str
    @param fileType: Type of file to be considered for the operation.
    """
    PrintDebugInfo(functionName="pyVim.fm.Copy",
                   srcDatacenter=srcDatacenter,
                   srcFile=srcFile,
                   dstDatacenter=dstDatacenter,
                   dstFile=dstFile,
                   force=force,
                   fileType=fileType)
    fileMgr = host.GetFileManager()
    host.InvokeAndTrack(fileMgr.Copy, srcDatacenter, srcFile,
                        dstDatacenter, dstFile, force, fileType)
def Delete(host, srcDatacenter, srcFile, force, fileType):
    """
    Delete the datastore file identified by (srcDatacenter, srcFile)
    through the host's FileManager, tracking the resulting task to
    completion.

    @type host: [in] C{L{pyVim.vimhost.Host}}
    @param host: Host to operate on.
    @type srcDatacenter: [in] str
    @param srcDatacenter: Datacenter of the source path.
    @type srcFile: [in] str
    @param srcFile: Datastore path of the source.
    @type force: [in] bool
    @param force: Force flag, passed straight through to
                  FileManager.Delete (see the vSphere API for its exact
                  semantics).
    @type fileType: [in] str
    @param fileType: Type of file to be considered for the operation.
    """
    PrintDebugInfo(functionName="pyVim.fm.Delete",
                   srcDatacenter=srcDatacenter,
                   srcFile=srcFile,
                   force=force,
                   fileType=fileType)
    fileMgr = host.GetFileManager()
    host.InvokeAndTrack(fileMgr.Delete, srcDatacenter, srcFile, force,
                        fileType)
def ChangeOwner(host, srcFile, srcDatacenter, owner):
    """
    Change the owner of the file (srcDatacenter, srcFile) and return the
    FileManager's result.

    @type host: [in] C{L{pyVim.vimhost.Host}}
    @param host: Host to operate on.
    @type srcFile: [in] str
    @param srcFile: Datastore path of the file.
    @type srcDatacenter: [in] str
    @param srcDatacenter: Datacenter of the file's path.
    @type owner: [in] str
    @param owner: User name to set as owner of the file/folder.
    """
    PrintDebugInfo(functionName="pyVim.fm.ChangeOwner",
                   srcFile=srcFile,
                   srcDatacenter=srcDatacenter,
                   owner=owner)
    fileMgr = host.GetFileManager()
    return fileMgr.ChangeOwner(srcFile, srcDatacenter, owner)
| 37.714286 | 77 | 0.583134 |
62cbaff9106c4b3ac7e804e3b90c37aa3949a55d | 559 | py | Python | packages/pyright-internal/src/tests/samples/annotations6.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 3,934 | 2019-03-22T09:26:41.000Z | 2019-05-06T21:03:08.000Z | packages/pyright-internal/src/tests/samples/annotations6.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 107 | 2019-03-24T04:09:37.000Z | 2019-05-06T17:00:04.000Z | packages/pyright-internal/src/tests/samples/annotations6.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 119 | 2019-03-23T10:48:04.000Z | 2019-05-06T08:57:56.000Z | # This sample verifies that the Type[] and type[] annotations work
# as expected when the type argument is Any.
# NOTE: this is a pyright regression-test sample; the calls flagged
# "This should generate an error." are intentionally invalid.
from typing import Type, Any
def is_type1(x: object, y: Type[Any]) -> bool:
    """Report whether x is an instance of class y (y typed via typing.Type[Any])."""
    return isinstance(x, y)
is_type1(1, int)
# This should generate an error.
is_type1(1, 1)
def is_type2(x: object, y: type[Any]) -> bool:
    """Report whether x is an instance of class y (y typed via builtin type[Any])."""
    return isinstance(x, y)
is_type2(1, int)
# This should generate an error.
is_type2(1, 1)
def func1(v1: Type[Any], v2: type[Any]):
    """Both spellings of "type of Any" should reveal as plain "type"."""
    reveal_type(v1, expected_text="type")
    reveal_type(v2, expected_text="type")
| 18.633333 | 66 | 0.686941 |
57aace56d81ea350127d3fcd6c679630ab5b984f | 72 | py | Python | test/test_scripts/__init__.py | asmeenray/autodq | 508cd51eaf2ba647f43219a8ef6313198c8fbc4d | [
"Apache-2.0"
] | 81 | 2020-06-29T08:47:58.000Z | 2022-03-27T19:15:27.000Z | test/test_scripts/__init__.py | asmeenray/autodq | 508cd51eaf2ba647f43219a8ef6313198c8fbc4d | [
"Apache-2.0"
] | 99 | 2018-10-03T01:32:29.000Z | 2020-03-01T15:20:20.000Z | test/test_scripts/__init__.py | asmeenray/autodq | 508cd51eaf2ba647f43219a8ef6313198c8fbc4d | [
"Apache-2.0"
] | 20 | 2018-10-03T02:29:34.000Z | 2020-01-17T14:40:23.000Z | from shared.utils import configure_test_logger
configure_test_logger()
| 18 | 46 | 0.875 |
03781376a9d2e6cd93d5f1a2deb2ec3e12a47e57 | 6,088 | py | Python | src/oci/opsi/models/update_exadata_insight_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/opsi/models/update_exadata_insight_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/opsi/models/update_exadata_insight_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateExadataInsightDetails(object):
    """
    The set of Exadata-insight fields that may be updated.

    This is a polymorphic base class: the ``entitySource`` discriminator in
    an incoming payload selects the concrete subclass (see
    :meth:`get_subtype`).
    """

    #: A constant which can be used with the entity_source property of a UpdateExadataInsightDetails.
    #: This constant has a value of "EM_MANAGED_EXTERNAL_EXADATA"
    ENTITY_SOURCE_EM_MANAGED_EXTERNAL_EXADATA = "EM_MANAGED_EXTERNAL_EXADATA"

    def __init__(self, **kwargs):
        """
        Initializes a new UpdateExadataInsightDetails object with values from
        keyword arguments.  When using this class as input to a service
        operation, prefer the concrete subclass
        :class:`~oci.opsi.models.UpdateEmManagedExternalExadataInsightDetails`
        over this base class.

        The following keyword arguments are supported (corresponding to the
        getters/setters of this class):

        :param entity_source:
            The value to assign to the entity_source property of this UpdateExadataInsightDetails.
            Allowed values for this property are: "EM_MANAGED_EXTERNAL_EXADATA"
        :type entity_source: str

        :param freeform_tags:
            The value to assign to the freeform_tags property of this UpdateExadataInsightDetails.
        :type freeform_tags: dict(str, str)

        :param defined_tags:
            The value to assign to the defined_tags property of this UpdateExadataInsightDetails.
        :type defined_tags: dict(str, dict(str, object))
        """
        # Wire-format metadata consumed by the generic OCI (de)serializer.
        self.swagger_types = {
            'entity_source': 'str',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))'
        }
        self.attribute_map = {
            'entity_source': 'entitySource',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags'
        }
        self._entity_source = None
        self._freeform_tags = None
        self._defined_tags = None

    @staticmethod
    def get_subtype(object_dictionary):
        """
        Given the hash representation of a subtype of this class, return the
        name of the concrete class to instantiate, chosen by the
        ``entitySource`` discriminator.
        """
        discriminator = object_dictionary['entitySource']
        if discriminator == 'EM_MANAGED_EXTERNAL_EXADATA':
            return 'UpdateEmManagedExternalExadataInsightDetails'
        return 'UpdateExadataInsightDetails'

    @property
    def entity_source(self):
        """
        **[Required]** Gets the entity_source of this UpdateExadataInsightDetails.
        Source of the Exadata system.

        Allowed values for this property are: "EM_MANAGED_EXTERNAL_EXADATA"

        :return: The entity_source of this UpdateExadataInsightDetails.
        :rtype: str
        """
        return self._entity_source

    @entity_source.setter
    def entity_source(self, entity_source):
        """
        Sets the entity_source of this UpdateExadataInsightDetails.
        Source of the Exadata system.  Rejects any value that is neither
        None, the sentinel, nor one of the allowed constants.

        :param entity_source: The entity_source of this UpdateExadataInsightDetails.
        :type: str
        """
        allowed_values = ["EM_MANAGED_EXTERNAL_EXADATA"]
        if not value_allowed_none_or_none_sentinel(entity_source, allowed_values):
            raise ValueError(
                "Invalid value for `entity_source`, must be None or one of {0}"
                .format(allowed_values)
            )
        self._entity_source = entity_source

    @property
    def freeform_tags(self):
        """
        Gets the freeform_tags of this UpdateExadataInsightDetails.
        Simple key-value pairs applied without any predefined name, type or
        scope; exists for cross-compatibility only.
        Example: `{\"bar-key\": \"value\"}`

        :return: The freeform_tags of this UpdateExadataInsightDetails.
        :rtype: dict(str, str)
        """
        return self._freeform_tags

    @freeform_tags.setter
    def freeform_tags(self, freeform_tags):
        """
        Sets the freeform_tags of this UpdateExadataInsightDetails.
        Simple key-value pairs applied without any predefined name, type or
        scope; exists for cross-compatibility only.
        Example: `{\"bar-key\": \"value\"}`

        :param freeform_tags: The freeform_tags of this UpdateExadataInsightDetails.
        :type: dict(str, str)
        """
        self._freeform_tags = freeform_tags

    @property
    def defined_tags(self):
        """
        Gets the defined_tags of this UpdateExadataInsightDetails.
        Defined tags for this resource; each key is predefined and scoped to
        a namespace.
        Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`

        :return: The defined_tags of this UpdateExadataInsightDetails.
        :rtype: dict(str, dict(str, object))
        """
        return self._defined_tags

    @defined_tags.setter
    def defined_tags(self, defined_tags):
        """
        Sets the defined_tags of this UpdateExadataInsightDetails.
        Defined tags for this resource; each key is predefined and scoped to
        a namespace.
        Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`

        :param defined_tags: The defined_tags of this UpdateExadataInsightDetails.
        :type: dict(str, dict(str, object))
        """
        self._defined_tags = defined_tags

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| 36.45509 | 245 | 0.668528 |
370f6efa84ed6484645e4a1277112f7f17cfa97e | 5,156 | py | Python | lib/solvers.py | Pexure/ScanNet-ME-3DSegmentation | f6d5b9d5e34b78abd39f68646ed63e0de843e252 | [
"MIT"
] | 19 | 2020-05-06T15:19:31.000Z | 2022-03-17T09:42:28.000Z | lib/solvers.py | Pexure/ScanNet-ME-3DSegmentation | f6d5b9d5e34b78abd39f68646ed63e0de843e252 | [
"MIT"
] | 5 | 2020-06-07T17:02:21.000Z | 2021-12-16T08:44:47.000Z | lib/solvers.py | Pexure/ScanNet-ME-3DSegmentation | f6d5b9d5e34b78abd39f68646ed63e0de843e252 | [
"MIT"
] | 3 | 2021-04-24T04:00:25.000Z | 2021-08-13T13:08:02.000Z | # Copyright (c) Chris Choy (chrischoy@ai.stanford.edu). All Rights Reserved.
#
# Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part of
# the code.
import torch
import logging
from torch.optim import SGD, Adam
from torch.optim.lr_scheduler import LambdaLR, StepLR
class LambdaStepLR(LambdaLR):
  """A ``LambdaLR`` whose counter is addressed in steps rather than epochs.

  ``last_step`` is simply an alias for the underlying ``last_epoch``
  attribute, so subclasses can speak in iteration counts.
  """

  def __init__(self, optimizer, lr_lambda, last_step=-1):
    super().__init__(optimizer, lr_lambda, last_step)

  @property
  def last_step(self):
    """Current step count (stored in ``last_epoch``)."""
    return self.last_epoch

  @last_step.setter
  def last_step(self, step):
    self.last_epoch = step
class PolyLR(LambdaStepLR):
  """DeepLab "poly" policy: lr scaled by ``(1 - step / (max_iter + 1)) ** power``."""

  def __init__(self, optimizer, max_iter, power=0.9, last_step=-1):
    poly_decay = lambda step: (1 - step / (max_iter + 1))**power
    super().__init__(optimizer, poly_decay, last_step)
class SquaredLR(LambdaStepLR):
  """Quadratic decay used with SGD-LARS: lr scaled by ``(1 - step / (max_iter + 1)) ** 2``."""

  def __init__(self, optimizer, max_iter, last_step=-1):
    squared_decay = lambda step: (1 - step / (max_iter + 1))**2
    super().__init__(optimizer, squared_decay, last_step)
class ExpLR(LambdaStepLR):
  """Exponential decay: lr scaled by ``gamma ** (step / step_size)``.

  To decay to 0.1x every N steps with factor g, pick g = exp(log(0.1) / N);
  e.g. 0.9 ** 21.854 ~= 0.1 and 0.95 ** 44.8906 ~= 0.1, since
  N * log(g) = log(0.1).
  """

  def __init__(self, optimizer, step_size, gamma=0.9, last_step=-1):
    exp_decay = lambda step: gamma**(step / step_size)
    super().__init__(optimizer, exp_decay, last_step)
class SGDLars(SGD):
  """SGD with layer-wise adaptive rate scaling, LARS
  (https://arxiv.org/pdf/1708.03888.pdf).

  Each parameter's gradient is rescaled by the trust ratio
  ``|w| / (|w| + |grad|)`` before the usual SGD update is applied.
  """

  def step(self, closure=None):
    """Performs a single optimization step.

    Arguments:
        closure (callable, optional): A closure that reevaluates the model
            and returns the loss.

    .. note::
        As in torch.optim.SGD, the momentum update is
        ``v = rho * v + g; p = p - lr * v`` (the learning rate multiplies
        the velocity, not the gradient), which differs subtly from
        Sutskever et al. and some other frameworks.
    """
    loss = None
    if closure is not None:
      loss = closure()

    for group in self.param_groups:
      weight_decay = group['weight_decay']
      momentum = group['momentum']
      dampening = group['dampening']
      nesterov = group['nesterov']

      for p in group['params']:
        if p.grad is None:
          continue
        d_p = p.grad.data

        # LARS: scale the gradient by the trust ratio |w| / (|w| + |g|).
        w_norm = torch.norm(p.data)
        denom = w_norm + torch.norm(d_p)
        if denom > 0:
          # When both norms are zero the update is zero anyway; skipping
          # the scaling avoids a NaN from the 0 / 0 division.
          d_p.mul_(w_norm / denom)

        if weight_decay != 0:
          # Keyword "alpha" form: the positional (alpha, tensor) overload
          # is deprecated and removed in newer PyTorch releases.
          d_p.add_(p.data, alpha=weight_decay)
        if momentum != 0:
          param_state = self.state[p]
          if 'momentum_buffer' not in param_state:
            # First step: buffer starts at zero, so this leaves buf == d_p.
            buf = param_state['momentum_buffer'] = torch.zeros_like(p.data)
            buf.mul_(momentum).add_(d_p)
          else:
            buf = param_state['momentum_buffer']
            buf.mul_(momentum).add_(d_p, alpha=1 - dampening)
          if nesterov:
            d_p = d_p.add(buf, alpha=momentum)
          else:
            d_p = buf

        p.data.add_(d_p, alpha=-group['lr'])

    return loss
def initialize_optimizer(params, config):
  """Build the optimizer named by ``config.optimizer`` for the given params.

  Only SGD, SGDLars and Adam are actually constructed; the remaining names
  accepted by the assertion fall through to the unsupported-type error.

  Args:
      params: iterable of parameters (or parameter groups) to optimize.
      config: namespace carrying the optimizer type and hyperparameters.

  Returns:
      A configured optimizer instance.

  Raises:
      ValueError: if ``config.optimizer`` is not an implemented optimizer.
  """
  assert config.optimizer in ['SGD', 'Adagrad', 'Adam', 'RMSProp', 'Rprop', 'SGDLars']

  name = config.optimizer
  if name in ('SGD', 'SGDLars'):
    opt_class = SGD if name == 'SGD' else SGDLars
    return opt_class(
        params,
        lr=config.lr,
        momentum=config.sgd_momentum,
        dampening=config.sgd_dampening,
        weight_decay=config.weight_decay)
  if name == 'Adam':
    return Adam(
        params,
        lr=config.lr,
        betas=(config.adam_beta1, config.adam_beta2),
        weight_decay=config.weight_decay)

  logging.error('Optimizer type not supported')
  raise ValueError('Optimizer type not supported')
def initialize_scheduler(optimizer, config, last_step=-1):
  """Build the learning-rate scheduler selected by ``config.scheduler``.

  Args:
      optimizer: the optimizer whose learning rate will be scheduled.
      config: namespace carrying the scheduler type and hyperparameters.
      last_step: index of the last completed step (-1 starts from scratch).

  Returns:
      The configured scheduler instance.

  Raises:
      ValueError: if ``config.scheduler`` names an unsupported scheduler.
  """
  if config.scheduler == 'StepLR':
    return StepLR(
        optimizer, step_size=config.step_size, gamma=config.step_gamma, last_epoch=last_step)
  elif config.scheduler == 'PolyLR':
    return PolyLR(optimizer, max_iter=config.max_iter, power=config.poly_power, last_step=last_step)
  elif config.scheduler == 'SquaredLR':
    return SquaredLR(optimizer, max_iter=config.max_iter, last_step=last_step)
  elif config.scheduler == 'ExpLR':
    return ExpLR(
        optimizer, step_size=config.exp_step_size, gamma=config.exp_gamma, last_step=last_step)
  else:
    logging.error('Scheduler not supported')
    # Previously this fell through and returned None, deferring the failure
    # to the scheduler's first use; fail fast instead, matching
    # initialize_optimizer.
    raise ValueError('Scheduler not supported')
| 31.82716 | 105 | 0.640225 |
9d53cba3ec2e81dfe0ec8b6b2fe4b74e6fe069cf | 1,576 | py | Python | {{cookiecutter.directory_name}}/apps/core/auth.py | Serbel97/django-project-template | 3d7bba7031268ef8dedccb51d72553cdb0a13ca3 | [
"MIT"
] | null | null | null | {{cookiecutter.directory_name}}/apps/core/auth.py | Serbel97/django-project-template | 3d7bba7031268ef8dedccb51d72553cdb0a13ca3 | [
"MIT"
] | null | null | null | {{cookiecutter.directory_name}}/apps/core/auth.py | Serbel97/django-project-template | 3d7bba7031268ef8dedccb51d72553cdb0a13ca3 | [
"MIT"
] | null | null | null | from http import HTTPStatus
from typing import Optional
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.conf import settings
from django.utils import timezone
from django.utils.text import slugify
from django.utils.translation import gettext as _
from apps.core.models import Token
from apps.api.errors import ProblemDetailException
User = get_user_model()
class BearerBackend(ModelBackend):
@staticmethod
def _by_token(token) -> Optional[User]:
try:
token_obj = Token.objects.get(pk=token)
except (Token.DoesNotExist, ValidationError):
return None
token_obj.user.last_login = timezone.now()
token_obj.user.save()
return token_obj.user
def authenticate(self, request, **kwargs) -> Optional[User]:
if kwargs.get('bearer'):
user = self._by_token(kwargs.get('bearer'))
else:
user = super().authenticate(request, **kwargs)
return user
class BasicBackend(ModelBackend):
def authenticate(self, request, **kwargs):
user = super().authenticate(request, **kwargs)
if not user:
raise ProblemDetailException(
request,
_('Invalid credentials'),
status=HTTPStatus.UNAUTHORIZED,
extra_headers=(
('WWW-Authenticate', f'Bearer realm="{slugify(settings.INSTANCE_NAME)}"'),
)
)
return user
| 29.185185 | 94 | 0.654188 |
8d78c1a214c09fc33a5d7da4efe160916b1dbb66 | 1,014 | py | Python | flask_angular_injection/setup.py | sujeetpillai/flask_angular_injection | cfdc75f2b7f1a0843bc6f9c595a89a7aaae8abb1 | [
"MIT"
] | 1 | 2019-03-16T13:34:40.000Z | 2019-03-16T13:34:40.000Z | flask_angular_injection/setup.py | sujeetpillai/flask_angular_injection | cfdc75f2b7f1a0843bc6f9c595a89a7aaae8abb1 | [
"MIT"
] | 1 | 2016-07-29T16:47:04.000Z | 2016-07-29T16:47:04.000Z | flask_angular_injection/setup.py | sujeetpillai/flask_angular_injection | cfdc75f2b7f1a0843bc6f9c595a89a7aaae8abb1 | [
"MIT"
] | null | null | null | """
Flask-Angular-Injection
-------------
Flask extension to inject Flask data into AngularJS controller through a service.
"""
from setuptools import setup
setup(
name='Flask-Angular-Injection',
version='1.0',
url='http://github.io/flask-angular-injection/',
license='MIT',
author='Sujeet Pillai',
author_email='sujeet.pillai@gmail.com',
description='Flask extension to inject Flask data into angularjs through a service',
long_description=__doc__,
packages=['flask_sqlite3'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=[
'Flask'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
) | 28.971429 | 88 | 0.647929 |
c6a04f99977830e4b26086b0f2c42227c1e2a187 | 100,339 | py | Python | kgtk/io/kgtkreader.py | nicklein/kgtk | 54a95e6319251a34815053e0c26973f1c91998af | [
"MIT"
] | null | null | null | kgtk/io/kgtkreader.py | nicklein/kgtk | 54a95e6319251a34815053e0c26973f1c91998af | [
"MIT"
] | null | null | null | kgtk/io/kgtkreader.py | nicklein/kgtk | 54a95e6319251a34815053e0c26973f1c91998af | [
"MIT"
] | null | null | null | """Read a KGTK node or edge file in TSV format.
Normally, results are obtained as rows of string values obtained by iteration
on the KgtkReader object. Alternative iterators are available to return the results
as:
* concise_rows: lists of strings with empty fields converted to None
* kgtk_values: lists of KgtkValue objects
* concise_kgtk_values: lists of KgtkValue objects with empty fields converted to None
* dicts: dicts of strings
* dicts(concise=True): dicts of strings with empty fields omitted
* kgtk_value_dicts: dicts of KgtkValue objects
* kgtk_value_dicts(concise=True): dicts of KgtkValue objects with empty fields omitted
TODO: Add support for alternative envelope formats, such as JSON.
"""
from argparse import ArgumentParser, _ArgumentGroup, Namespace, SUPPRESS
import attr
from enum import Enum
import io
from multiprocessing import Process, Queue
import os
from pathlib import Path
import sys
import typing
from kgtk.kgtkformat import KgtkFormat
from kgtk.io.kgtkbase import KgtkBase
from kgtk.utils.argparsehelpers import optional_bool
from kgtk.utils.closableiter import ClosableIter, ClosableIterTextIOWrapper
from kgtk.utils.enumnameaction import EnumNameAction
from kgtk.utils.gzipprocess import GunzipProcess
from kgtk.utils.validationaction import ValidationAction
from kgtk.value.kgtkvalue import KgtkValue
from kgtk.value.kgtkvalueoptions import KgtkValueOptions, DEFAULT_KGTK_VALUE_OPTIONS
class KgtkReaderMode(Enum):
    """
    There are four file reading modes, controlling which required columns
    (node1/label/node2 for edge files, id for node files) are enforced
    when the header is validated.
    """
    NONE = 0 # Enforce neither edge nor node file required columns
    EDGE = 1 # Enforce edge file required columns
    NODE = 2 # Enforce node file required columns
    AUTO = 3 # Automatically decide whether to enforce edge or node file required columns
@attr.s(slots=True, frozen=True)
class KgtkReaderOptions():
ERROR_LIMIT_DEFAULT: int = 1000
GZIP_QUEUE_SIZE_DEFAULT: int = GunzipProcess.GZIP_QUEUE_SIZE_DEFAULT
MGZIP_THREAD_COUNT_DEFAULT: int = 3
GRAPH_CACHE_FETCHMANY_SIZE_DEFAULT: int = 1000
GRAPH_CACHE_FILTER_BATCH_SIZE_DEFAULT: int = 1000
# TODO: use an enum
INPUT_FORMAT_CSV: str = "csv"
INPUT_FORMAT_KGTK: str = "kgtk"
INPUT_FORMAT_CHOICES: typing.List[str] = [
INPUT_FORMAT_CSV,
INPUT_FORMAT_KGTK,
]
INPUT_FORMAT_DEFAULT: str = INPUT_FORMAT_KGTK
mode: KgtkReaderMode = attr.ib(validator=attr.validators.instance_of(KgtkReaderMode), default=KgtkReaderMode.AUTO)
# The column separator is normally tab.
column_separator: str = attr.ib(validator=attr.validators.instance_of(str), default=KgtkFormat.COLUMN_SEPARATOR)
# supply a missing header record or override an existing header record.
force_column_names: typing.Optional[typing.List[str]] = attr.ib(validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(str),
iterable_validator=attr.validators.instance_of(list))),
default=None)
no_input_header: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# Data record sampling, pre-validation.
#
# 1) Optionally read and skip a specific number of initial records, or record_limit - tail_count,
# whichever is greater.
# 2) Optionally pass through every nth record relative to the number of records read.
# 3) Optionally limit the total number of records read.
initial_skip_count: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
every_nth_record: int = attr.ib(validator=attr.validators.instance_of(int), default=1)
record_limit: typing.Optional[int] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(int)), default=None)
tail_count: typing.Optional[int] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(int)), default=None)
# How do we handle errors?
error_limit: int = attr.ib(validator=attr.validators.instance_of(int), default=ERROR_LIMIT_DEFAULT) # >0 ==> limit error reports
# Top-level validation controls:
repair_and_validate_lines: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
repair_and_validate_values: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# Ignore empty lines, comments, and all whitespace lines, etc.?
empty_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.EXCLUDE)
comment_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.EXCLUDE)
whitespace_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.EXCLUDE)
# Ignore records with empty values in certain fields:
blank_required_field_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.EXCLUDE)
# Ignore records with too many or too few fields?
short_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.COMPLAIN)
long_line_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.COMPLAIN)
# How should header errors be processed?
header_error_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.EXIT)
unsafe_column_name_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.REPORT)
# Validate data cell values?
invalid_value_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.COMPLAIN)
prohibited_list_action: ValidationAction = attr.ib(validator=attr.validators.instance_of(ValidationAction), default=ValidationAction.COMPLAIN)
# Repair records with too many or too few fields?
fill_short_lines: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
truncate_long_lines: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# Other implementation options?
input_format: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None) # TODO: use an Enum
compression_type: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None) # TODO: use an Enum
use_mgzip: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
mgzip_threads: int = attr.ib(validator=attr.validators.instance_of(int), default=MGZIP_THREAD_COUNT_DEFAULT)
gzip_in_parallel: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
gzip_queue_size: int = attr.ib(validator=attr.validators.instance_of(int), default=GZIP_QUEUE_SIZE_DEFAULT)
prohibit_whitespace_in_column_names: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
implied_label: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None)
graph_cache: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None)
use_graph_cache_envar: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)
graph_cache_fetchmany_size: int = attr.ib(validator=attr.validators.instance_of(int), default=GRAPH_CACHE_FETCHMANY_SIZE_DEFAULT)
graph_cache_filter_batch_size: int = attr.ib(validator=attr.validators.instance_of(int), default=GRAPH_CACHE_FILTER_BATCH_SIZE_DEFAULT)
@classmethod
def add_arguments(cls,
parser: ArgumentParser,
mode_options: bool = False,
default_mode: KgtkReaderMode = KgtkReaderMode.AUTO,
validate_by_default: bool = False,
expert: bool = False,
defaults: bool = True,
who: str = "",
):
# This helper function makes it easy to suppress options from
# The help message. The options are still there, and initialize
# what they need to initialize.
        def h(msg: str)->str:
            """Return msg unchanged in expert mode, else argparse.SUPPRESS to hide the option."""
            if expert:
                return msg
            else:
                return SUPPRESS
# This helper function decices whether or not to include defaults
# in argument declarations. If we plan to make arguments with
# prefixes and fallbacks, the fallbacks (the ones without prefixes)
# should get default values, while the prefixed arguments should
# not get defaults.
#
# Note: In obscure circumstances (EnumNameAction, I'm looking at you),
# explicitly setting "default=None" may fail, whereas omitting the
# "default=" phrase succeeds.
#
# TODO: continue researching these issues.
def d(default: typing.Any)->typing.Mapping[str, typing.Any]:
if defaults:
return {"default": default}
else:
return { }
prefix1: str = "--" if len(who) == 0 else "--" + who + "-"
prefix2: str = "" if len(who) == 0 else who + "_"
prefix3: str = "" if len(who) == 0 else who + ": "
prefix4: str = "" if len(who) == 0 else who + " file "
fgroup: _ArgumentGroup = parser.add_argument_group(h(prefix3 + "File options"),
h("Options affecting " + prefix4 + "processing."))
fgroup.add_argument(prefix1 + "column-separator",
dest=prefix2 + "column_separator",
help=h(prefix3 + "Column separator (default=<TAB>)."), # TODO: provide the default with escapes, e.g. \t
type=str, **d(default=KgtkFormat.COLUMN_SEPARATOR))
# TODO: use an Enum or add choices.
fgroup.add_argument(prefix1 + "input-format",
dest=prefix2 + "input_format",
help=h(prefix3 + "Specify the input format (default=%(default)s)."))
fgroup.add_argument(prefix1 + "compression-type",
dest=prefix2 + "compression_type",
help=h(prefix3 + "Specify the compression type (default=%(default)s)."))
fgroup.add_argument(prefix1 + "error-limit",
dest=prefix2 + "error_limit",
help=h(prefix3 + "The maximum number of errors to report before failing (default=%(default)s)"),
type=int, **d(default=cls.ERROR_LIMIT_DEFAULT))
fgroup.add_argument(prefix1 + "use-mgzip",
dest=prefix2 + "use_mgzip",
metavar="optional True|False",
help=h(prefix3 + "Execute multithreaded gzip. (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
fgroup.add_argument(prefix1 + "mgzip-threads", dest=prefix2 + "mgzip_threads",
help=h(prefix3 + "Multithreaded gzip thread count. (default=%(default)s)."),
type=int, **d(default=cls.MGZIP_THREAD_COUNT_DEFAULT))
fgroup.add_argument(prefix1 + "gzip-in-parallel",
dest=prefix2 + "gzip_in_parallel",
metavar="optional True|False",
help=h(prefix3 + "Execute gzip in parallel. (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
fgroup.add_argument(prefix1 + "gzip-queue-size",
dest=prefix2 + "gzip_queue_size",
help=h(prefix3 + "Queue size for parallel gzip. (default=%(default)s)."),
type=int, **d(default=cls.GZIP_QUEUE_SIZE_DEFAULT))
fgroup.add_argument(prefix1 + "implied-label",
dest=prefix2 + "implied_label",
help=h(prefix3 + "When specified, imply a label colum with the specified value (default=%(default)s)."),
type=str, **d(default=None))
fgroup.add_argument(prefix1 + "use-graph-cache-envar",
dest=prefix2 + "use_graph_cache_envar",
metavar="optional True|False",
help=h(prefix3 + "use KGTK_GRAPH_CACHE if --graph-cache is not specified. (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=True))
fgroup.add_argument(prefix1 + "graph-cache",
dest=prefix2 + "graph_cache",
help=h(prefix3 + "When specified, look for input files in a graph cache. (default=%(default)s)."),
type=str, **d(default=None))
fgroup.add_argument(prefix1 + "graph-cache-fetchmany-size", dest=prefix2 + "graph_cache_fetchmany_size",
help=h(prefix3 + "Graph cache transfer buffer size. (default=%(default)s)."),
type=int, **d(default=cls.GRAPH_CACHE_FETCHMANY_SIZE_DEFAULT))
fgroup.add_argument(prefix1 + "graph-cache-filter-batch-size", dest=prefix2 + "graph_cache_filter_batch_size",
help=h(prefix3 + "Graph cache filter batch size. (default=%(default)s)."),
type=int, **d(default=cls.GRAPH_CACHE_FILTER_BATCH_SIZE_DEFAULT))
if mode_options:
fgroup.add_argument(prefix1 + "mode",
dest=prefix2 + "mode",
help=h(prefix3 + "Determine the KGTK file mode (default=%(default)s)."),
type=KgtkReaderMode, action=EnumNameAction, **d(default_mode))
hgroup: _ArgumentGroup = parser.add_argument_group(h(prefix3 + "Header parsing"),
h("Options affecting " + prefix4 + "header parsing."))
hgroup.add_argument(prefix1 + "input-column-names",
prefix1 + "force-column-names",
dest=prefix2 + "force_column_names",
help=h(prefix3 + "Supply input column names when the input file does not " +
"have a header record (--no-input-header=True), " +
"or forcibly override the column names when " +
"a header row exists (--no-input-header=False) (default=None)."),
nargs='+')
hgroup.add_argument(prefix1 + "no-input-header",
dest=prefix2 + "no_input_header",
metavar="optional True|False",
help=h(prefix3 + "When the input file does not have a header record, specify " +
"--no-input-header=True and --input-column-names. When the input file does " +
"have a header record that you want to forcibly override, specify " +
"--input-column-names and --no-input-header=False. " +
"--no-input-header has no effect when --input-column-names " +
"has not been specified. (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
hgroup.add_argument(prefix1 + "header-error-action",
dest=prefix2 + "header_error_action",
help=h(prefix3 + "The action to take when a header error is detected. Only ERROR or EXIT are supported (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.EXIT))
hgroup.add_argument(prefix1 + "unsafe-column-name-action",
dest=prefix2 + "unsafe_column_name_action",
help=h(prefix3 + "The action to take when a column name is unsafe (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.REPORT))
hgroup.add_argument(prefix1 + "prohibit-whitespace-in-column-names",
dest=prefix2 + "prohibit_whitespace_in_column_names",
metavar="optional True|False",
help=h(prefix3 + "Prohibit whitespace in column names. (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
sgroup: _ArgumentGroup = parser.add_argument_group(h(prefix3 + "Pre-validation sampling"),
h("Options affecting " + prefix4 + "pre-validation data line sampling."))
sgroup.add_argument(prefix1 + "initial-skip-count",
dest=prefix2 + "initial_skip_count",
help=h(prefix3 + "The number of data records to skip initially (default=do not skip)."),
type=int, **d(default=0))
sgroup.add_argument(prefix1 + "every-nth-record",
dest=prefix2 + "every_nth_record",
help=h(prefix3 + "Pass every nth record (default=pass all records)."),
type=int, **d(default=1))
sgroup.add_argument(prefix1 + "record-limit",
dest=prefix2 + "record_limit",
help=h(prefix3 + "Limit the number of records read (default=no limit)."),
type=int, **d(default=None))
sgroup.add_argument(prefix1 + "tail-count",
dest=prefix2 + "tail_count",
help=h(prefix3 + "Pass this number of records (default=no tail processing)."),
type=int, **d(default=None))
lgroup: _ArgumentGroup = parser.add_argument_group(h(prefix3 + "Line parsing"),
h("Options affecting " + prefix4 + "data line parsing."))
lgroup.add_argument(prefix1 + "repair-and-validate-lines",
dest=prefix2 + "repair_and_validate_lines",
metavar="optional True|False",
help=h(prefix3 + "Repair and validate lines (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=validate_by_default))
lgroup.add_argument(prefix1 + "repair-and-validate-values",
dest=prefix2 + "repair_and_validate_values",
metavar="optional True|False",
help=h(prefix3 + "Repair and validate values (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=validate_by_default))
lgroup.add_argument(prefix1 + "blank-required-field-line-action",
dest=prefix2 + "blank_required_field_line_action",
help=h(prefix3 + "The action to take when a line with a blank node1, node2, or id field (per mode) is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.EXCLUDE))
lgroup.add_argument(prefix1 + "comment-line-action",
dest=prefix2 + "comment_line_action",
help=h(prefix3 + "The action to take when a comment line is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.EXCLUDE))
lgroup.add_argument(prefix1 + "empty-line-action",
dest=prefix2 + "empty_line_action",
help=h(prefix3 + "The action to take when an empty line is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.EXCLUDE))
lgroup.add_argument(prefix1 + "fill-short-lines",
dest=prefix2 + "fill_short_lines",
metavar="optional True|False",
help=h(prefix3 + "Fill missing trailing columns in short lines with empty values (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
lgroup.add_argument(prefix1 + "invalid-value-action",
dest=prefix2 + "invalid_value_action",
help=h(prefix3 + "The action to take when a data cell value is invalid (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.COMPLAIN))
lgroup.add_argument(prefix1 + "long-line-action",
dest=prefix2 + "long_line_action",
help=h(prefix3 + "The action to take when a long line is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.COMPLAIN))
lgroup.add_argument(prefix1 + "prohibited-list-action",
dest=prefix2 + "prohibited_list_action",
help=h(prefix3 + "The action to take when a data cell contains a prohibited list (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.COMPLAIN))
lgroup.add_argument(prefix1 + "short-line-action",
dest=prefix2 + "short_line_action",
help=h(prefix3 + "The action to take when a short line is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.COMPLAIN))
lgroup.add_argument(prefix1 + "truncate-long-lines",
dest=prefix2 + "truncate_long_lines",
help=h(prefix3 + "Remove excess trailing columns in long lines (default=%(default)s)."),
type=optional_bool, nargs='?', const=True, **d(default=False))
lgroup.add_argument(prefix1 + "whitespace-line-action",
dest=prefix2 + "whitespace_line_action",
help=h(prefix3 + "The action to take when a whitespace line is detected (default=%(default)s)."),
type=ValidationAction, action=EnumNameAction, **d(default=ValidationAction.EXCLUDE))
# Build the value parsing option structure.
@classmethod
def from_dict(cls,
              d: dict,
              who: str = "",
              mode: typing.Optional[KgtkReaderMode] = None,
              fallback: bool = False,
              )->'KgtkReaderOptions':
    """Build a KgtkReaderOptions from a dict of argument values.

    d: typically vars(args) from an argparse Namespace (see from_args).
    who: dest-name prefix; "left" looks up "left_mode", "left_error_limit", etc.
    mode: when supplied, overrides any mode value found in d.
    fallback: when True, a missing/None prefixed entry falls back to the
        unprefixed entry before the hard-coded default is used.

    NOTE: the hard-coded fallback defaults below are intentionally left
    as-is but do not all match the argparse defaults in add_arguments
    (e.g. header_error_action: EXIT there, EXCLUDE here).  They apply
    only when a key is entirely absent from d.
    """
    prefix: str = ""   # The destination name prefix.
    if len(who) > 0:
        prefix = who + "_"

    # TODO: Figure out how to type check this method.
    def lookup(name: str, default):
        # Prefer the prefixed name, then (with fallback) the plain name,
        # then the supplied default.  None values count as absent.
        prefixed_name = prefix + name
        if prefixed_name in d and d[prefixed_name] is not None:
            return d[prefixed_name]
        elif fallback and name in d and d[name] is not None:
            return d[name]
        else:
            return default

    reader_mode: KgtkReaderMode
    if mode is not None:
        reader_mode = mode
    else:
        reader_mode = lookup("mode", KgtkReaderMode.AUTO)

    return cls(
        blank_required_field_line_action=lookup("blank_required_field_line_action", ValidationAction.EXCLUDE),
        column_separator=lookup("column_separator", KgtkFormat.COLUMN_SEPARATOR),
        comment_line_action=lookup("comment_line_action", ValidationAction.EXCLUDE),
        input_format=lookup("input_format", None),
        compression_type=lookup("compression_type", None),
        empty_line_action=lookup("empty_line_action", ValidationAction.EXCLUDE),
        error_limit=lookup("error_limit", cls.ERROR_LIMIT_DEFAULT),
        every_nth_record=lookup("every_nth_record", 1),
        fill_short_lines=lookup("fill_short_lines", False),
        force_column_names=lookup("force_column_names", None),
        no_input_header=lookup("no_input_header", False),
        use_mgzip=lookup("use_mgzip", False),
        mgzip_threads=lookup("mgzip_threads", cls.MGZIP_THREAD_COUNT_DEFAULT),
        gzip_in_parallel=lookup("gzip_in_parallel", False),
        # Consistency fix: use cls.* like the other defaults, instead of
        # naming KgtkReaderOptions explicitly.
        gzip_queue_size=lookup("gzip_queue_size", cls.GZIP_QUEUE_SIZE_DEFAULT),
        implied_label=lookup("implied_label", None),
        graph_cache=lookup("graph_cache", None),
        use_graph_cache_envar=lookup("use_graph_cache_envar", True),
        graph_cache_fetchmany_size=lookup("graph_cache_fetchmany_size", cls.GRAPH_CACHE_FETCHMANY_SIZE_DEFAULT),
        graph_cache_filter_batch_size=lookup("graph_cache_filter_batch_size", cls.GRAPH_CACHE_FILTER_BATCH_SIZE_DEFAULT),
        header_error_action=lookup("header_error_action", ValidationAction.EXCLUDE),
        initial_skip_count=lookup("initial_skip_count", 0),
        invalid_value_action=lookup("invalid_value_action", ValidationAction.REPORT),
        long_line_action=lookup("long_line_action", ValidationAction.EXCLUDE),
        mode=reader_mode,
        prohibited_list_action=lookup("prohibited_list_action", ValidationAction.REPORT),
        record_limit=lookup("record_limit", None),
        repair_and_validate_lines=lookup("repair_and_validate_lines", False),
        repair_and_validate_values=lookup("repair_and_validate_values", False),
        short_line_action=lookup("short_line_action", ValidationAction.EXCLUDE),
        tail_count=lookup("tail_count", None),
        truncate_long_lines=lookup("truncate_long_lines", False),
        unsafe_column_name_action=lookup("unsafe_column_name_action", ValidationAction.REPORT),
        whitespace_line_action=lookup("whitespace_line_action", ValidationAction.EXCLUDE),
        prohibit_whitespace_in_column_names=lookup("prohibit_whitespace_in_column_names", False)
    )
# Build the value parsing option structure.
@classmethod
def from_args(cls,
              args: Namespace,
              who: str = "",
              mode: typing.Optional[KgtkReaderMode] = None,
              fallback: bool = False,
              )->'KgtkReaderOptions':
    """Build a KgtkReaderOptions from a parsed argparse Namespace.

    Thin convenience wrapper: converts the namespace to a dict and
    delegates all lookup/prefix/fallback logic to from_dict.
    """
    arg_dict: dict = vars(args)
    return cls.from_dict(arg_dict, who=who, mode=mode, fallback=fallback)
def show(self, who: str="", out: typing.TextIO=sys.stderr):
    """Print the current option settings, one "--name=value" per line.

    who: when non-empty, each option name is printed with a "--who-"
        prefix so several option groups can share one output stream.
    out: destination stream (default: stderr).

    Optional settings (record limit, tail count, input format, etc.) are
    printed only when they are set.

    Bug fix: initial-skip-count was previously printed twice (once in the
    sampling section and again after tail-count); the duplicate is removed.
    """
    prefix: str = "--" if len(who) == 0 else "--" + who + "-"
    print("%smode=%s" % (prefix, self.mode.name), file=out)
    print("%scolumn-separator=%s" % (prefix, repr(self.column_separator)), file=out)
    if self.force_column_names is not None:
        print("%sforce-column-names=%s" % (prefix, " ".join(self.force_column_names)), file=out)
    print("%sno-input-header=%s" % (prefix, str(self.no_input_header)), file=out)
    print("%serror-limit=%s" % (prefix, str(self.error_limit)), file=out)
    print("%srepair-and-validate-lines=%s" % (prefix, str(self.repair_and_validate_lines)), file=out)
    print("%srepair-and-validate-values=%s" % (prefix, str(self.repair_and_validate_values)), file=out)
    print("%sempty-line-action=%s" % (prefix, self.empty_line_action.name), file=out)
    print("%scomment-line-action=%s" % (prefix, self.comment_line_action.name), file=out)
    print("%swhitespace-line-action=%s" % (prefix, self.whitespace_line_action.name), file=out)
    print("%sblank-required-field-line-action=%s" % (prefix, self.blank_required_field_line_action.name), file=out)
    print("%sshort-line-action=%s" % (prefix, self.short_line_action.name), file=out)
    print("%slong-line-action=%s" % (prefix, self.long_line_action.name), file=out)
    print("%sheader-error-action=%s" % (prefix, self.header_error_action.name), file=out)
    print("%sunsafe-column-name-action=%s" % (prefix, self.unsafe_column_name_action.name), file=out)
    print("%sinvalid-value-action=%s" % (prefix, self.invalid_value_action.name), file=out)
    print("%sinitial-skip-count=%s" % (prefix, str(self.initial_skip_count)), file=out)
    print("%severy-nth-record=%s" % (prefix, str(self.every_nth_record)), file=out)
    if self.record_limit is not None:
        print("%srecord-limit=%s" % (prefix, str(self.record_limit)), file=out)
    if self.tail_count is not None:
        print("%stail-count=%s" % (prefix, str(self.tail_count)), file=out)
    print("%sprohibited-list-action=%s" % (prefix, self.prohibited_list_action.name), file=out)
    print("%sfill-short-lines=%s" % (prefix, str(self.fill_short_lines)), file=out)
    print("%struncate-long-lines=%s" % (prefix, str(self.truncate_long_lines)), file=out)
    if self.input_format is not None:
        print("%sinput-format=%s" % (prefix, str(self.input_format)), file=out)
    if self.compression_type is not None:
        print("%scompression-type=%s" % (prefix, str(self.compression_type)), file=out)
    print("%suse-mgzip=%s" % (prefix, str(self.use_mgzip)), file=out)
    print("%smgzip-threads=%s" % (prefix, str(self.mgzip_threads)), file=out)
    print("%sgzip-in-parallel=%s" % (prefix, str(self.gzip_in_parallel)), file=out)
    print("%sgzip-queue-size=%s" % (prefix, str(self.gzip_queue_size)), file=out)
    print("%sprohibit-whitespace-in-column-names=%s" % (prefix, str(self.prohibit_whitespace_in_column_names)), file=out)
    if self.implied_label is not None:
        print("%simplied-label=%s" % (prefix, str(self.implied_label)), file=out)
    print("%suse-graph-cache-envar=%s" % (prefix, str(self.use_graph_cache_envar)), file=out)
    print("%sgraph-cache-fetchmany-size=%s" % (prefix, str(self.graph_cache_fetchmany_size)), file=out)
    print("%sgraph-cache-filter-batch-size=%s" % (prefix, str(self.graph_cache_filter_batch_size)), file=out)
    if self.graph_cache is not None:
        print("%sgraph-cache=%s" % (prefix, str(self.graph_cache)), file=out)
DEFAULT_KGTK_READER_OPTIONS: KgtkReaderOptions = KgtkReaderOptions()
@attr.s(slots=True, frozen=False)
class KgtkReader(KgtkBase, ClosableIter[typing.List[str]]):
# Reads a KGTK edge or node file and iterates over its rows as lists of
# string values.  Normally constructed via KgtkReader.open(), which picks
# the appropriate subclass (EdgeReader, NodeReader, FastReader, ...).
# Input file path; None means stdin.
file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))
source: ClosableIter[str] = attr.ib() # Todo: validate
# TODO: Fix this validator:
# options: KgtkReaderOptions = attr.ib(validator=attr.validators.instance_of(KgtkReaderOptions))
options: KgtkReaderOptions = attr.ib()
value_options: KgtkValueOptions = attr.ib(validator=attr.validators.instance_of(KgtkValueOptions))
# Column names parsed from (or forced onto) the header row.
column_names: typing.List[str] = attr.ib(validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(str),
iterable_validator=attr.validators.instance_of(list)))
# For convenience, the count of columns. This is the same as len(column_names).
column_count: int = attr.ib(validator=attr.validators.instance_of(int))
# Maps each column name to its zero-based index.
column_name_map: typing.Mapping[str, int] = attr.ib(validator=attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str),
value_validator=attr.validators.instance_of(int)))
input_format: str = attr.ib(validator=attr.validators.instance_of(str))
# A copy of the raw header line for use by the reject file.
header: str = attr.ib(validator=attr.validators.instance_of(str))
# The actual mode used.
#
# TODO: fix the validator.
# mode: KgtkReaderMode = attr.ib(validator=attr.validators.instance_of(KgtkReaderMode), default=KgtkReaderMode.NONE)
mode: KgtkReaderMode = attr.ib(default=KgtkReaderMode.NONE)
# The index of the mandatory/aliased columns. -1 means missing:
node1_column_idx: int = attr.ib(validator=attr.validators.instance_of(int), default=-1) # edge file
label_column_idx: int = attr.ib(validator=attr.validators.instance_of(int), default=-1) # edge file
node2_column_idx: int = attr.ib(validator=attr.validators.instance_of(int), default=-1) # edge file
id_column_idx: int = attr.ib(validator=attr.validators.instance_of(int), default=-1) # node file
# Running statistics counters, updated as data lines are processed:
data_lines_read: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_skipped: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_passed: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_ignored: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_filled: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_truncated: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_short: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_long: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_blank_fields: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_invalid_values: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_prohibited_lists: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_lines_excluded_by_filter: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
data_errors_reported: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
# Is this an edge file or a node file?
is_edge_file: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
is_node_file: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# Is there an input filter?
# Note: At present, input filtering takes place after sampling, validation, and filling.
# This ordering will change in the future, to accomodate sqlite3-based queries.
#
# TODO: Build a validator for input_filter.
input_filter: typing.Optional[typing.Mapping[int, typing.Set[str]]] = attr.ib(default=None)
# Graph cache file.
graph_cache: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None)
# Use the fast reading path?
use_fast_path: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# Reject file
reject_file: typing.Optional[typing.TextIO] = attr.ib(default=None)
reject_line_count: int = attr.ib(validator=attr.validators.instance_of(int), default=0)
# Feedback and error output:
error_file: typing.TextIO = attr.ib(default=sys.stderr)
verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
very_verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
# File suffixes recognized as compressed input, and the CSV suffix that
# switches the input format to CSV.
COMPRESSED_FILE_EXTENSIONS: typing.List[str] = [ ".bz2", ".gz", ".lz4", ".xz" ]
CSV_FILE_EXTENSION: str = ".csv"
@classmethod
def _default_options(
        cls,
        options: typing.Optional[KgtkReaderOptions] = None,
        value_options: typing.Optional[KgtkValueOptions] = None,
)->typing.Tuple[KgtkReaderOptions, KgtkValueOptions]:
    """Resolve option bundles, substituting module-level defaults for None.

    Returns a (reader_options, value_options) tuple in which neither
    element is None.
    """
    resolved_options: KgtkReaderOptions = \
        DEFAULT_KGTK_READER_OPTIONS if options is None else options
    resolved_value_options: KgtkValueOptions = \
        DEFAULT_KGTK_VALUE_OPTIONS if value_options is None else value_options
    return (resolved_options, resolved_value_options)
@classmethod
def _validate_input_filter(cls,
                           input_filter: typing.Mapping[int, typing.Set[str]],
                           column_names: typing.List[str])->None:
    """Validate an input filter against the column layout.

    Each key of input_filter must be an int column index in
    [0, len(column_names)), and each value must be a nonempty set of str.

    Raises ValueError on the first violation; returns None on success.
    (Bug fix: the return annotation previously claimed bool, but the
    method never returned a value.)
    """
    input_filter_key: int
    for input_filter_key in sorted(input_filter.keys()):
        # Validate the key: an in-range int column index.
        if not isinstance(input_filter_key, int):
            raise ValueError("Input filter key %s is not an int." % repr(input_filter_key))
        if input_filter_key < 0:
            raise ValueError("Input filter key %d is less than zero." % input_filter_key)
        if input_filter_key >= len(column_names):
            raise ValueError("Input filter key %d is too large (>= %d)." % (input_filter_key, len(column_names)))

        # Validate the value: a nonempty set of strings.
        input_filter_set: typing.Set[str] = input_filter[input_filter_key]
        if not isinstance(input_filter_set, set):
            raise ValueError("Input filter key %d does not reference a set." % input_filter_key)
        if len(input_filter_set) == 0:
            raise ValueError("Input filter key %d references an empty set." % input_filter_key)

        input_filter_value: str
        for input_filter_value in sorted(list(input_filter_set)):
            if not isinstance(input_filter_value, str):
                raise ValueError("Input filter key %d value %s is not a string." % (input_filter_key, repr(input_filter_value)))
def add_input_filter(self, input_filter: typing.Mapping[int, typing.Set[str]]):
    """Install an input filter on this reader.

    input_filter maps column indexes to the sets of values that pass.
    May only be called before any data lines have been read, and only
    once per reader; raises ValueError otherwise.  Installing a filter
    disables the fast read path.
    """
    # Guard: the filter cannot change once reading has begun,
    # and only one filter may ever be installed.
    if self.data_lines_read > 0:
        raise ValueError("The input filter may not be changed after data lines have been read.")
    if self.input_filter is not None:
        raise ValueError("The input filter has already been defined.")

    # Check the filter against our column layout before accepting it.
    self._validate_input_filter(input_filter, self.column_names)

    self.input_filter = input_filter
    self.use_fast_path = False  # Filtering is incompatible with the fast path.
@classmethod
def open(cls,
         file_path: typing.Optional[typing.Union[Path, str]],
         who: str = "input",
         error_file: typing.TextIO = sys.stderr,
         reject_file: typing.Optional[typing.TextIO] = None,
         mode: typing.Optional[KgtkReaderMode] = None,
         options: typing.Optional[KgtkReaderOptions] = None,
         value_options: typing.Optional[KgtkValueOptions] = None,
         input_filter: typing.Optional[typing.Mapping[int, typing.Set[str]]] = None,
         verbose: bool = False,
         very_verbose: bool = False)->"KgtkReader":
    """
    Opens a KGTK file, which may be an edge file or a node file. The appropriate reader is returned.

    file_path: the input file, or None/"-" for stdin.
    who: label used in diagnostic messages (e.g. "input", "left").
    mode: overrides options.mode when supplied; AUTO sniffs edge vs node
        from the presence of a node1 (or alias) column.
    input_filter: optional {column_index: {allowed values}} row filter.

    Bug fixes relative to the prior version:
    - input_filter validation now runs after the column names are known;
      it previously referenced column_names before assignment, raising
      UnboundLocalError whenever a filter was supplied.
    - the local annotation "input_format: tr" (typo) is now "str".
    """
    if file_path is not None and isinstance(file_path, str):
        file_path = Path(file_path)

    # Supply the default reader and value options:
    (options, value_options) = cls._default_options(options, value_options)

    # Supply the default input_format.  A ".csv" suffix (optionally
    # followed by a recognized compression suffix) implies CSV input.
    input_format: str = KgtkReaderOptions.INPUT_FORMAT_KGTK
    if options.input_format is not None:
        input_format = options.input_format
    elif file_path is not None:
        if len(file_path.suffixes) == 1:
            if file_path.suffix == cls.CSV_FILE_EXTENSION:
                input_format = KgtkReaderOptions.INPUT_FORMAT_CSV
        elif len(file_path.suffixes) > 1:
            if file_path.suffixes[-1] in cls.COMPRESSED_FILE_EXTENSIONS and file_path.suffixes[-2] == cls.CSV_FILE_EXTENSION:
                input_format = KgtkReaderOptions.INPUT_FORMAT_CSV
    if verbose:
        print("input format: %s" % input_format, file=error_file, flush=True)

    # Get the graph cache from the options or an envar:
    graph_cache: typing.Optional[str] = options.graph_cache
    if options.use_graph_cache_envar and graph_cache is None:
        graph_cache = os.getenv("KGTK_GRAPH_CACHE", None)
        if verbose and graph_cache is not None:
            print("Using KGTK_GRAPH_CACHE=%s" % repr(graph_cache), file=error_file, flush=True)

    source: ClosableIter[str]
    header: str
    column_names: typing.List[str]

    # Decide whether or not to use the graph cache or the fast read path.
    # This code assumes that the options will not change beyond this
    # point.
    need_record_slicing: bool = \
        options.record_limit is not None or \
        options.tail_count is not None or \
        options.initial_skip_count != 0 or \
        options.every_nth_record > 1

    use_graph_cache: bool = False
    use_fast_path: bool = False
    gca = None  # GraphCacheAdaptor, when the graph cache path is taken.
    if graph_cache is not None and \
       file_path is not None and \
       not need_record_slicing:
        from kgtk.io.graphcacheadaptor import GraphCacheAdaptor
        gca = GraphCacheAdaptor.open(graph_cache_path=Path(graph_cache),
                                     file_path=file_path,
                                     error_file=error_file,
                                     verbose=verbose)
        if gca is not None:
            use_graph_cache = True
            if verbose:
                print("KgtkReader: Using the graph cache.", file=error_file, flush=True)
            source = ClosableIterTextIOWrapper(sys.stdin) # This is a dummy definition.
            column_names = gca.column_names.copy()
            header = KgtkFormat.COLUMN_SEPARATOR.join(column_names)

    # The fast path is only usable for plain KGTK input with no slicing,
    # repair/validation, or implied label.
    if not use_graph_cache and \
       not need_record_slicing and \
       not options.repair_and_validate_lines and \
       not options.repair_and_validate_values and \
       options.implied_label is None and \
       input_format == KgtkReaderOptions.INPUT_FORMAT_KGTK:
        use_fast_path = True
        if verbose:
            print("KgtkReader: OK to use the fast read path.", file=error_file, flush=True)

    if not use_graph_cache:
        source = cls._openfile(file_path, options=options, error_file=error_file, verbose=verbose)

        # Read the kgtk file header and split it into column names. We get the
        # header back, too, for use in debugging and error messages.
        (header, column_names) = cls._build_column_names(source, options, input_format, error_file=error_file, verbose=verbose)

    # If there's an implied label, add the column to the end. If a label column
    # already exists, then later we'll detect a duplicate column name.
    if options.implied_label is not None:
        column_names.append(cls.LABEL)

    # If an input_filter has been supplied, check it for validity now that
    # the column names are known.  (Bug fix: this check previously ran
    # before column_names was bound.)
    if input_filter is not None:
        cls._validate_input_filter(input_filter, column_names)

    # Check for unsafe column names.
    cls.check_column_names(column_names,
                           header_line=header,
                           who=who,
                           error_action=options.unsafe_column_name_action,
                           error_file=error_file,
                           prohibit_whitespace_in_column_names=options.prohibit_whitespace_in_column_names)

    # Build a map from column name to column index.
    column_name_map: typing.Mapping[str, int] = cls.build_column_name_map(column_names,
                                                                          header_line=header,
                                                                          who=who,
                                                                          error_action=options.header_error_action,
                                                                          error_file=error_file)

    # Should we automatically determine if this is an edge file or a node file?
    if mode is None:
        mode = options.mode
    is_edge_file: bool = False
    is_node_file: bool = False
    if mode is KgtkReaderMode.AUTO:
        # If we have a node1 (or alias) column, then this must be an edge file. Otherwise, assume it is a node file.
        node1_idx: int = cls.get_column_idx(cls.NODE1_COLUMN_NAMES,
                                            column_name_map,
                                            header_line=header,
                                            who=who,
                                            error_action=options.header_error_action,
                                            error_file=error_file,
                                            is_optional=True)
        if node1_idx >= 0:
            is_edge_file = True
            is_node_file = False
            if verbose:
                print("%s column found, this is a KGTK edge file" % column_names[node1_idx], file=error_file, flush=True)
        else:
            is_edge_file = False
            is_node_file = True
            if verbose:
                print("node1 column not found, assuming this is a KGTK node file", file=error_file, flush=True)
    elif mode is KgtkReaderMode.EDGE:
        is_edge_file = True
    elif mode is KgtkReaderMode.NODE:
        is_node_file = True
    elif mode is KgtkReaderMode.NONE:
        pass

    # Get the indices of the special columns.
    node1_column_idx: int
    label_column_idx: int
    node2_column_idx: int
    id_column_idx: int
    (node1_column_idx,
     label_column_idx,
     node2_column_idx,
     id_column_idx) = cls.get_special_columns(column_name_map,
                                              header_line=header,
                                              who=who,
                                              error_action=options.header_error_action,
                                              error_file=error_file,
                                              is_edge_file=is_edge_file,
                                              is_node_file=is_node_file)
    if verbose:
        print("KgtkReader: Special columns: node1=%d label=%d node2=%d id=%d" % (node1_column_idx,
                                                                                 label_column_idx,
                                                                                 node2_column_idx,
                                                                                 id_column_idx), file=error_file, flush=True)

    # Select the best implementation class.
    if use_graph_cache and gca is not None:
        if verbose:
            print("KgtkReader: Reading a kgtk file using the graph cache path.", file=error_file, flush=True)
        cls = gca.reader(fetch_size=options.graph_cache_fetchmany_size,
                         filter_batch_size=options.graph_cache_filter_batch_size,
                         options=options,
                         )
    elif use_fast_path:
        # The EdgeReader/NodeReader distinctions don't matter on the fast path.
        if input_filter is None:
            # We'll instantiate a FastReader, which is a subclass of KgtkReader.
            # The FastReader import is deferred to avoid circular imports.
            from kgtk.io.fastreader import FastReader
            cls = FastReader
            if verbose:
                print("KgtkReader: Reading a kgtk file using the fast path.", file=error_file, flush=True)
        else:
            # We'll instantiate a FilteredFastReader, which is a subclass of KgtkReader.
            # The FilteredFastReader import is deferred to avoid circular imports.
            from kgtk.io.fastreader import FilteredFastReader
            cls = FilteredFastReader
            if verbose:
                print("KgtkReader: Reading a kgtk file using the filtered fast path.", file=error_file, flush=True)
    elif is_edge_file:
        # We'll instantiate an EdgeReader, which is a subclass of KgtkReader.
        # The EdgeReader import is deferred to avoid circular imports.
        from kgtk.io.edgereader import EdgeReader
        cls = EdgeReader
        if verbose:
            print("KgtkReader: Reading an edge file.", file=error_file, flush=True)
    elif is_node_file:
        # We'll instantiate a NodeReader, which is a subclass of KgtkReader.
        # The NodeReader import is deferred to avoid circular imports.
        from kgtk.io.nodereader import NodeReader
        cls = NodeReader
        if verbose:
            print("KgtkReader: Reading an node file.", file=error_file, flush=True)
    else:
        # cls remains KgtkReader
        if verbose:
            print("KgtkReader: Reading a non-KGTK file.", file=error_file, flush=True)

    return cls(file_path=file_path,
               source=source,
               column_names=column_names,
               column_name_map=column_name_map,
               column_count=len(column_names),
               header=header,
               mode=mode,
               input_format=input_format,
               node1_column_idx=node1_column_idx,
               label_column_idx=label_column_idx,
               node2_column_idx=node2_column_idx,
               id_column_idx=id_column_idx,
               error_file=error_file,
               reject_file=reject_file,
               options=options,
               value_options=value_options,
               input_filter=input_filter,
               graph_cache=graph_cache,
               is_edge_file=is_edge_file,
               is_node_file=is_node_file,
               use_fast_path=use_fast_path,
               verbose=verbose,
               very_verbose=very_verbose,
               )
@classmethod
def _open_compressed_file(cls,
                          compression_type: str,
                          file_name: str,
                          file_or_path: typing.Union[Path, typing.TextIO],
                          who: str,
                          use_mgzip: bool,
                          mgzip_threads: int,
                          error_file: typing.TextIO,
                          verbose: bool)->typing.TextIO:
    """Open file_or_path for text reading through the named decompressor.

    compression_type selects the codec (".gz"/"gz", ".bz2"/"bz2",
    ".xz"/"xz", ".lz4"/"lz4"); gzip input uses the multithreaded mgzip
    package when use_mgzip is True.  Raises ValueError on an unknown
    compression type.  Codec modules are imported lazily so unused
    codecs cost nothing.
    """
    # TODO: find a better way to coerce typing.IO[Any] to typing.TextIO
    if compression_type in (".gz", "gz"):
        if not use_mgzip:
            if verbose:
                print("%s: reading gzip %s" % (who, file_name), file=error_file, flush=True)
            import gzip
            return gzip.open(file_or_path, mode="rt") # type: ignore
        if verbose:
            print("%s: reading mgzip with %d threads: %s" % (who, mgzip_threads, file_name), file=error_file, flush=True)
        import mgzip
        # mgzip wants a path string rather than a Path object.
        if isinstance(file_or_path, Path):
            return mgzip.open(str(file_or_path), mode="rt", thread=mgzip_threads) # type: ignore
        return mgzip.open(file_or_path, mode="rt", thread=mgzip_threads) # type: ignore

    if compression_type in (".bz2", "bz2"):
        if verbose:
            print("%s: reading bz2 %s" % (who, file_name), file=error_file, flush=True)
        import bz2
        return bz2.open(file_or_path, mode="rt") # type: ignore

    if compression_type in (".xz", "xz"):
        if verbose:
            print("%s: reading lzma %s" % (who, file_name), file=error_file, flush=True)
        import lzma
        return lzma.open(file_or_path, mode="rt") # type: ignore

    if compression_type in (".lz4", "lz4"):
        if verbose:
            print("%s: reading lz4 %s" % (who, file_name), file=error_file, flush=True)
        import lz4 # type: ignore
        return lz4.frame.open(file_or_path, mode="rt") # type: ignore

    # TODO: throw a better exception.
    raise ValueError("%s: Unexpected compression_type '%s'" % (who, compression_type))
@classmethod
def _openfile(cls,
              file_path: typing.Optional[Path],
              options: KgtkReaderOptions,
              error_file: typing.TextIO,
              verbose: bool)->ClosableIter[str]:
    """Open the input for reading and return it wrapped as a ClosableIter.

    Handles stdin (None or "-"), file descriptors ("<n", uncompressed
    only), and compressed files (explicit option or file suffix);
    optionally decompresses in a subprocess when
    options.gzip_in_parallel is set.
    """
    who: str = cls.__name__

    # Standard input?
    if file_path is None or str(file_path) == "-":
        if options.compression_type is not None and len(options.compression_type) > 0:
            return ClosableIterTextIOWrapper(cls._open_compressed_file(options.compression_type,
                                                                       "-",
                                                                       sys.stdin,
                                                                       who,
                                                                       options.use_mgzip,
                                                                       options.mgzip_threads,
                                                                       error_file,
                                                                       verbose))
        if verbose:
            print("%s: reading stdin" % who, file=error_file, flush=True)
        return ClosableIterTextIOWrapper(sys.stdin)

    # A numeric file descriptor, e.g. "<3"?
    if str(file_path).startswith("<"):
        # Note: compression is not currently supported for fd input files.
        fd: int = int(str(file_path)[1:])
        if verbose:
            print("%s: reading file descriptor %d" % (who, fd), file=error_file, flush=True)
        return ClosableIterTextIOWrapper(open(fd, "r"))

    if verbose:
        print("%s: File_path.suffix: %s" % (who, file_path.suffix), file=error_file, flush=True)

    # Determine the compression in effect: an explicit option wins,
    # otherwise fall back to a recognized file suffix.
    compression: str = ""
    if options.compression_type is not None and len(options.compression_type) > 0:
        compression = options.compression_type
    elif file_path.suffix in cls.COMPRESSED_FILE_EXTENSIONS:
        compression = file_path.suffix

    if len(compression) == 0:
        # An ordinary uncompressed file: open it directly.
        if verbose:
            print("%s: reading file %s" % (who, str(file_path)), file=error_file, flush=True)
        return ClosableIterTextIOWrapper(open(file_path, "r"))

    input_file: typing.TextIO = cls._open_compressed_file(compression,
                                                          str(file_path),
                                                          file_path,
                                                          who,
                                                          options.use_mgzip,
                                                          options.mgzip_threads,
                                                          error_file,
                                                          verbose)
    if options.gzip_in_parallel:
        # Decompress in a separate process, feeding lines through a queue.
        gzip_thread: GunzipProcess = GunzipProcess(input_file, Queue(options.gzip_queue_size))
        gzip_thread.start()
        return gzip_thread
    return ClosableIterTextIOWrapper(input_file)
@classmethod
def _build_column_names(cls,
                        source: ClosableIter[str],
                        options: KgtkReaderOptions,
                        input_format: str,
                        error_file: typing.TextIO,
                        verbose: bool = False,
                        )->typing.Tuple[str, typing.List[str]]:
    """Read the header line and split it into column names.

    Returns (header_text, column_names).  When
    options.force_column_names is supplied, those names are used
    instead, and the file's own header line is skipped unless
    options.no_input_header is set.
    """
    if options.force_column_names is not None:
        if verbose:
            print("Forcing column names", file=error_file, flush=True)
        # Skip the file's own header record, unless told there isn't one.
        if not options.no_input_header:
            if verbose:
                print("Skipping a header record", file=error_file, flush=True)
            try:
                next(source)
            except StopIteration:
                raise ValueError("No header line to skip")
        # Synthesize a header string from the forced names.
        return options.column_separator.join(options.force_column_names), options.force_column_names

    # Read the column names from the first line, stripping end-of-line characters.
    #
    # TODO: if the read fails, throw a more useful exception with the line number.
    try:
        header: str = next(source).rstrip("\r\n")
    except StopIteration:
        raise ValueError("No header line in file")
    if verbose:
        print("header: %s" % header, file=error_file, flush=True)

    # Split the header line into column names.
    if input_format == KgtkReaderOptions.INPUT_FORMAT_CSV:
        return header, cls.csvsplit(header)
    return header, header.split(options.column_separator)
def close(self):
self.source.close()
def exclude_line(self, action: ValidationAction, msg: str, line: str)->bool:
    """Apply a validation action to a problematic line.

    Returns True when the line should be excluded from the output.
    PASS and EXCLUDE are silent; REPORT and COMPLAIN print the message
    (and may trip the error limit); ERROR raises ValueError; EXIT
    prints and terminates the process.
    """
    if action == ValidationAction.PASS:
        return False # Silently pass the line through.
    if action == ValidationAction.EXCLUDE:
        return True # Silently exclude the line.
    if action == ValidationAction.ERROR:
        # Immediately raise an exception.
        raise ValueError("In input data line %d, %s: %s" % (self.data_lines_read, msg, line))
    if action == ValidationAction.EXIT:
        print("Data line %d:\n%s\n%s" % (self.data_lines_read, line, msg), file=self.error_file, flush=True)
        sys.exit(1)

    # REPORT (keep the line) or COMPLAIN (exclude the line): report the
    # issue, count it, and enforce any error limit.
    excluded: bool = action == ValidationAction.COMPLAIN
    print("Data line %d:\n%s\n%s" % (self.data_lines_read, line, msg), file=self.error_file, flush=True)
    self.data_errors_reported += 1
    if self.options.error_limit > 0 and self.data_errors_reported >= self.options.error_limit:
        raise ValueError("Too many data errors, exiting.")
    return excluded
def reject(self, line):
if self.reject_file is None:
return
if self.reject_line_count == 0:
if self.header.endswith("\n") or self.header.endswith("\r"):
self.reject_file.write(self.header)
else:
print("%s" % self.header, file=self.reject_file)
self.reject_line_count += 1
if line.endswith("\n") or line.endswith("\r"):
self.reject_file.write(line)
else:
print("%s" % line, file=self.reject_file)
@classmethod
def csvsplit(cls, line: str)->typing.List[str]:
row: typing.List[str] = [ ]
item: str = ""
c: str
instring: bool = False
sawstring:bool = False
for c in line:
if instring:
if c == '"':
# TODO: optionally look for escaped characters.
instring = False
sawstring = True
else:
item += c
else:
if c == '"':
instring = True
if sawstring:
item += c
elif c == ",":
if sawstring:
row.append(KgtkFormat.stringify(item))
else:
row.append(item)
item = ""
else:
item += c
sawstring = False
if sawstring:
row.append(KgtkFormat.stringify(item))
else:
row.append(item)
# for x in row:
# print("X: '%s'" % x, file=sys.stderr, flush=True)
return row
# Get the next edge values as a list of strings.
def nextrow(self)-> typing.List[str]:
    """Read, repair, validate, and return the next data row as a list of
    strings.

    Lines may be skipped (sampling options), ignored (empty, comment, or
    whitespace lines), or excluded (validation failures, which may also
    be copied to the reject file); the loop continues until a row passes
    every enabled check.  Raises StopIteration at end of input or when
    the record limit is reached.
    """
    line: str
    row: typing.List[str]

    # Cache these flags; they gate work done on every line.
    repair_and_validate_lines: bool = self.options.repair_and_validate_lines
    repair_and_validate_values: bool = self.options.repair_and_validate_values

    # Compute the initial skip count.
    skip_count: int = self.options.initial_skip_count
    if self.options.record_limit is not None and self.options.tail_count is not None:
        # Compute the tail count.
        tail_skip_count: int = self.options.record_limit - self.options.tail_count
        if tail_skip_count > skip_count:
            skip_count = tail_skip_count # Take the larger skip count.

    # This loop accomodates lines that are ignored.
    while (True):
        # Has a record limit been specified and have we reached it?
        if self.options.record_limit is not None:
            if self.data_lines_read >= self.options.record_limit:
                # Close the source and stop the iteration.
                self.source.close() # Do we need to guard against repeating this call?
                raise StopIteration

        # Read a line from the source
        try:
            line = next(self.source) # Will throw StopIteration
        except StopIteration as e:
            # Close the input file!
            #
            # TODO: implement a close() routine and/or whatever it takes to support "with".
            self.source.close() # Do we need to guard against repeating this call?
            raise e

        # Count the data line read.
        self.data_lines_read += 1

        # Data sampling: skip the leading lines, then honor every-nth sampling.
        if self.data_lines_read <= skip_count:
            self.data_lines_skipped += 1
            continue
        if self.options.every_nth_record > 1:
            if self.data_lines_read % self.options.every_nth_record != 0:
                self.data_lines_skipped += 1
                continue

        # Strip the end-of-line characters:
        line = line.rstrip("\r\n")

        if repair_and_validate_lines:
            # TODO: Use a separate option to control this.
            if self.very_verbose:
                print("'%s'" % line, file=self.error_file, flush=True)

            # Ignore empty lines.
            if self.options.empty_line_action != ValidationAction.PASS and len(line) == 0:
                if self.exclude_line(self.options.empty_line_action, "saw an empty line", line):
                    self.reject(line)
                    self.data_lines_ignored += 1
                    continue

            # Ignore comment lines:
            if self.options.comment_line_action != ValidationAction.PASS and line[0] == self.COMMENT_INDICATOR:
                if self.exclude_line(self.options.comment_line_action, "saw a comment line", line):
                    self.reject(line)
                    self.data_lines_ignored += 1
                    continue

            # Ignore whitespace lines
            if self.options.whitespace_line_action != ValidationAction.PASS and line.isspace():
                if self.exclude_line(self.options.whitespace_line_action, "saw a whitespace line", line):
                    self.reject(line)
                    self.data_lines_ignored += 1
                    continue

        # Split the line into fields.
        if self.input_format == KgtkReaderOptions.INPUT_FORMAT_CSV:
            row = self.csvsplit(line)
        else:
            row = line.split(self.options.column_separator)

        # Optionally add the implied label value, after allowing for
        # filling and truncation options.
        #
        # TODO: make this more efficient.
        if self.options.implied_label is not None:
            if repair_and_validate_lines:
                # Optionally fill missing trailing columns with empty row:
                # (fill/truncate to column_count - 1 here: the implied
                # label appended below supplies the final column)
                if self.options.fill_short_lines and len(row) < self.column_count - 1:
                    while len(row) < self.column_count - 1:
                        row.append("")
                    self.data_lines_filled += 1

                # Optionally remove extra trailing columns:
                if self.options.truncate_long_lines and len(row) > self.column_count - 1:
                    row = row[:self.column_count-1]
                    self.data_lines_truncated += 1

            # Append the implied label value:
            row.append(self.options.implied_label)

        if repair_and_validate_lines:
            # Optionally fill missing trailing columns with empty cells:
            if self.options.fill_short_lines and len(row) < self.column_count:
                while len(row) < self.column_count:
                    row.append("")
                self.data_lines_filled += 1

            # Optionally remove extra trailing columns:
            if self.options.truncate_long_lines and len(row) > self.column_count:
                row = row[:self.column_count]
                self.data_lines_truncated += 1

            # Optionally validate that the line contained the right number of columns:
            #
            # When we report line numbers in error messages, line 1 is the first line after the header line.
            if self.options.short_line_action != ValidationAction.PASS and len(row) < self.column_count:
                if self.exclude_line(self.options.short_line_action,
                                     "Required %d columns, saw %d: '%s'" % (self.column_count,
                                                                            len(row),
                                                                            line),
                                     line):
                    self.reject(line)
                    self.data_lines_excluded_short += 1
                    continue

            if self.options.long_line_action != ValidationAction.PASS and len(row) > self.column_count:
                if self.exclude_line(self.options.long_line_action,
                                     "Required %d columns, saw %d (%d extra): '%s'" % (self.column_count,
                                                                                       len(row),
                                                                                       len(row) - self.column_count,
                                                                                       line),
                                     line):
                    self.reject(line)
                    self.data_lines_excluded_long += 1
                    continue

            # Exclude rows with blank required fields (node1/node2 in edge
            # files, id in node files):
            if self._ignore_if_blank_required_fields(row, line):
                self.reject(line)
                self.data_lines_excluded_blank_fields += 1
                continue

        if repair_and_validate_values:
            if self.options.invalid_value_action != ValidationAction.PASS:
                # TODO: find a way to optionally cache the KgtkValue objects
                # so we don't have to create them a second time in the conversion
                # and iterator methods below.
                if self._ignore_invalid_values(row, line):
                    self.reject(line)
                    self.data_lines_excluded_invalid_values += 1
                    continue

            if self.options.prohibited_list_action != ValidationAction.PASS:
                if self._ignore_prohibited_lists(row, line):
                    self.reject(line)
                    self.data_lines_excluded_prohibited_lists += 1
                    continue

        # Optionally require specific values in selected columns:
        if self.input_filter is not None:
            input_filter_idx: int
            input_filter_set: typing.Set[str]
            fail: bool = False
            for input_filter_idx, input_filter_set in self.input_filter.items():
                value: str = row[input_filter_idx]
                if value not in input_filter_set:
                    fail = True
                    break
            if fail:
                self.data_lines_excluded_by_filter += 1
                continue

        self.data_lines_passed += 1

        # TODO: User a seperate option to control this.
        # if self.very_verbose:
        #     self.error_file.write(".")
        #     self.error_file.flush()

        return row
# This is both an iterable and an iterator object.
def __iter__(self)->typing.Iterator[typing.List[str]]:
return self
# Get the next row values as a list of strings.
# TODO: Convert integers, coordinates, etc. to Python types
def __next__(self)-> typing.List[str]:
return self.nextrow()
def concise_rows(self)->typing.Iterator[typing.List[typing.Optional[str]]]:
"""
Using a generator function, create an iterator that returns rows of fields
as strings. Empty fields will be returned as None.
"""
while True:
try:
row: typing.List[str] = self.nextrow()
except StopIteration:
return
# Copy the row, converting empty fields into None:
results: typing.List[typing.Optional[str]] = [ ]
field: str
for field in row:
if len(field) == 0:
results.append(None)
else:
results.append(field)
yield results
def to_kgtk_values(self, row: typing.List[str],
                   validate: bool = False,
                   parse_fields: bool = False)->typing.List[KgtkValue]:
    """Convert an input row into a list of KgtkValue instances,
    optionally validating (and possibly repairing) each one."""
    values: typing.List[KgtkValue] = [ ]
    cell: str
    for cell in row:
        value: KgtkValue = KgtkValue(cell, options=self.value_options, parse_fields=parse_fields)
        if validate:
            value.validate()
        values.append(value)
    return values
def kgtk_values(self,
                validate: bool = False,
                parse_fields: bool = False
                )->typing.Iterator[typing.List[KgtkValue]]:
    """Generate rows of fields as KgtkValue objects, optionally
    validating each one."""
    while True:
        try:
            row: typing.List[str] = self.nextrow()
        except StopIteration:
            return
        yield self.to_kgtk_values(row, validate=validate, parse_fields=parse_fields)
def to_concise_kgtk_values(self,
                           row: typing.List[str],
                           validate: bool = False,
                           parse_fields: bool = False
                           )->typing.List[typing.Optional[KgtkValue]]:
    """Convert an input row into a list of KgtkValue instances, with
    each empty field replaced by None.  Optionally validate each
    KgtkValue object."""
    values: typing.List[typing.Optional[KgtkValue]] = [ ]
    cell: str
    for cell in row:
        if len(cell) == 0:
            values.append(None)
            continue
        value: KgtkValue = KgtkValue(cell, options=self.value_options, parse_fields=parse_fields)
        if validate:
            value.validate()
        values.append(value)
    return values
def concise_kgtk_values(self,
                        validate: bool = False,
                        parse_fields: bool = False
                        )->typing.Iterator[typing.List[typing.Optional[KgtkValue]]]:
    """Generate rows of fields as KgtkValue objects, with empty fields
    returned as None.

    When validate is True, validate each KgtkValue object; when
    parse_fields is True, parse each value into its component fields.
    """
    while True:
        try:
            # Bug fix: parse_fields was accepted but never forwarded,
            # unlike the parallel kgtk_values() method.
            yield self.to_concise_kgtk_values(self.nextrow(), validate=validate, parse_fields=parse_fields)
        except StopIteration:
            return
def to_dict(self, row: typing.List[str], concise: bool=False
)->typing.Mapping[str, str]:
"""
Convert an input row into a dict of named fields.
If concise is True, then empty fields will be skipped.
"""
results: typing.MutableMapping[str, str] = { }
field: str
idx: int = 0
# We'll use two seperate loops in anticipation of a modest
# efficiency gain.
if concise:
for field in row:
if len(field) > 0:
results[self.column_names[idx]] = field
idx += 1
else:
for field in row:
results[self.column_names[idx]] = field
idx += 1
return results
def dicts(self, concise: bool=False
)->typing.Iterator[typing.Mapping[str, str]]:
"""
Using a generator function, create an iterator that returns each row as a dict of named fields.
If concise is True, then empty fields will be skipped.
"""
while True:
try:
yield self.to_dict(self.nextrow(), concise=concise)
except StopIteration:
return
def to_kgtk_value_dict(self,
                       row: typing.List[str],
                       validate: bool=False,
                       parse_fields: bool=False,
                       concise: bool=False
                       )->typing.Mapping[str, KgtkValue]:
    """Convert an input row into a dict of KgtkValue objects keyed by
    column name.  When concise is True, empty fields are omitted; when
    validate is True, each KgtkValue is validated."""
    value_map: typing.MutableMapping[str, KgtkValue] = { }
    idx: int
    cell: str
    for idx, cell in enumerate(row):
        if concise and len(cell) == 0:
            continue # Skip the empty field.
        value: KgtkValue = KgtkValue(cell, options=self.value_options, parse_fields=parse_fields)
        if validate:
            value.validate()
        value_map[self.column_names[idx]] = value
    return value_map
def kgtk_value_dicts(self,
                     validate: bool=False,
                     parse_fields: bool=False,
                     concise: bool=False
                     )->typing.Iterator[typing.Mapping[str, KgtkValue]]:
    """Generate each row as a dict of named KgtkValue objects.
    When concise is True, empty fields are omitted; when validate is
    True, each KgtkValue is validated."""
    while True:
        try:
            row: typing.List[str] = self.nextrow()
        except StopIteration:
            return
        yield self.to_kgtk_value_dict(row, validate=validate, parse_fields=parse_fields, concise=concise)
def _ignore_invalid_values(self, row: typing.List[str], line: str)->bool:
"""Give a row of values, validate each value. If we find one or more
validation problems, we might want to emit error messages and we might
want to ignore the entire row.
If self.verbose is True, we use StringIO to capture additional error messages
from KgtkValue validation.
Returns True to indicate that the row should be ignored (skipped).
"""
problems: typing.List[str] = [ ] # Build a list of problems.
idx: int
item: str
for idx, item in enumerate(row):
if len(item) > 0: # Optimize the common case of empty columns.
error_buffer: io.StringIO = io.StringIO()
kv: KgtkValue = KgtkValue(item, options=self.value_options, error_file=error_buffer, verbose=self.verbose)
if not kv.is_valid():
if self.verbose:
problems.append("col %d (%s) value %s: %s" % (idx, self.column_names[idx], repr(item), error_buffer.getvalue().rstrip()))
problems.append("col %d (%s) value %s is an %s" % (idx, self.column_names[idx], repr(item), kv.describe()))
if kv.repaired:
# If this value was repaired, update the item in the row.
#
# Warning: We expect this change to be seen by the caller.
row[idx] = kv.value
error_buffer.close()
if len(problems) == 0:
return False
return self.exclude_line(self.options.invalid_value_action,
"\n".join(problems),
line)
def _ignore_prohibited_list(self,
idx: int,
row: typing.List[str],
line: str,
problems: typing.List[str],
):
if idx < 0:
return
item: str = row[idx]
if KgtkFormat.LIST_SEPARATOR not in item:
return
if len(KgtkValue.split_list(item)) == 1:
return
problems.append("col %d (%s) value '%s'is a prohibited list" % (idx, self.column_names[idx], item))
# May be overridden
def _ignore_prohibited_lists(self, row: typing.List[str], line: str)->bool:
"""
KGTK File Format v2 prohibits "|" lists in the node1, label, and node2 columns of edge files.
The ID column does not prohibit lists, even in node files.
"""
if not self.is_edge_file:
return False
problems: typing.List[str] = [ ] # Build a list of problems.
self._ignore_prohibited_list(self.node1_column_idx, row, line, problems)
self._ignore_prohibited_list(self.label_column_idx, row, line, problems)
self._ignore_prohibited_list(self.node2_column_idx, row, line, problems)
if len(problems) == 0:
return False
return self.exclude_line(self.options.prohibited_list_action,
"\n".join(problems),
line)
# May be overridden
def _ignore_if_blank_required_fields(self, values: typing.List[str], line: str)->bool:
if self.is_edge_file:
# Ignore line_action with blank node1 fields. This code comes after
# filling missing trailing columns, although it could be reworked
# to come first.
if self.options.blank_required_field_line_action != ValidationAction.PASS and self.node1_column_idx >= 0 and len(values) > self.node1_column_idx:
node1_value: str = values[self.node1_column_idx]
if len(node1_value) == 0 or node1_value.isspace():
return self.exclude_line(self.options.blank_required_field_line_action, "node1 is blank", line)
# Ignore lines with blank node2 fields:
if self.options.blank_required_field_line_action != ValidationAction.PASS and self.node2_column_idx >= 0 and len(values) > self.node2_column_idx:
node2_value: str = values[self.node2_column_idx]
if len(node2_value) == 0 or node2_value.isspace():
return self.exclude_line(self.options.blank_required_field_line_action, "node2 is blank", line)
elif self.is_node_file:
# Ignore line_action with blank id fields. This code comes after
# filling missing trailing columns, although it could be reworked
# to come first.
if self.options.blank_required_field_line_action != ValidationAction.PASS and self.id_column_idx >= 0 and len(values) > self.id_column_idx:
id_value: str = values[self.id_column_idx]
if len(id_value) == 0 or id_value.isspace():
return self.exclude_line(self.options.blank_required_field_line_action, "id is blank", line)
return False # Do not ignore this line.
# May be overridden
def _skip_reserved_fields(self, column_name)->bool:
return False
def additional_column_names(self)->typing.List[str]:
if self.is_edge_file:
return KgtkBase.additional_edge_columns(self.column_names)
elif self.is_node_file:
return KgtkBase.additional_node_columns(self.column_names)
else:
# TODO: throw a better exception.
raise ValueError("KgtkReader: Unknown Kgtk file type.")
def merge_columns(self, additional_columns: typing.List[str])->typing.List[str]:
"""
Return a list that merges the current column names with an additional set
of column names.
"""
merged_columns: typing.List[str] = self.column_names.copy()
column_name: str
for column_name in additional_columns:
if column_name in self.column_name_map:
continue
merged_columns.append(column_name)
return merged_columns
def _get_special_column_index(self, column_name_or_idx: typing.Optional[str], special_column_idx: int)->int:
"""
Get the special column index, unless an overriding column
index or name name is provided. Returns -1 if no column found.
Note: column index values start with 1 for the first column.
"""
if column_name_or_idx is None or len(column_name_or_idx) == 0:
return special_column_idx
elif column_name_or_idx in self.column_name_map:
# By performing this test before the isdigit() test, we give
# precedence to columns with names that are integers.
return self.column_name_map[column_name_or_idx]
elif column_name_or_idx.isdigit():
column_idx: int = int(column_name_or_idx) - 1
if column_idx < 0:
return -1
if column_idx >= self.column_count:
return -1
else:
return column_idx
else:
return -1
def get_node1_column_index(self, column_name_or_idx: typing.Optional[str] = None)->int:
"""
Get the node1 column index, unless an overriding column
index or name is provided. Returns -1 if no column found.
"""
return self._get_special_column_index(column_name_or_idx, self.node1_column_idx)
def get_node1_canonical_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the canonical name for the node1 column, unless an
overriding name is provided.
TODO: Should we convert column indexes to column names?
"""
if column_name is not None and len(column_name) > 0:
return column_name
else:
return KgtkFormat.NODE1
def get_node1_column_actual_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the actual name for the node1 column or its overriding column.
Return an empty string if the column was not found.
"""
idx: int = self.get_node1_column_index(column_name)
if idx >= 0:
return self.column_names[idx]
else:
return ""
def get_label_column_index(self, column_name_or_idx: typing.Optional[str] = None)->int:
"""
Get the label column index, unless an overriding column
index or name is provided. Returns -1 if no column found.
"""
return self._get_special_column_index(column_name_or_idx, self.label_column_idx)
def get_label_canonical_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the canonical name for the label column, unless an
overriding name is provided.
"""
if column_name is not None and len(column_name) > 0:
return column_name
else:
return KgtkFormat.LABEL
def get_label_column_actual_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the actual name for the label column or its overriding column.
Return an empty string if the column was not found.
"""
idx: int = self.get_label_column_index(column_name)
if idx >= 0:
return self.column_names[idx]
else:
return ""
def get_node2_column_index(self, column_name_or_idx: typing.Optional[str] = None)->int:
"""
Get the node2 column index, unless an overriding column
index or name is provided. Returns -1 if no column found.
"""
return self._get_special_column_index(column_name_or_idx, self.node2_column_idx)
def get_node2_canonical_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the canonical name for the node2 column, unless an
overriding name is provided.
"""
if column_name is not None and len(column_name) > 0:
return column_name
else:
return KgtkFormat.NODE2
def get_node2_column_actual_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the actual name for the node2 column or its overriding column.
Return an empty string if the column was not found.
"""
idx: int = self.get_node2_column_index(column_name)
if idx >= 0:
return self.column_names[idx]
else:
return ""
def get_id_column_index(self, column_name_or_idx: typing.Optional[str] = None)->int:
"""
Get the id column index, unless an overriding column
index or name is provided. Returns -1 if no column found.
"""
return self._get_special_column_index(column_name_or_idx, self.id_column_idx)
def get_id_canonical_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the canonical name for the id column, unless an
overriding name is provided.
"""
if column_name is not None and len(column_name) > 0:
return column_name
else:
return KgtkFormat.ID
def get_id_column_actual_name(self, column_name: typing.Optional[str]=None)->str:
"""
Get the actual name for the id column or its overriding column.
Return an empty string if the column was not found.
"""
idx: int = self.get_id_column_index(column_name)
if idx >= 0:
return self.column_names[idx]
else:
return ""
@classmethod
def add_debug_arguments(cls, parser: ArgumentParser, expert: bool = False):
    """Add the standard error/feedback command-line options to parser.

    When expert is False, most options are hidden from the help text
    (via SUPPRESS) but still parse and initialize normally.
    """
    # This helper function makes it easy to suppress options from
    # The help message. The options are still there, and initialize
    # what they need to initialize.
    def h(msg: str)->str:
        if expert:
            return msg
        else:
            return SUPPRESS

    egroup: _ArgumentGroup = parser.add_argument_group(h("Error and feedback messages"),
                                                       h("Send error messages and feedback to stderr or stdout, " +
                                                         "control the amount of feedback and debugging messages."))

    # Avoid the argparse bug that prevents these two arguments from having
    # their help messages suppressed directly.  The expert branch places
    # them in a mutually exclusive group; the non-expert branch adds them
    # individually with suppressed help.
    #
    # TODO: Is there a better fix?
    #
    # TODO: replace --errors-to-stdout and --errors-to-stderr with
    # --errors-to=stdout and --errors-to=stderr, using either an enum
    # or choices. That will avoid the argparse bug, too.
    if expert:
        errors_to = egroup.add_mutually_exclusive_group()

        errors_to.add_argument( "--errors-to-stdout", dest="errors_to_stdout", metavar="optional True|False",
                                help="Send errors to stdout instead of stderr. (default=%(default)s).",
                                type=optional_bool, nargs='?', const=True, default=False)

        errors_to.add_argument( "--errors-to-stderr", dest="errors_to_stderr", metavar="optional True|False",
                                help="Send errors to stderr instead of stdout. (default=%(default)s).",
                                type=optional_bool, nargs='?', const=True, default=False)
    else:
        # NOTE(review): this help string ends "),"  where the others end
        # ")." — looks like a typo, but it is runtime text so it is left
        # unchanged here.
        egroup.add_argument( "--errors-to-stderr", dest="errors_to_stderr", metavar="optional True|False",
                             help=h("Send errors to stderr instead of stdout. (default=%(default)s),"),
                             type=optional_bool, nargs='?', const=True, default=False)

        egroup.add_argument( "--errors-to-stdout", dest="errors_to_stdout", metavar="optional True|False",
                             help=h("Send errors to stdout instead of stderr. (default=%(default)s)."),
                             type=optional_bool, nargs='?', const=True, default=False)

    egroup.add_argument( "--show-options", dest="show_options", metavar="optional True|False",
                         help=h("Print the options selected (default=%(default)s)."),
                         type=optional_bool, nargs='?', const=True, default=False)

    # --verbose is always visible; --very-verbose is expert-only.
    egroup.add_argument("-v", "--verbose", dest="verbose", metavar="optional True|False",
                        help="Print additional progress messages (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)

    egroup.add_argument( "--very-verbose", dest="very_verbose", metavar="optional True|False",
                         help=h("Print additional progress messages (default=%(default)s)."),
                         type=optional_bool, nargs='?', const=True, default=False)
@classmethod
def show_debug_arguments(cls,
errors_to_stdout: bool = False,
errors_to_stderr: bool = True,
show_options: bool = False,
verbose: bool = False,
very_verbose: bool = False,
out: typing.TextIO = sys.stderr):
if errors_to_stdout:
print("--errors-to-stdout", file=out)
if errors_to_stderr:
print("--errors-to-stderr", file=out)
if show_options:
print("--show-options", file=out)
if verbose:
print("--verbose", file=out)
if very_verbose:
print("--very-verbose", file=out)
def report_summary(self):
print("Data lines read: %d" % self.data_lines_read, file=self.error_file, flush=True)
print("Data lines passed: %d" % self.data_lines_passed, file=self.error_file, flush=True)
if self.data_lines_skipped > 0:
print("Data lines skipped: %d" % self.data_lines_skipped, file=self.error_file, flush=True)
if self.data_lines_ignored > 0:
print("Data lines ignored: %d" % self.data_lines_ignored, file=self.error_file, flush=True)
if self.data_lines_filled > 0:
print("Data lines filled: %d" % self.data_lines_filled, file=self.error_file, flush=True)
if self.data_lines_truncated > 0:
print("Data lines truncated: %d" % self.data_lines_truncated, file=self.error_file, flush=True)
if self.data_lines_excluded_short > 0:
print("Data lines excluded due to too few columns: %d" % self.data_lines_excluded_short, file=self.error_file, flush=True)
if self.data_lines_excluded_long > 0:
print("Data lines excluded due to too many columns: %d" % self.data_lines_excluded_long, file=self.error_file, flush=True)
if self.data_lines_excluded_blank_fields > 0:
print("Data lines excluded due to blank fields: %d" % self.data_lines_excluded_blank_fields, file=self.error_file, flush=True)
if self.data_lines_excluded_invalid_values > 0:
print("Data lines excluded due to invalid values: %d" % self.data_lines_excluded_invalid_values, file=self.error_file, flush=True)
if self.data_lines_excluded_prohibited_lists > 0:
print("Data lines excluded due to prohibited lists: %d" % self.data_lines_excluded_prohibited_lists, file=self.error_file, flush=True)
if self.data_lines_excluded_by_filter > 0:
print("Data lines excluded due to a filter: %d" % self.data_lines_excluded_by_filter, file=self.error_file, flush=True)
if self.data_errors_reported > 0:
print("Data errors reported: %d" % self.data_errors_reported, file=self.error_file, flush=True)
def main():
    """
    Test the KGTK file reader.

    Parses command-line options, opens the named KGTK file, iterates over it
    using the access method selected by --test, and reports the number of
    lines read to the error stream.
    """
    # The EdgeReader and NodeReader imports are deferred to avoid circular
    # imports.  The names are not referenced directly below; presumably they
    # must be importable before KgtkReader.open(...) dispatches to an edge or
    # node reader -- TODO confirm they are still required.
    from kgtk.io.edgereader import EdgeReader  # noqa: F401
    # The NodeReader import is deferred to avoid circular imports.
    from kgtk.io.nodereader import NodeReader  # noqa: F401

    parser = ArgumentParser()
    parser.add_argument(dest="kgtk_file", help="The KGTK file to read", type=Path, nargs="?")
    KgtkReader.add_debug_arguments(parser, expert=True)
    parser.add_argument("--test", dest="test_method",
                        help="The test to perform (default=%(default)s).",
                        choices=["rows", "concise-rows",
                                 "kgtk-values", "concise-kgtk-values",
                                 "dicts", "concise-dicts",
                                 "kgtk-value-dicts", "concise-kgtk-value-dicts"],
                        default="rows")
    parser.add_argument("--test-validate", dest="test_validate",
                        help="Validate KgtkValue objects in test (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)
    KgtkReaderOptions.add_arguments(parser, mode_options=True, validate_by_default=True, expert=True)
    KgtkValueOptions.add_arguments(parser, expert=True)
    args: Namespace = parser.parse_args()

    # Progress and error messages go to stdout or stderr as requested.
    error_file: typing.TextIO = sys.stdout if args.errors_to_stdout else sys.stderr

    # Build the option structures.
    reader_options: KgtkReaderOptions = KgtkReaderOptions.from_args(args)
    value_options: KgtkValueOptions = KgtkValueOptions.from_args(args)

    if args.show_options:
        # Bug fix: the test method is stored under dest="test_method"; the
        # original code read the nonexistent attribute args.test, which
        # raised AttributeError whenever --show-options was supplied.
        print("--test=%s" % str(args.test_method), file=error_file)
        print("--test-validate=%s" % str(args.test_validate), file=error_file)
        reader_options.show(out=error_file)
        value_options.show(out=error_file)
        print("=======", file=error_file, flush=True)

    kr: KgtkReader = KgtkReader.open(args.kgtk_file,
                                     error_file=error_file,
                                     options=reader_options,
                                     value_options=value_options,
                                     verbose=args.verbose,
                                     very_verbose=args.very_verbose)

    line_count: int = 0
    row: typing.List[str]
    kgtk_values: typing.List[KgtkValue]
    dict_row: typing.Mapping[str, str]
    kgtk_value_dict: typing.Mapping[str, str]
    if args.test_method == "rows":
        if args.verbose:
            print("Testing iterating over rows.", file=error_file, flush=True)
        for row in kr:
            line_count += 1
    elif args.test_method == "concise-rows":
        if args.verbose:
            print("Testing iterating over concise rows.", file=error_file, flush=True)
        for row in kr.concise_rows():
            line_count += 1
    elif args.test_method == "kgtk-values":
        if args.verbose:
            print("Testing iterating over KgtkValue rows.", file=error_file, flush=True)
        for kgtk_values in kr.kgtk_values(validate=args.test_validate):
            line_count += 1
    elif args.test_method == "concise-kgtk-values":
        if args.verbose:
            print("Testing iterating over concise KgtkValue rows.", file=error_file, flush=True)
        for kgtk_values in kr.concise_kgtk_values(validate=args.test_validate):
            line_count += 1
    elif args.test_method == "dicts":
        if args.verbose:
            print("Testing iterating over dicts.", file=error_file, flush=True)
        for dict_row in kr.dicts():
            line_count += 1
    elif args.test_method == "concise-dicts":
        if args.verbose:
            print("Testing iterating over concise dicts.", file=error_file, flush=True)
        for dict_row in kr.dicts(concise=True):
            line_count += 1
    elif args.test_method == "kgtk-value-dicts":
        if args.verbose:
            print("Testing iterating over KgtkValue dicts.", file=error_file, flush=True)
        for kgtk_value_dict in kr.kgtk_value_dicts(validate=args.test_validate):
            line_count += 1
    elif args.test_method == "concise-kgtk-value-dicts":
        if args.verbose:
            print("Testing iterating over concise KgtkValue dicts.", file=error_file, flush=True)
        for kgtk_value_dict in kr.kgtk_value_dicts(concise=True, validate=args.test_validate):
            line_count += 1
    print("Read %d lines" % line_count, file=error_file, flush=True)

if __name__ == "__main__":
    main()
| 51.037131 | 188 | 0.584259 |
4b619ac7942b89a01c003f6c20172ce6e16138d9 | 7,152 | py | Python | pytext/models/representations/bilstm.py | kashpop/pytext | cb9fe110d69e39ac4cf6f2f29f707252999a0f8e | [
"BSD-3-Clause"
] | null | null | null | pytext/models/representations/bilstm.py | kashpop/pytext | cb9fe110d69e39ac4cf6f2f29f707252999a0f8e | [
"BSD-3-Clause"
] | null | null | null | pytext/models/representations/bilstm.py | kashpop/pytext | cb9fe110d69e39ac4cf6f2f29f707252999a0f8e | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from typing import Optional, Tuple
import torch
import torch.nn as nn
import torch.onnx
from pytext.config import ConfigBase
from pytext.utils import cuda
from pytext.utils.usage import log_class_usage
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from .representation_base import RepresentationBase
class BiLSTM(RepresentationBase):
    """
    `BiLSTM` implements a multi-layer bidirectional LSTM representation layer
    preceded by a dropout layer.
    Args:
        config (Config): Configuration object of type BiLSTM.Config.
        embed_dim (int): The number of expected features in the input.
        padding_value (float): Value for the padded elements. Defaults to 0.0.
    Attributes:
        padding_value (float): Value for the padded elements.
        dropout (nn.Dropout): Dropout layer preceding the LSTM.
        lstm (nn.LSTM): LSTM layer that operates on the inputs.
        representation_dim (int): The calculated dimension of the output features
            of BiLSTM.
    """
    class Config(RepresentationBase.Config, ConfigBase):
        """
        Configuration class for `BiLSTM`.
        Attributes:
            dropout (float): Dropout probability to use. Defaults to 0.4.
            lstm_dim (int): Number of features in the hidden state of the LSTM.
                Defaults to 32.
            num_layers (int): Number of recurrent layers. Eg. setting `num_layers=2`
                would mean stacking two LSTMs together to form a stacked LSTM,
                with the second LSTM taking in the outputs of the first LSTM and
                computing the final result. Defaults to 1.
            bidirectional (bool): If `True`, becomes a bidirectional LSTM. Defaults
                to `True`.
            pack_sequence (bool): If `True`, inputs are packed with
                `pack_padded_sequence` before the LSTM and unpacked afterwards.
                Defaults to `True`.
            disable_sort_in_jit (bool): If `True`, disable sort in pack_padded_sequence
                to allow inference on GPU. Defaults to `False`.
        """
        dropout: float = 0.4
        lstm_dim: int = 32
        num_layers: int = 1
        bidirectional: bool = True
        pack_sequence: bool = True
        disable_sort_in_jit: bool = False
    def __init__(
        self, config: Config, embed_dim: int, padding_value: float = 0.0
    ) -> None:
        super().__init__(config)
        self.padding_value: float = padding_value
        self.dropout = nn.Dropout(config.dropout)
        self.lstm = nn.LSTM(
            embed_dim,
            config.lstm_dim,
            num_layers=config.num_layers,
            bidirectional=config.bidirectional,
            batch_first=True,
        )
        # A bidirectional LSTM concatenates the forward and backward hidden
        # states, doubling the output feature dimension.
        self.representation_dim: int = config.lstm_dim * (
            2 if config.bidirectional else 1
        )
        self.pack_sequence = config.pack_sequence
        self.disable_sort_in_jit = config.disable_sort_in_jit
        log_class_usage(__class__)
    def forward(
        self,
        embedded_tokens: torch.Tensor,
        seq_lengths: torch.Tensor,
        states: Optional[Tuple[torch.Tensor, torch.Tensor]] = None,
    ) -> Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]:
        """
        Given an input batch of sequential data such as word embeddings, produces
        a bidirectional LSTM representation of the sequential input and new state
        tensors.
        Args:
            embedded_tokens (torch.Tensor): Input tensor of shape
                (bsize x seq_len x input_dim).
            seq_lengths (torch.Tensor): List of sequences lengths of each batch element.
            states (Tuple[torch.Tensor, torch.Tensor]): Tuple of tensors containing
                the initial hidden state and the cell state of each element in
                the batch. Each of these tensors have a dimension of
                (bsize x num_layers * num_directions x nhid). Defaults to `None`.
        Returns:
            Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: Bidirectional
                LSTM representation of input and the state of the LSTM `t = seq_len`.
                Shape of representation is (bsize x seq_len x representation_dim).
                Shape of each state is (bsize x num_layers * num_directions x nhid).
        """
        if self.dropout.p > 0.0:
            embedded_tokens = self.dropout(embedded_tokens)
        if states is not None:
            # convert (h0, c0) from (bsz x num_layers*num_directions x nhid) to
            # (num_layers*num_directions x bsz x nhid), the layout nn.LSTM expects
            states = (
                states[0].transpose(0, 1).contiguous(),
                states[1].transpose(0, 1).contiguous(),
            )
        else:
            # We need to send in a zero state that matches the batch size, because
            # torch.jit tracing currently traces this as constant and therefore
            # locks the traced model into a static batch size.
            # see https://github.com/pytorch/pytorch/issues/16664
            state = torch.zeros(
                self.config.num_layers * (2 if self.config.bidirectional else 1),
                embedded_tokens.size(0),  # batch size
                self.config.lstm_dim,
                device=torch.cuda.current_device() if cuda.CUDA_ENABLED else None,
            )
            # h0 and c0 share the same zero tensor (read-only inside the LSTM).
            states = (state, state)
        if torch.onnx.is_in_onnx_export():
            # ONNX export path: call the caffe2 InferenceLSTM op directly with
            # the flattened LSTM weights so the export graph is self-contained.
            lstm_in = [embedded_tokens.contiguous(), states[0], states[1]] + [
                param.detach() for param in self.lstm._flat_weights
            ]
            rep, new_state_0, new_state_1 = torch.ops._caffe2.InferenceLSTM(
                lstm_in,
                self.lstm.num_layers,
                self.lstm.bias,
                True,
                self.lstm.bidirectional,
            )
            new_state = (new_state_0, new_state_1)
        else:
            if self.pack_sequence:
                # We need to disable sorting when jit tracing because it introduces
                # issues with sorted indices not in right device in pack_padded_sequence
                # using GPU inference
                rnn_input = pack_padded_sequence(
                    embedded_tokens,
                    seq_lengths.cpu(),
                    batch_first=True,
                    enforce_sorted=True
                    if self.disable_sort_in_jit
                    and torch._C._get_tracing_state() is not None
                    else False,
                )
            else:
                rnn_input = embedded_tokens
            rep, new_state = self.lstm(rnn_input, states)
            if self.pack_sequence:
                # total_length makes sure the output from LSTM is padded back to
                # the input's sequence length.
                rep, _ = pad_packed_sequence(
                    rep,
                    padding_value=self.padding_value,
                    batch_first=True,
                    total_length=embedded_tokens.size(1),
                )
        # convert states back to (bsz x num_layers*num_directions x nhid) to be
        # used in data parallel model
        new_state = (new_state[0].transpose(0, 1), new_state[1].transpose(0, 1))
        return rep, new_state
| 42.070588 | 88 | 0.604306 |
5a8246549294bce1419da602fc67e2c466a8bbea | 488 | py | Python | casepy/eulerRuO2/nSod32768x1/chars.py | will-iam/Variant | 5b6732134fd51cf6c2b90b51b7976be0693ba28d | [
"MIT"
] | 8 | 2017-05-04T07:50:02.000Z | 2019-05-17T02:27:20.000Z | casepy/eulerRuO2/nSod32768x1/chars.py | will-iam/Variant | 5b6732134fd51cf6c2b90b51b7976be0693ba28d | [
"MIT"
] | null | null | null | casepy/eulerRuO2/nSod32768x1/chars.py | will-iam/Variant | 5b6732134fd51cf6c2b90b51b7976be0693ba28d | [
"MIT"
] | null | null | null | import sys, os
sys.path.insert(1, os.path.join(sys.path[0], '../../../'))
import script.rio as io
import script.initial_condition.sod as sod
# Domain properties
lx = 1.0  # domain extent along x (presumably dimensionless units -- confirm against solver)
ly = 1.0  # domain extent along y
Nx = 32768  # number of cells along x
Ny = 1  # number of cells along y: a single row, i.e. a quasi-1D tube
# Scheme execution options
T = 0.2  # final simulation time
CFL = 0.5  # Courant-Friedrichs-Lewy number used by the scheme
gamma = 1.4  # ratio of specific heats passed to the Sod initial condition
BCtype = 'N'  # boundary-condition type code ('N' presumably Neumann -- TODO confirm)
BClayer = 1  # number of boundary (ghost) layers
quantityList = ['rho', 'rhou_x', 'rhou_y', 'rhoE']  # conserved Euler quantities
def buildme(quantityDict, coords_to_uid, coords_to_bc):
    """Fill quantityDict with the Sod initial condition for this case's grid,
    forwarding the module-level grid, domain, and boundary parameters."""
    sod.build(quantityDict, coords_to_uid, coords_to_bc, Nx, Ny, lx, ly, BClayer, BCtype, gamma)
| 20.333333 | 96 | 0.684426 |
4b2d66d52126672b5084b1cffdcf76ee14f2722e | 3,940 | py | Python | django/templatetags/static.py | Miserlou/django | 35ddeee45573de57ae3c791bf36496b4a7028ddf | [
"BSD-3-Clause"
] | 1 | 2016-03-07T15:37:48.000Z | 2016-03-07T15:37:48.000Z | django/templatetags/static.py | erdem/django | 76d5daa60f90d3692b3ff3b7f5054e4bc7c1f374 | [
"BSD-3-Clause"
] | null | null | null | django/templatetags/static.py | erdem/django | 76d5daa60f90d3692b3ff3b7f5054e4bc7c1f374 | [
"BSD-3-Clause"
] | null | null | null | from urlparse import urljoin
from django import template
from django.template.base import Node
from django.utils.encoding import iri_to_uri
register = template.Library()
class PrefixNode(template.Node):
    """
    Template node that renders a URL prefix taken from settings (e.g.
    ``STATIC_URL`` or ``MEDIA_URL``), or stores it in a context variable
    when the tag is used with ``as varname``.
    """

    def __repr__(self):
        return "<PrefixNode for %r>" % self.name

    def __init__(self, varname=None, name=None):
        if name is None:
            raise template.TemplateSyntaxError(
                "Prefix nodes must be given a name to return.")
        self.varname = varname
        self.name = name

    @classmethod
    def handle_token(cls, parser, token, name):
        """
        Class method to parse prefix node and return a Node.

        Accepts either ``{% tag %}`` or ``{% tag as varname %}``.
        """
        tokens = token.contents.split()
        if len(tokens) > 1 and tokens[1] != 'as':
            raise template.TemplateSyntaxError(
                "First argument in '%s' must be 'as'" % tokens[0])
        if len(tokens) > 1:
            # Bug fix: ``{% tag as %}`` (no variable name) used to raise an
            # IndexError here; report it as a template syntax error instead.
            if len(tokens) < 3:
                raise template.TemplateSyntaxError(
                    "'%s as' must be followed by a variable name" % tokens[0])
            varname = tokens[2]
        else:
            varname = None
        return cls(varname, name)

    @classmethod
    def handle_simple(cls, name):
        # If settings cannot be imported (e.g. standalone use of the template
        # engine), fall back to an empty prefix rather than failing.
        try:
            from django.conf import settings
        except ImportError:
            prefix = ''
        else:
            prefix = iri_to_uri(getattr(settings, name, ''))
        return prefix

    def render(self, context):
        prefix = self.handle_simple(self.name)
        if self.varname is None:
            return prefix
        # ``as varname`` form: store the prefix and render nothing.
        context[self.varname] = prefix
        return ''
@register.tag
def get_static_prefix(parser, token):
    """
    Populates a template variable with the static prefix,
    ``settings.STATIC_URL``.

    Without ``as varname`` the prefix is rendered inline; with it, the
    prefix is stored in the context variable and nothing is rendered.

    Usage::
        {% get_static_prefix [as varname] %}
    Examples::
        {% get_static_prefix %}
        {% get_static_prefix as static_prefix %}
    """
    # Delegate parsing to the shared prefix-node handler.
    return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
    """
    Populates a template variable with the media prefix,
    ``settings.MEDIA_URL``.

    Without ``as varname`` the prefix is rendered inline; with it, the
    prefix is stored in the context variable and nothing is rendered.

    Usage::
        {% get_media_prefix [as varname] %}
    Examples::
        {% get_media_prefix %}
        {% get_media_prefix as media_prefix %}
    """
    # Delegate parsing to the shared prefix-node handler.
    return PrefixNode.handle_token(parser, token, "MEDIA_URL")
class StaticNode(Node):
    """
    Template node that joins a path with the ``STATIC_URL`` prefix,
    optionally storing the result in a context variable (``as varname``)
    instead of rendering it.
    """

    def __init__(self, varname=None, path=None):
        if path is None:
            raise template.TemplateSyntaxError(
                "Static template nodes must be given a path to return.")
        self.path = path
        self.varname = varname

    def url(self, context):
        # Resolve the (possibly variable) path against the context before
        # applying the prefix.
        path = self.path.resolve(context)
        return self.handle_simple(path)

    def render(self, context):
        url = self.url(context)
        if self.varname is None:
            return url
        # ``as varname`` form: store the URL and render nothing.
        context[self.varname] = url
        return ''

    @classmethod
    def handle_simple(cls, path):
        # urljoin leaves absolute URLs in `path` intact while joining
        # relative paths onto the STATIC_URL prefix.
        return urljoin(PrefixNode.handle_simple("STATIC_URL"), path)

    @classmethod
    def handle_token(cls, parser, token):
        """
        Class method to parse the tag token and return a Node.
        """
        bits = token.split_contents()
        if len(bits) < 2:
            raise template.TemplateSyntaxError(
                "'%s' takes at least one argument (path to file)" % bits[0])
        path = parser.compile_filter(bits[1])
        # len(bits) >= 2 is already guaranteed by the check above.
        if bits[-2] == 'as':
            # Bug fix: take the LAST token as the variable name.  The old
            # code read bits[3], which picked the wrong token (often the
            # literal 'as') whenever extra arguments preceded 'as'.
            varname = bits[-1]
        else:
            varname = None
        return cls(varname, path)
@register.tag('static')
def do_static(parser, token):
    """
    Joins the given path with the STATIC_URL setting.

    With ``as varname`` the resulting URL is stored in the context variable
    instead of being rendered.

    Usage::
        {% static path [as varname] %}
    Examples::
        {% static "myapp/css/base.css" %}
        {% static variable_with_path %}
        {% static "myapp/css/base.css" as admin_base_css %}
        {% static variable_with_path as varname %}
    """
    # Delegate parsing to StaticNode; it validates the arguments.
    return StaticNode.handle_token(parser, token)
def static(path):
    """Python-level helper: return *path* joined with the STATIC_URL prefix."""
    return StaticNode.handle_simple(path)
| 24.936709 | 76 | 0.596954 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.