from typing import List
import pyblish.api
import openpype.hosts.blender.api.action
class ValidateNoColonsInName(pyblish.api.InstancePlugin):
"""There cannot be colons in names
    Object or bone names cannot include colons. Other software does not
    handle colons correctly.
"""
order = openpype.api.ValidateContentsOrder
hosts = ["blender"]
families = ["model", "rig"]
version = (0, 1, 0)
label = "No Colons in names"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
@classmethod
def get_invalid(cls, instance) -> List:
invalid = []
        for obj in instance:
if ':' in obj.name:
invalid.append(obj)
if obj.type == 'ARMATURE':
for bone in obj.data.bones:
if ':' in bone.name:
invalid.append(obj)
break
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Objects found with colon in name: {invalid}")
|
import glob
import os
import numpy as np
import cv2
import time
import json
from tqdm import tqdm
import random
from track_and_detect_new import Track_And_Detect
def data_process(json_file_path='/export/home/zby/SiamFC-PyTorch/data/posetrack/annotations/val'):
print('----------------------------------------')
print('Loading images....')
# load videos
json_files = os.listdir(json_file_path)
    random_number = random.randint(0, len(json_files) - 1)
    json_name = json_files[random_number]
    json_name = '013534_mpii_test.json'  # override the random pick with a fixed test video
json_file = os.path.join(json_file_path,json_name)
with open(json_file,'r') as f:
annotation = json.load(f)['annotations']
bbox_dict = dict()
for anno in annotation:
track_id = anno['track_id']
frame_id = anno['image_id'] % 1000
        if 'bbox' not in anno or frame_id != 0:
continue
bbox = anno['bbox']
if bbox[2]==0 or bbox[3]==0:
continue
bbox = [bbox[0], bbox[1], bbox[2], bbox[3]]
if not track_id in bbox_dict:
bbox_dict[track_id] = {'bbox':bbox,'frame_id':frame_id}
image_path = json_file.replace('annotations','images').replace('.json','')
filenames = sorted(glob.glob(os.path.join(image_path, "*.jpg")),
key=lambda x: int(os.path.basename(x).split('.')[0]))
frames = [cv2.imread(filename) for filename in filenames]
#frames = [cv2.cvtColor(cv2.imread(filename), cv2.COLOR_BGR2RGB) for filename in filenames]
    print('Images have been loaded')
im_H,im_W,im_C = frames[0].shape
    out_path = '/export/home/zby/SiamFC/data/result/{}.avi'.format(json_name.replace('.json', ''))
    videoWriter = cv2.VideoWriter(out_path, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 10, (im_W, im_H))
return videoWriter, frames, bbox_dict, json_name
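# Example (illustrative): a PoseTrack annotation bbox is stored as [x, y, w, h];
# track() below converts it to the tracker input [xmin, ymin, xmax, ymax, score],
# e.g. [10, 20, 30, 40] -> [10, 20, 40, 60, 1].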
def track(gpu_id=0):
    videoWriter, frames, bbox_dict, json_name = data_process()
    tracker = Track_And_Detect(gpu_id=gpu_id)
# starting tracking
#print(bbox_dict)
    print('First frame has {} boxes in total for tracking'.format(len(bbox_dict)))
bbox_list = []
for key, item in bbox_dict.items():
x,y,w,h = item['bbox']
bbox_list.append([x, y, x+w, y+h, 1])
score_thresh = 0.01
predict_dict = dict()
tracker.init_tracker(frames[0],bbox_list)
    pbar = tqdm(total=len(frames))
for idx, frame in enumerate(frames):
flag = False
det_id = []
start_time = time.time()
bbox_list = tracker.multi_track(frame)
#print(bbox_list)
end_time = time.time()
pbar.update(1)
        pbar.set_description('Processing video {}: tracking frame {:>3} took {:.3f} seconds'.format(json_name, idx+1, end_time-start_time))
for bbox in bbox_list:
score,track_id = bbox[4], bbox[5]
if score < score_thresh:
flag = True
det_id.append(track_id)
        if flag:
            pbar.set_description('Tracking ids {} in frame {} failed; running detection to revise'.format(det_id, idx+1))
start_time = time.time()
det_boxes = tracker.detect(frame)
tracker.match_id(det_boxes)
det_poses = []
for det_box in det_boxes:
pose_position, pose_val, pose_heatmap = tracker.pose_detect(frame, det_box)
#print(det_box, pose_position)
det_poses.append(pose_position)
#print(det_poses)
            pbar.set_description('Detecting the {}th frame took {:.3f} seconds'.format(idx+1, time.time()-start_time))
for bbox in bbox_list:
xmin,ymin,xmax,ymax,score,track_id = bbox
score = np.round(score, 3)
            if score < score_thresh:
cv2.rectangle(frame, (int(xmin),int(ymin)), (int(xmax),int(ymax)), (0,0,255), 2)
cv2.putText(frame, 'id:'+str(track_id)+' score:'+str(score), (int(xmin),int(ymin)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 0, 255), 1)
else:
cv2.rectangle(frame, (int(xmin),int(ymin)), (int(xmax),int(ymax)), (0,255,0), 2)
cv2.putText(frame, 'id:'+str(track_id)+' score:'+str(score), (int(xmin),int(ymin)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 255, 0), 1)
        if flag:
for i,bbox in enumerate(det_boxes):
det_pose = det_poses[i]
for position in det_pose:
x,y = position
cv2.circle(frame,(int(x),int(y)),10,(0,0,255),-1)
xmin,ymin,xmax,ymax,score = bbox
cv2.rectangle(frame, (int(xmin),int(ymin)), (int(xmax),int(ymax)), (0,255,255), 2)
cv2.putText(frame, 'detection', (int(xmin),int(ymin)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 255, 0), 1)
videoWriter.write(frame)
    pbar.close()
    videoWriter.release()
if __name__ == "__main__":
#track_and_detect(model_path='/export/home/zby/SiamFC/models/output/siamfc_35.pth')
#mytrack = Track_And_Detect(gpu_id=0)
track(gpu_id=0)
# tracker = Track_And_Detect(gpu_id=[0,0,0])
# im_path = '/export/home/zby/SiamFC/data/demo_images/13.jpg'
# im = cv2.imread(im_path)
# bbox = [75,56,360,380,0.99,8]
# preds, maxvals, heatmaps = tracker.pose_detect(im, bbox)
# for pred in preds:
# x,y = pred
# cv2.circle(im,(int(x),int(y)), 5, (0,0,255), -1)
# cv2.imwrite('result.jpg',im)
|
from pyomo.environ import *
def create_pf_constraints_future(M, surrogate_grid, opf_method):
assert opf_method in ['exact', 'lossless'], 'Unknown opf_method %s' % opf_method
# Ohm's Law
M.ohm_law_future_constraints = ConstraintList()
for sc_ind in range(M.n_scenarios):
for t_ind_true in M.timesteps_future:
t_ind = t_ind_true - M.t_current_ind - 1
for line_ind, line in enumerate(surrogate_grid.lines):
node_from_ind = surrogate_grid.nodes.index(line.node_from)
node_to_ind = surrogate_grid.nodes.index(line.node_to)
line_i = line.g * (M.V_nodes_sc[(sc_ind, t_ind_true, node_from_ind)] -
M.V_nodes_sc[(sc_ind, t_ind_true, node_to_ind)])
M.ohm_law_future_constraints.add(M.I_lines_sc[(sc_ind, t_ind_true, line_ind)] == line_i)
# Power flow
M.power_balance_future_constraints = ConstraintList()
for sc_ind in range(M.n_scenarios):
for t_ind_true in M.timesteps_future:
t_ind = t_ind_true - M.t_current_ind - 1
for n1_ind, n1 in enumerate(surrogate_grid.nodes):
v_node = M.V_nodes_sc[(sc_ind, t_ind_true, n1_ind)] if opf_method == 'exact' else M.ref_voltage
p_node = -v_node * sum([surrogate_grid.Y[n1_ind, n2_ind] * M.V_nodes_sc[(sc_ind, t_ind_true, n2_ind)]
for n2_ind in range(surrogate_grid.n_nodes)])
M.power_balance_future_constraints.add(M.P_nodes_sc[(sc_ind, t_ind_true, n1_ind)] == p_node)
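# --- Illustrative sketch (standalone toy, not project code): the same
# Ohm's-law pattern I = g * (V_from - V_to) on a single line between two
# nodes. The names m, g below are hypothetical.
if __name__ == "__main__":
    m = ConcreteModel()
    m.V = Var([0, 1], initialize=1.0)
    m.I = Var()
    g = 2.5  # assumed line conductance
    m.ohm = Constraint(expr=m.I == g * (m.V[0] - m.V[1]))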
|
from enum import Enum, unique
class MultiscaleData:
"""Maintains the multiscale data for relative area and complexity"""
def set_vals_at_scale(self, scale, relative_area, complexity):
"""Assign relative area & complexity values at the given scale."""
self._scales.add(scale)
self._area_map[scale] = relative_area
self._complexity_map[scale] = complexity
def __init__(self, name="",
scales=[], relative_area=[], complexity=[],
row_labels=["Relative Area", "Multiscale complexity"]):
"""Generate MultiscaleData using given values."""
self.name = name
self.regress_val = self.name
self.row_labels = row_labels
self._scales = set()
self._area_map = {}
self._complexity_map = {}
# Run through collected scales and map them in order to areas and complexities
for i, scale in enumerate(scales):
self.set_vals_at_scale(scale, relative_area[i], complexity[i])
def get_scales(self): return self._scales
def get_relative_area(self, scale): return self._area_map.get(scale)
def get_complexity(self, scale): return self._complexity_map.get(scale)
class MultiscaleDisjointDatasetException(Exception):
def __init__(self, dataset:MultiscaleData):
super().__init__("The dataset " + dataset.name + " being added has disjoint scale values.")
_SUCCESS = 0
class DatasetAppendOutput:
"""Stores output values and flags from appending functions.
By default is a success flag and no output."""
    # Used to define the various outputs returned by the append function.
    SUCCESS = _SUCCESS  # SUCCESS - Has no output values
    SCALES_IGNORED_ERROR = -2  # SCALES_IGNORED - Outputs list of MultiscaleData objects that had ignored scales
    ERROR = -1  # ERROR - A general error has occurred
def __init__(self, flag=_SUCCESS, output=None):
"""Output for MultiscaleDataset append function.
@param flag - Integer flag value that corresponds to the success/error value of output.
@param output - Value of output. If SUCCESS, output None."""
self._value = output
self._flag = flag
def __eq__(self, other):
"""Can be used to compare against itself or against AppendOutputEnums."""
if isinstance(other, DatasetAppendOutput):
return other._flag == self._flag
elif isinstance(other, int):
return other == self._flag
else:
return False
def __bool__(self):
"""Can be used to check if an append was successful."""
return self._flag == _SUCCESS
def get_value(self):
"""Get value of output."""
return self._value
def _multiscaledataset_append_ignore_unaligned_scales(self, dataset:MultiscaleData) -> DatasetAppendOutput:
"""Add new result data to dataset. Ignores unaligned scales.
@param scale_data - scale data to be inserted. Cannot be none."""
# if the dataset is unusable (scales are disjoint) do not continue
new_scales = dataset.get_scales()
if self._scales.isdisjoint(new_scales):
raise MultiscaleDisjointDatasetException(dataset)
# Add data to dataset
new_index = len(self._datasets)
self._datasets.append(dataset)
self._names.append(dataset.name)
self._regress_vals.append(dataset.regress_val)
self._row_labels.append(dataset.row_labels)
# Create unified scale set through intersection of scale sets
unified_scales = self._scales.intersection(new_scales)
# Check if the unified scale values have not changed
    scales_changed = self._scales != unified_scales
self._scales = unified_scales
del unified_scales
# Update scales list if needed
if scales_changed: self._scales_list = self.build_ordered_scales()
# Run through dataset and generate/modify unified data values
for index in range(new_index, -1, -1):
# Get current dataset being looked at
curr_dataset = self._datasets[index]
# Generate new area lists, and complexity lists
new_areas = []
new_complexities = []
for scale in self._scales_list:
new_areas.append(curr_dataset.get_relative_area(scale))
new_complexities.append(curr_dataset.get_complexity(scale))
        # replaceable_lists - Defines the lists that need to be given modified items
        # at the current index. For relative area and complexity.
        # [0] = List to be changed
        # [1] = Item to be added
        replaceable_lists = [(self._areas, new_areas),
                             (self._complexities, new_complexities)]
        # Replace currently selected dataset lists for area and complexity
        for curr_list, new_item in replaceable_lists:
if index != new_index:
curr_list.pop(index)
curr_list.insert(index, new_item)
# If the scale values have not changed and the newly added dataset has been added
# stop generating new data lists
if not scales_changed and index == new_index:
break
# Build regression sets after creating new relative area list
self._regress_sets = self.build_regress_sets()
    # Output SCALES_IGNORED_ERROR if scales were in fact ignored
if scales_changed: return DatasetAppendOutput(DatasetAppendOutput.SCALES_IGNORED_ERROR, [dataset])
# Otherwise, output successful value
else: return DatasetAppendOutput()
class DatasetAppendOptions(Enum):
"""Used to define the insert function to be used by Dataset.insertData."""
    # --- The append function is defined further up; the trailing comma makes
    # the value a one-element tuple so Enum registers IgnoreUnaligned as a
    # member instead of treating the bare function as a method ---
    IgnoreUnaligned = (_multiscaledataset_append_ignore_unaligned_scales,)
def __init__(self, append_func):
"""Defines enum which stores a function that can be called later."""
self.append_func = append_func
_TABLE_SCALE_COLUMN_LABEL = "Scale of Analysis"
class MultiscaleDataset:
"""Maintains the full dataset being used for analysis, and helps with handling scale discrepencies."""
def __init__(self):
"""Creates empty dataset. To add to the dataset, use append_data."""
# Set of unified scales, used for efficient scale checking
self._scales = set()
# List of unified scales, equivalent to scales set but in order
self._scales_list = []
# List of data in dataset
self._datasets = []
# --- Following lists are in corresponding order to _datasets ---
self._names = []
self._regress_vals = []
self._row_labels = []
# Unified 2D list of list of area values for each set
self._areas = []
# Unified 2D list of list of complexity values for each set
self._complexities = []
# Unified 2D list of list of regression set rows
self._regress_sets = []
def append_data(self, dataset, option=DatasetAppendOptions.IgnoreUnaligned) -> DatasetAppendOutput:
"""Add data to dataset.
        @param scale_data - scale data to be inserted. Needs to be either a MultiscaleData or a list of MultiscaleData objects.
@param option - Option configuration option for appending data."""
if isinstance(dataset, list):
return self._append_data_list(dataset, option)
elif isinstance(dataset, MultiscaleData):
return self._append_data_single(dataset, option)
        else:
            raise ValueError("Argument 'dataset' cannot be of type '" + type(dataset).__name__ + "'.")
def _append_data_single(self, dataset:MultiscaleData, option:DatasetAppendOptions) -> DatasetAppendOutput:
"""Add data to dataset.
@param scale_data - scale data to be inserted.
@param option - Option configuration option for appending data."""
# If dataset has not been initialized, define it
if not self._datasets:
self._scales = dataset.get_scales()
self._scales_list = self.build_ordered_scales()
self._datasets.append(dataset)
self._names.append(dataset.name)
self._regress_vals.append(dataset.regress_val)
self._row_labels.append(dataset.row_labels)
self._areas.append([dataset.get_relative_area(scale) for scale in sorted(self._scales)])
self._complexities.append([dataset.get_complexity(scale) for scale in sorted(self._scales)])
self._regress_sets = self.build_regress_sets()
return DatasetAppendOutput()
# Add to dataset
else:
            return option.append_func(self, dataset)
def _append_data_list(self, dataset_list:list, option:DatasetAppendOptions) -> DatasetAppendOutput:
"""Add list of data to dataset.
@param scale_data - list of scale data to be inserted.
@param option - Option configuration option for appending data."""
ignore_list = []
for dataset in dataset_list:
output = self._append_data_single(dataset, option)
            # If an ignore error occurred, add to the ignore list
            if not output and output == DatasetAppendOutput.SCALES_IGNORED_ERROR:
                ignore_list += output._value
        # If an ignore error occurred, output the error
if ignore_list: return DatasetAppendOutput(DatasetAppendOutput.SCALES_IGNORED_ERROR, ignore_list)
# Return successful output
else: return DatasetAppendOutput()
def _append_ignore_unaligned_scales(self, dataset:MultiscaleData) -> DatasetAppendOutput:
"""Add new result data to dataset. Ignores unaligned scales.
@param scale_data - scale data to be inserted. Cannot be none."""
return _multiscaledataset_append_ignore_unaligned_scales(self, dataset)
def build_ordered_scales(self):
"""Outputs and builds the scale set as an ordered list."""
return sorted(self._scales)
def build_regress_sets(self):
"""Iterate over all of the relative areas from each dataset, and compile them into a regress set.
Create a list of relative areas for each scale at each index.
@return List of compiled regression sets from relative areas."""
output_reg_set = []
for y_values in zip(*self.get_relative_area()):
# the list of all relative areas at the same scale for the different data sets
# these lists are then appended to regress sets such that
# there is a list of relative areas for each scale.
output_reg_set.append(list(y_values))
return output_reg_set
def build_table_data(self):
"""Builds data to be display on the table.
@return Dictionary with (row,column) position tuple as key"""
        # Start with the scale of analysis column label
        data_dict = {(1, 0): _TABLE_SCALE_COLUMN_LABEL}
        data_column_lists = (self._areas, self._complexities)
        # Define scale columns
for index in range(len(self._scales_list)):
data_dict[(index + 2, 0)] = self._scales_list[index]
# Define data columns
for col_index in range(1, len(self._datasets) * 2 + 1):
# Pre-calculate indexes for later
offset_index = col_index - 1
data_col_pos = offset_index % 2
dataset_index = offset_index // 2
# Write data name labels
data_dict[(0,col_index)] = self._names[dataset_index]
# Write data column labels
data_dict[(1,col_index)] = self._row_labels[dataset_index][data_col_pos]
# Write data columns
selected_data = data_column_lists[data_col_pos][dataset_index]
for row in range(len(selected_data)):
data_dict[(row + 2), col_index] = selected_data[row]
return data_dict
def get_size(self): return len(self._datasets)
def get_results_scale(self): return self._scales_list
def get_legend_txt(self): return self._names
def get_row_labels(self): return self._row_labels
def get_x_regress(self): return self._regress_vals
def set_x_regress(self, new_regress_vals): self._regress_vals = new_regress_vals
def get_relative_area(self): return self._areas
def get_complexity(self): return self._complexities
def get_regress_sets(self): return self._regress_sets
# Bind the append implementation to the enum option now that MultiscaleDataset is defined.
DatasetAppendOptions.IgnoreUnaligned.append_func = MultiscaleDataset._append_ignore_unaligned_scales
|
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import os
import glob
from sklearn.manifold import TSNE
from ..datasets.datasets_nature import NucleiDatasetWrapper, OmicDatasetWrapper
from .utils import sample_encodings
def plot_encodings(model, dataset, tsne=False, dim=3, legend=False, legend_labels=None,
title=None, colors=None, save=False):
if isinstance(model, list):
assert len(model) == len(dataset)
encodings = []
for i, mod in enumerate(model):
encodings.append(sample_encodings(mod, dataset[i], tsne=False))
encodings_py = np.concatenate(encodings)
if tsne:
encodings_py = TSNE(n_components=dim).fit_transform(encodings_py)
else:
encodings_py = sample_encodings(model, dataset, tsne, dim)
if colors is None:
# Default colors to labels
if isinstance(dataset, list):
lab = []
for i, data in enumerate(dataset):
if isinstance(data, NucleiDatasetWrapper) or isinstance(data, OmicDatasetWrapper) or hasattr(
data, 'no_slicing'):
nlab = len(set(list(map(lambda x: x[1].numpy().item(), data))))
lab.append(np.array(list(map(lambda x: x[1] + (i * nlab), data))))
else:
nlab = len(set(data[0][:][1].numpy()))
lab.append(np.array([int(item[1]) + (i * nlab) for item in data]))
colors = np.concatenate(lab)
else:
if isinstance(dataset, NucleiDatasetWrapper) or isinstance(dataset, OmicDatasetWrapper) or hasattr(dataset,
'no_slicing'):
colors = np.array(list(map(lambda x: x[1], dataset)))
else:
colors = np.array([int(item[1]) for item in dataset])
# colors = dataset[:][1].numpy()
if dim == 3:
plot_3d(encodings_py[:, 0], encodings_py[:, 1], encodings_py[:, 2], colors=colors,
legend=legend, legend_labels=legend_labels, title=title, save=save)
elif dim == 2:
plot_2d(encodings_py[:, 0], encodings_py[:, 1], colors=colors,
legend=legend, legend_labels=legend_labels, title=title, save=save)
def plot_3d(x, y, z, colors=False, save=False, legend=False, legend_labels=None, title=None, static_size=False):
    fig = plt.figure(figsize=(11, 7))
    ax = fig.add_subplot(111, projection='3d')
if colors is not False:
sc = ax.scatter(x, y, z, c=colors, cmap='Paired')
else:
ax.scatter(x, y, z)
# ax.set_xlabel('X Label')
# ax.set_ylabel('Y Label')
# ax.set_zlabel('Z Label')
    if legend:
        if legend_labels is not None:
            ax.legend(sc.legend_elements()[0], legend_labels)
        else:
            plt.legend(*sc.legend_elements())
if static_size:
ax.set_xlim((-70, 70))
ax.set_ylim((-70, 70))
ax.set_zlim((-70, 70))
    if title is not None:
        plt.title(title)
    if save:
        os.makedirs('imgs', exist_ok=True)
        if os.path.isfile("imgs/plot_3d.png"):
            # Number the new file one past the highest existing plot_3d_*.png suffix
            n_file = 1 + max(list(
                map(int,
                    map(lambda x: x if x != '3d' else 0,
                        map(lambda x: x.split(".png")[0].split("_")[-1],
                            glob.glob("imgs/plot_3d*.png"))))))
            file_suffix = '_' + str(n_file)
        else:
            file_suffix = ''
        plt.savefig(os.path.join('imgs', 'plot_3d' + file_suffix + '.png'))
    plt.show()
def plot_2d(x, y, colors=False, save=False, legend=False, legend_labels=None, title=None):
fig, ax = plt.subplots(figsize=(11, 7))
if colors is not False:
sc = ax.scatter(x, y, c=colors, cmap='Paired')
else:
ax.scatter(x, y)
    if legend:
        if legend_labels is not None:
            ax.legend(sc.legend_elements()[0], legend_labels)
        else:
            plt.legend(*sc.legend_elements())
    if title is not None:
        plt.title(title)
    if save:
        os.makedirs('imgs', exist_ok=True)
        if os.path.isfile("imgs/plot_2d.png"):
            # Number the new file one past the highest existing plot_2d_*.png suffix
            n_file = 1 + max(list(
                map(int,
                    map(lambda x: x if x != '2d' else 0,
                        map(lambda x: x.split(".png")[0].split("_")[-1],
                            glob.glob("imgs/plot_2d*.png"))))))
            file_suffix = '_' + str(n_file)
        else:
            file_suffix = ''
        plt.savefig(os.path.join('imgs', 'plot_2d' + file_suffix + '.png'))
    plt.show()
def plot_loss_from_epoch(train_loss, test_loss=None, parameters=None, from_epoch=0, to_epoch=30, plot_regularizer=True,
save=False):
fig, ax = plt.subplots(figsize=(11, 7))
# plt.ylim((1, 4.5))
ax.plot(range(from_epoch, to_epoch), train_loss[0][from_epoch:to_epoch], '-bD', label="train loss, MSE")
if plot_regularizer:
ax.plot(range(from_epoch, to_epoch), train_loss[1][from_epoch:to_epoch], '-rD', label="train loss, regularizer")
if test_loss is not None:
ax.plot(range(from_epoch, to_epoch), test_loss[from_epoch:to_epoch], '-gD', label="test loss, MSE only")
if parameters is not None:
plt.title("floss, adam, lr = {}/{} steps, GAMMA={}, W decay={}", str(parameters['LR']),
str(parameters['STEP_SIZE']),
str(parameters['GAMMA']),
str(parameters['WEIGHT_DECAY']))
ax.legend()
ax.grid(axis='y')
if save:
plt.savefig('loss.png')
plt.show()
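# Illustrative usage (synthetic values; this module uses relative imports,
# so call it from the package context):
#   mse = list(np.linspace(4.0, 1.2, 30))
#   reg = list(np.linspace(1.0, 0.3, 30))
#   plot_loss_from_epoch((mse, reg), test_loss=list(np.linspace(4.2, 1.5, 30)))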
|
from core.advbase import *
from slot.a import *
from module.x_alt import Fs_alt
def module():
return Su_Fang
class Su_Fang(Adv):
a3 = ('s',0.35)
conf = {}
conf['slots.a'] = Twinfold_Bonds()+The_Fires_of_Hate()
conf['acl'] = """
`dragon.act("c3 s end")
`s3, not self.s3_buff
`s2, fsc
`s1
`fs, x=4
"""
coab = ['Blade','Dragonyule_Xainfried','Lin_You']
def fs_proc_alt(self, e):
self.afflics.poison('fs', 120, 0.582)
def prerun(self):
conf_fs_alt = {'fs.dmg': 0.174, 'fs.hit': 6}
self.fs_alt = Fs_alt(self, Conf(conf_fs_alt), self.fs_proc_alt)
self.s2_buff = Selfbuff('s2', 0.30, 15)
def s1_proc(self, e):
with KillerModifier('skiller', 'hit', 0.50, ['poison']):
self.dmg_make(e.name, 5.58)
if self.s2_buff.get():
self.dmg_make(e.name, 2.60)
self.hits += 2
def s2_proc(self, e):
self.fs_alt.on(1)
self.s2_buff = Selfbuff(e.name, 0.30, 15).on()
if __name__ == '__main__':
    import sys
    from core.simulate import test_with_argv
    test_with_argv(None, *sys.argv)
|
__author__ = "Haim Adrian"
from matplotlib import pyplot as plt
import numpy as np
# Gradient Descent
def myGradDescent(sensitivity=10 ** (-7)):
# Function Definition
x = np.arange(0, 100, 0.001)
fx = (np.sin(x) + 0.25 * np.abs(x - 30)) * (x - 50) ** 2
# Random starting point
x0 = np.random.uniform(0, 100)
x1 = x0 + 0.01
xs = x1
# Gradient
    df = np.gradient(fx)
df0 = df[np.argmin(np.abs(x - x0))]
df1 = df[np.argmin(np.abs(x - x1))]
eps = (df1 - df0) ** 2
# Descent loop
plt.figure()
    figManager = plt.get_current_fig_manager()
    try:
        figManager.window.showMaximized()  # works with the Qt backend
    except AttributeError:
        pass  # other backends expose different (or no) maximize APIs
plt.plot(x, fx)
plt.plot(xs, fx[np.argmin(np.abs(x - xs))], 'gx')
plt.grid()
while eps > sensitivity:
df0 = df[np.argmin(np.abs(x - x0))]
df1 = df[np.argmin(np.abs(x - x1))]
eps = (df1 - df0) ** 2
# Avoid division by zero
if eps:
# t = (x1 - x0) * (df1 - df0) / ((df1 - df0) ** 2)
# In case of 1 dimension, the equation can be shortened:
t = (x1 - x0) / (df1 - df0)
else:
t = 0
        # Step to the next point: x1 <- x1 - |t| * df0, with x0 trailing one step behind
        x0, x1 = x1, x1 - (np.abs(t) * df0)
# Gradient Ascent would be calculated as: x0 + (np.abs(t) * df0)
# Avoid going out of boundaries
if x1 < x[0]:
x1 = x0
eps = -1
elif x1 > x[-1]:
x1 = x0
eps = -1
# Dynamic graph
plt.plot(x1, fx[np.argmin(np.abs(x - x1))], 'ro')
plt.pause(0.2)
# plt.cla()
plt.title('Done')
# plt.plot(x, fx)
plt.plot(xs, fx[np.argmin(np.abs(x - xs))], 'gx')
plt.plot(x1, fx[np.argmin(np.abs(x - x1))], 'bo')
# plt.grid()
plt.show()
if __name__ == '__main__':
    myGradDescent(1e-20)
|
from django.db import models
from django.urls import reverse
class Collection(models.Model):
description = models.TextField()
name = models.CharField(max_length=200)
photo_url = models.TextField()
def __str__(self):
return self.name
class Photo(models.Model):
collection = models.ForeignKey(
Collection, on_delete=models.CASCADE, related_name='photos')
date = models.CharField(max_length=100, )
title = models.CharField(max_length=100)
photo_url = models.TextField()
location = models.CharField(max_length=100)
# type = models.models.CharField(max_length=50)
def __str__(self):
return self.title
class Form(models.Model):
    author = models.CharField(max_length=100)
    comment = models.CharField(max_length=100)
    def __str__(self):
        return self.comment
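# Illustrative usage (hypothetical objects, e.g. in a view or the shell):
# related_name='photos' on Photo.collection enables reverse lookups:
#   collection = Collection.objects.get(name="Landscapes")
#   for photo in collection.photos.all():
#       print(photo.title, photo.location)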
|
from enum import Enum
from dataclasses import dataclass
from typing import Any
class TokenType(Enum):
NUM = 0
VAR = 1
PLUS = 2
MINUS = 3
MULTIPLY = 4
DIVIDE = 5
POWER = 6
LEFT_PARENTHESES = 7
RIGHT_PARENTHESES = 8
@dataclass
class Token:
type: TokenType
    value: Any = None
def __repr__(self):
        return self.type.name + (f": {self.value}" if self.value is not None else "")
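# --- Illustrative sketch: tokens for the expression "2 * x" ---
if __name__ == "__main__":
    tokens = [Token(TokenType.NUM, 2), Token(TokenType.MULTIPLY), Token(TokenType.VAR, "x")]
    print(tokens)  # [NUM: 2, MULTIPLY, VAR: x]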
|
import torch
from torch.cuda.amp import autocast
import torch.distributed as dist
from functools import reduce
import math
import basics.base_utils as _
from mlpug.trainers.training import *
from mlpug.trainers.training import TrainingManager as TrainingManagerBase
from mlpug.mlpug_exceptions import TrainerInvalidException, BatchNotChunkableException, LossNotAvailableException
from mlpug.pytorch.utils import is_chunkable, SlidingWindow
from mlpug.pytorch.multi_processing import MultiProcessingMixin
class TrainingManager(MultiProcessingMixin, TrainingManagerBase):
def __init__(self, *args, sliding_window_factory=SlidingWindow, **kwargs):
super().__init__(*args, sliding_window_factory=sliding_window_factory, **kwargs)
def _training_ended(self):
if self.is_distributed:
# Wait for all processes to finish
dist.barrier()
class PTTrainerMixin(MultiProcessingMixin):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def _activate_inference_mode(self, inference_mode):
if inference_mode:
self.training_model.eval()
else:
self.training_model.train()
def _get_model_state(self, model, model_name=None):
return model.state_dict()
def _get_optimizer_state(self, optimizer, optimizer_name=None):
return optimizer.state_dict()
def _set_model_state(self, model, state, model_name=None):
model.load_state_dict(state)
def _set_optimizer_state(self, optimizer, state, optimizer_name):
optimizer.load_state_dict(state)
class Trainer(PTTrainerMixin, TrainerBase):
pass
class DefaultTrainer(PTTrainerMixin, DefaultTrainerBase):
def __init__(self, *args, scaler=None, name="DefaultTrainer", **kwargs):
super(DefaultTrainer, self).__init__(*args, name=name, **kwargs)
self._scaler = scaler
if self.use_mixed_precision:
if scaler is None:
self._log.debug("Creating default scaler instance for automatic mixed precision ...")
self._scaler = torch.cuda.amp.GradScaler()
self._log.info(f"Using scaler instance for automatic mixed precision : {self._scaler}")
def set_learning_rate_for(self, optimizer_name, lr):
"""
Set learning rate for specific optimizer `optimizer_name` to `lr`
:param optimizer_name:
:param lr:
:return: True on success, else False
"""
optimizer = self.get_optimizer(optimizer_name)
if not hasattr(optimizer, 'param_groups'):
self._log.error(f"No valid optimizer available with name {optimizer_name}, unable to set learning rate")
return False
try:
for group in optimizer.param_groups:
group['lr'] = lr
except Exception as e:
_.log_exception(self._log, f"Unable to set learning rate for optimizer {optimizer_name}", e)
return False
self._log.debug(f"Learning rate of optimizer {optimizer_name} set to : {lr}")
return True
def evaluate_loss(self, batch_data, inference_mode, evaluate_settings=None):
if self.use_mixed_precision:
self._activate_inference_mode(inference_mode)
with autocast():
results = self._evaluate_loss(batch_data, evaluate_settings, inference_mode)
return normalize_evaluation(results)
else:
return super().evaluate_loss(batch_data, inference_mode, evaluate_settings)
def train_on(self, batch_data, training_settings=None):
"""
Use batch_data to perform a training iteration.
Optionally uses `batch_chunk_size` to evaluate the loss in chunks.
If a `batch_chunk_size` was given during construction of the trainer, the gradients are updated by evaluating
the batch in chunks.
*Note*
When using chunked batch processing, the default implementation assumes that the
loss, calculated over a chunk, is the average of the sample losses.
:param batch_data: batch_data object to train on (e.g. dict, list, tuple)
When `batch_chunk_size` is given, `batch_data` must be an object that implements the
`__len__` and `__getitem__` methods. Here the `__getitem__` method must be able to deal
with slices.
:param training_settings: optional training_settings object (usually dict)
:return: loss, auxiliary_results
loss : number (e.g. float)
auxiliary_results : can be anything, e.g dict or list with values or data items
"""
if not self.instance_valid():
raise TrainerInvalidException()
self._reset_gradients()
loss, auxiliary_results = self._calc_gradients(batch_data, training_settings=training_settings)
self._prepare_update_model_parameters()
self._update_model_parameters()
self._after_update_model_parameters()
return loss, auxiliary_results
def _reset_gradients(self):
for optimizer in self.get_optimizers().values():
optimizer.zero_grad()
def _calc_gradients(self, batch_data, training_settings=None):
"""
:param batch_data:
:type batch_data:
:param training_settings:
:type training_settings:
:return:
:rtype:
:raises LossNotAvailableException
"""
if not self.batch_chunk_size:
results = self.evaluate_loss(batch_data,
inference_mode=False,
evaluate_settings=training_settings)
if 'loss' not in results:
raise LossNotAvailableException()
loss = results['loss']
auxiliary_results = get_value_at('auxiliary_results', results, warn_on_failure=False)
self._back_propagate_from(loss)
else:
chunk_losses, chunk_aux_results, chunk_lengths = self._calc_gradients_chunked(batch_data, training_settings)
loss, auxiliary_results = self._combine_chunk_results(chunk_losses, chunk_aux_results, chunk_lengths)
return loss, auxiliary_results
def _calc_gradients_chunked(self, batch_data, training_settings=None):
"""
See `train_on` method.
This method slices the `batch_data` in slices of size `self.batch_chunk_size`. For each slice the loss is
calculated and the gradients are updated through back prop.
return: chunk_losses, chunk_aux_results, chunk_lengths
All three outputs are lists
"""
if not is_chunkable(batch_data):
raise BatchNotChunkableException()
chunk_losses = []
chunk_aux_results = BatchChunkingResults()
chunk_lengths = []
batch_size = len(batch_data)
num_chunks = math.ceil(batch_size / self.batch_chunk_size)
for chunk_idx in range(num_chunks):
chunk_start = chunk_idx*self.batch_chunk_size
chunk_end = min((chunk_idx+1)*self.batch_chunk_size, batch_size)
chunk_len = chunk_end-chunk_start
chunk = batch_data[chunk_start:chunk_end]
results = self.evaluate_loss(chunk, inference_mode=False, evaluate_settings=training_settings)
if 'loss' not in results:
raise LossNotAvailableException()
loss = results['loss']
aux_results = get_value_at('auxiliary_results', results, warn_on_failure=False)
# loss is assumed to be the average over the sample loss for the chunk
# Divide through batch size to factor in that this loss is part of a larger batch.
last_chunk = chunk_idx == (num_chunks-1)
self._back_propagate_from(chunk_len*loss/batch_size, last_chunk=last_chunk)
chunk_losses += [loss]
chunk_aux_results += [aux_results]
chunk_lengths += [chunk_len]
return chunk_losses, chunk_aux_results, chunk_lengths
def _combine_chunk_results(self, chunk_losses, chunk_aux_results, chunk_lengths):
"""
This default implementation assumes that the loss for a chunk is the average loss of all samples in the chunk.
There is no specific combination logic to combine the chunk auxiliary results
:returns loss, auxiliary_results
loss: weighted average of chunk losses
auxiliary_results: list of dicts:
[
...
{
"results": chunk aux. results,
"num_samples": num samples in chunk
}
...
]
"""
loss = reduce(lambda tot, c: tot+(c[1]*c[0]), zip(chunk_losses, chunk_lengths), 0)
        num_samples = sum(chunk_lengths)
loss /= num_samples
auxiliary_results = [{
"results": aux_results,
"num_samples": chunk_length
} for aux_results, chunk_length in zip(chunk_aux_results, chunk_lengths)]
auxiliary_results = BatchChunkingResults(auxiliary_results)
return loss, auxiliary_results
def _back_propagate_from(self, loss, last_chunk=False):
if self.use_mixed_precision:
self._scaler.scale(loss).backward()
else:
loss.backward()
def _prepare_update_model_parameters(self):
pass
def _update_model_parameters(self):
for optimizer in self.get_optimizers().values():
if self.use_mixed_precision:
self._scaler.step(optimizer)
else:
optimizer.step()
def _after_update_model_parameters(self):
if self.use_mixed_precision:
self._scaler.update()
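# --- Illustrative sketch (standalone, not mlpug API): why each chunk loss is
# scaled by chunk_len / batch_size before backward(). The scaled chunk
# gradients accumulate to the gradient of the full-batch mean loss.
if __name__ == "__main__":
    _model = torch.nn.Linear(4, 1)
    _data, _target = torch.randn(8, 4), torch.randn(8, 1)
    _batch_size, _chunk_size = _data.shape[0], 4
    _model.zero_grad()
    for _start in range(0, _batch_size, _chunk_size):
        _x = _data[_start:_start + _chunk_size]
        _y = _target[_start:_start + _chunk_size]
        _loss = torch.nn.functional.mse_loss(_model(_x), _y)  # mean over the chunk
        (_loss * _x.shape[0] / _batch_size).backward()  # accumulate scaled gradient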
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
FileNum = int(sys.argv[1])
i = 1
File = 1
while i <= FileNum:
    try:
        fr = open("Mining/" + str(i), "r")
        line = fr.readline()
        count = 1
        content = line
        while len(line) > 0:
            line = fr.readline()
            content = content + line
            count += 1
        fr.close()
        # Only keep files that have at least 6 lines
        if count >= 6:
            fw = open("data/" + str(File), "w")
            fw.write(content)
            fw.close()
            File += 1
        i += 1
    except IOError:
        print("FileNum:", i)
        i += 1
        if i > FileNum:
            break
        continue
|
decimals = range(0, 100)
my_range = decimals[3:40:3]
print(my_range == range(3, 40, 3))
print(range(0, 5, 2) == range(0, 6, 2))
print(list(range(0, 5, 2)))
print(list(range(0, 6, 2)))
r = range(0, 100)
print(r)
for i in r[::-2]:
print(i)
print()
for i in range(99, 0, -2):
print(i)
print()
print(range(0, 100)[::-2] == range(99, 0, -2))
print()
back_string = "egaugnal lufrewop yrev a si nohtyP"
print(back_string[::-1])
print()
r = range(0, 10)
for i in r[::-1]:
print(i)
# Challenge
o = range(0, 100, 4)  # a range from 0 up to (but not including) 100, stepping by 4
print(o) # range(0, 100, 4)
p = o[::5]
print(p) # range(0, 100, 20)
print(list(p)) # [0, 20, ... 80]
|
import tensorflow as tf
import tensorflow.contrib as tf_contrib
import numpy as np
from utils import pytorch_xavier_weight_factor, pytorch_kaiming_weight_factor
factor, mode, uniform = pytorch_kaiming_weight_factor(a=0.0, uniform=False)
weight_init = tf_contrib.layers.variance_scaling_initializer(factor=factor, mode=mode, uniform=uniform)
# weight_init = tf.random_normal_initializer(mean=0.0, stddev=0.02)
weight_regularizer = tf_contrib.layers.l2_regularizer(scale=0.0001)
weight_regularizer_fully = tf_contrib.layers.l2_regularizer(scale=0.0001)
##################################################################################
# Layer
##################################################################################
def conv(x, channels, kernel=4, stride=2, pad=0, pad_type='zero', use_bias=True, sn=False, scope='conv_0'):
with tf.variable_scope(scope):
        if "discriminator" in scope:
weight_init = tf.random_normal_initializer(mean=0.0, stddev=0.02)
else:
weight_init = tf_contrib.layers.variance_scaling_initializer(factor=factor, mode=mode, uniform=uniform)
if pad > 0:
h = x.get_shape().as_list()[1]
if h % stride == 0:
pad = pad * 2
else:
pad = max(kernel - (h % stride), 0)
pad_top = pad // 2
pad_bottom = pad - pad_top
pad_left = pad // 2
pad_right = pad - pad_left
if pad_type == 'zero':
x = tf.pad(x, [[0, 0], [pad_top, pad_bottom], [pad_left, pad_right], [0, 0]])
if pad_type == 'reflect':
x = tf.pad(x, [[0, 0], [pad_top, pad_bottom], [pad_left, pad_right], [0, 0]], mode='REFLECT')
if sn:
w = tf.get_variable("kernel", shape=[kernel, kernel, x.get_shape()[-1], channels], initializer=weight_init,
regularizer=weight_regularizer)
x = tf.nn.conv2d(input=x, filter=spectral_norm(w),
strides=[1, stride, stride, 1], padding='VALID')
if use_bias:
bias = tf.get_variable("bias", [channels], initializer=tf.constant_initializer(0.0))
x = tf.nn.bias_add(x, bias)
else:
x = tf.layers.conv2d(inputs=x, filters=channels,
kernel_size=kernel, kernel_initializer=weight_init,
kernel_regularizer=weight_regularizer,
strides=stride, use_bias=use_bias)
return x
def fully_connected(x, units, use_bias=True, sn=False, scope='linear'):
with tf.variable_scope(scope):
x = flatten(x)
shape = x.get_shape().as_list()
channels = shape[-1]
if sn:
w = tf.get_variable("kernel", [channels, units], tf.float32,
initializer=weight_init, regularizer=weight_regularizer_fully)
if use_bias:
bias = tf.get_variable("bias", [units],
initializer=tf.constant_initializer(0.0))
x = tf.matmul(x, spectral_norm(w)) + bias
else:
x = tf.matmul(x, spectral_norm(w))
else:
x = tf.layers.dense(x, units=units, kernel_initializer=weight_init,
kernel_regularizer=weight_regularizer_fully,
use_bias=use_bias)
return x
def flatten(x) :
return tf.layers.flatten(x)
def GDWCT_block(content, style, style_mu, group_num) :
content = deep_whitening_transform(content) # [bs, h, w, ch]
U, style = deep_coloring_transform(style, group_num) # [bs, 1, ch], [bs, ch, ch]
bs, h, w, ch = content.get_shape().as_list()
content = tf.reshape(content, shape=[bs, h*w, ch])
x = tf.matmul(content, style) + style_mu
x = tf.reshape(x, shape=[bs, h, w, ch])
return x, U
def deep_whitening_transform(c) :
mu, _ = tf.nn.moments(c, axes=[1, 2], keep_dims=True)
x = c - mu
return x
def deep_coloring_transform(s, group_num) :
# [batch_size, 1, channel] : S : MLP^CT(s)
bs, _, ch = s.get_shape().as_list()
# make U
l2_norm = tf.norm(s, axis=-1, keepdims=True)
U = s / l2_norm
# make D
eye = tf.eye(num_rows=ch, num_columns=ch, batch_shape=[bs]) # [batch_size, channel, channel]
D = l2_norm * eye
U_block_list = []
split_num = ch // group_num
for i in range(group_num) :
U_ = U[:, :, i * split_num: (i + 1) * split_num]
D_ = D[:, i * split_num: (i + 1) * split_num, i * split_num: (i + 1) * split_num]
block_matrix = U_ * D_ * tf.transpose(U_, perm=[0, 2, 1])
operator_matrix = tf.linalg.LinearOperatorFullMatrix(block_matrix)
U_block_list.append(operator_matrix)
U_block_diag_matrix = tf.linalg.LinearOperatorBlockDiag(U_block_list).to_dense()
return U, U_block_diag_matrix
def group_wise_regularization(c_whitening, U_list, group_num) :
""" Regularization """
""" whitening regularization """
bs, h, w, ch = c_whitening.get_shape().as_list()
c_whitening = tf.reshape(c_whitening, shape=[bs, h * w, ch])
c_whitening = tf.matmul(tf.transpose(c_whitening, perm=[0, 2, 1]), c_whitening) # covariance of x [bs, ch, ch]
bs, ch, _ = c_whitening.get_shape().as_list() # ch1 = ch2
index_matrix = make_index_matrix(bs, ch, ch, group_num)
    group_covariance_x = tf.where(tf.equal(index_matrix, 1.0), c_whitening, tf.zeros_like(c_whitening))
    group_covariance_x = tf.linalg.set_diag(group_covariance_x, tf.ones([bs, ch]))
    whitening_regularization_loss = L1_loss(c_whitening, group_covariance_x)
""" coloring regularization """
split_num = ch // group_num
coloring_regularization_list = []
coloring_regularization_loss_list = []
for U in U_list :
# [bs, 1, ch]
for i in range(group_num):
U_ = U[:, :, i * split_num: (i + 1) * split_num]
U_TU = tf.matmul(tf.transpose(U_, perm=[0, 2, 1]), U_) # [bs, ch // group_num, ch // group_num]
coloring_regularization_list.append(L1_loss(U_TU, tf.eye(num_rows=ch // group_num, num_columns=ch // group_num, batch_shape=[bs])))
coloring_regularization_loss_list.append(tf.reduce_mean(coloring_regularization_list))
coloring_regularization_loss = tf.reduce_mean(coloring_regularization_loss_list)
return whitening_regularization_loss, coloring_regularization_loss
def make_index_matrix(bs, ch1, ch2, group_num) :
index_matrix = np.abs(np.kron(np.eye(ch1 // group_num, ch2 // group_num), np.eye(group_num, group_num) - 1))
index_matrix[index_matrix == 0] = -1
index_matrix[index_matrix == 1] = 0
index_matrix[index_matrix == -1] = 1
index_matrix = np.tile(index_matrix, [bs, 1, 1])
return index_matrix
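# Note (illustrative): for ch1 = ch2 = 4 and group_num = 2, the mask is 1 on
# the cross-group blocks and 0 on the within-group off-diagonals, and the
# diagonal of the masked covariance is reset to ones, so the whitening loss
# above pushes within-group off-diagonal covariances to 0 and diagonals to 1.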
##################################################################################
# Residual-block
##################################################################################
def resblock(x_init, channels, use_bias=True, sn=False, scope='resblock'):
with tf.variable_scope(scope):
with tf.variable_scope('res1'):
x = conv(x_init, channels, kernel=3, stride=1, pad=1, pad_type='reflect', use_bias=use_bias, sn=sn)
x = instance_norm(x)
x = relu(x)
with tf.variable_scope('res2'):
x = conv(x, channels, kernel=3, stride=1, pad=1, pad_type='reflect', use_bias=use_bias, sn=sn)
x = instance_norm(x)
return x + x_init
def no_norm_resblock(x_init, channels, use_bias=True, sn=False, scope='resblock'):
with tf.variable_scope(scope):
with tf.variable_scope('res1'):
x = conv(x_init, channels, kernel=3, stride=1, pad=1, pad_type='reflect', use_bias=use_bias, sn=sn)
x = relu(x)
with tf.variable_scope('res2'):
x = conv(x, channels, kernel=3, stride=1, pad=1, pad_type='reflect', use_bias=use_bias, sn=sn)
return x + x_init
##################################################################################
# Sampling
##################################################################################
def down_sample(x) :
return tf.layers.average_pooling2d(x, pool_size=3, strides=2, padding='SAME')
def up_sample(x, scale_factor=2):
_, h, w, _ = x.get_shape().as_list()
new_size = [h * scale_factor, w * scale_factor]
return tf.image.resize_nearest_neighbor(x, size=new_size)
def up_sample_nearest(x, scale_factor=2):
_, h, w, _ = x.get_shape().as_list()
new_size = [h * scale_factor, w * scale_factor]
return tf.image.resize_nearest_neighbor(x, size=new_size)
def global_avg_pooling(x):
gap = tf.reduce_mean(x, axis=[1, 2], keepdims=True)
return gap
##################################################################################
# Activation function
##################################################################################
def lrelu(x, alpha=0.01):
# pytorch alpha is 0.01
return tf.nn.leaky_relu(x, alpha)
def relu(x):
return tf.nn.relu(x)
def tanh(x):
return tf.tanh(x)
##################################################################################
# Normalization function
##################################################################################
def instance_norm(x, scope='instance_norm'):
return tf_contrib.layers.instance_norm(x,
epsilon=1e-05,
center=True, scale=True,
scope=scope)
def layer_norm(x, scope='layer_norm') :
return tf_contrib.layers.layer_norm(x,
center=True, scale=True,
scope=scope)
def group_norm(x, groups=8, scope='group_norm'):
return tf.contrib.layers.group_norm(x, groups=groups, epsilon=1e-05,
center=True, scale=True,
scope=scope)
def spectral_norm(w, iteration=1):
w_shape = w.shape.as_list()
w = tf.reshape(w, [-1, w_shape[-1]])
u = tf.get_variable("u", [1, w_shape[-1]], initializer=tf.random_normal_initializer(), trainable=False)
u_hat = u
v_hat = None
for i in range(iteration):
"""
power iteration
Usually iteration = 1 will be enough
"""
v_ = tf.matmul(u_hat, tf.transpose(w))
v_hat = tf.nn.l2_normalize(v_)
u_ = tf.matmul(v_hat, w)
u_hat = tf.nn.l2_normalize(u_)
u_hat = tf.stop_gradient(u_hat)
v_hat = tf.stop_gradient(v_hat)
sigma = tf.matmul(tf.matmul(v_hat, w), tf.transpose(u_hat))
with tf.control_dependencies([u.assign(u_hat)]):
w_norm = w / sigma
w_norm = tf.reshape(w_norm, w_shape)
return w_norm
##################################################################################
# Loss function
##################################################################################
def discriminator_loss(gan_type, real, fake):
n_scale = len(real)
loss = []
real_loss = 0
fake_loss = 0
for i in range(n_scale) :
if gan_type == 'lsgan' :
real_loss = tf.reduce_mean(tf.squared_difference(real[i], 1.0))
fake_loss = tf.reduce_mean(tf.square(fake[i]))
if gan_type == 'gan' :
real_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(real[i]), logits=real[i]))
fake_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros_like(fake[i]), logits=fake[i]))
if gan_type == 'hinge':
real_loss = -tf.reduce_mean(tf.minimum(real[i][-1] - 1, 0.0))
fake_loss = -tf.reduce_mean(tf.minimum(-fake[i][-1] - 1, 0.0))
loss.append(real_loss + fake_loss)
return tf.reduce_sum(loss)
def generator_loss(gan_type, fake):
n_scale = len(fake)
loss = []
fake_loss = 0
for i in range(n_scale) :
if gan_type == 'lsgan' :
fake_loss = tf.reduce_mean(tf.squared_difference(fake[i], 1.0))
if gan_type == 'gan' :
fake_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(fake[i]), logits=fake[i]))
if gan_type == 'hinge':
# fake_loss = -tf.reduce_mean(relu(fake[i][-1]))
fake_loss = -tf.reduce_mean(fake[i][-1])
loss.append(fake_loss)
return tf.reduce_sum(loss)
def L1_loss(x, y):
loss = tf.reduce_mean(tf.abs(x - y))
return loss
def regularization_loss(scope_name) :
"""
If you want to use "Regularization"
g_loss += regularization_loss('generator')
d_loss += regularization_loss('discriminator')
"""
collection_regularization = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
loss = []
for item in collection_regularization :
if scope_name in item.name :
loss.append(item)
return tf.reduce_sum(loss)
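# --- Illustrative sketch (standalone NumPy, not part of the TF graph): one
# power-iteration step of spectral_norm(), estimating the largest singular
# value sigma of the reshaped kernel.
if __name__ == "__main__":
    _w = np.random.randn(12, 6)
    _u = np.random.randn(1, 6)
    _v = _u @ _w.T                    # v ~ left singular direction
    _v /= np.linalg.norm(_v)
    _u = _v @ _w                      # u ~ right singular direction
    _u /= np.linalg.norm(_u)
    _sigma = (_v @ _w @ _u.T).item()  # estimate of the spectral norm
    print("estimated:", _sigma, "exact:", np.linalg.svd(_w, compute_uv=False)[0])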
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import boto3
from moto import mock_s3
from airflow.models import DAG
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.ssh.operators.ssh import SSHOperator
from airflow.utils.timezone import datetime
from tests.test_utils.config import conf_vars
BUCKET = 'test-bucket'
S3_KEY = 'test/test_1_file.csv'
SFTP_PATH = '/tmp/remote_path.txt'
SFTP_CONN_ID = 'ssh_default'
S3_CONN_ID = 'aws_default'
SFTP_MOCK_FILE = 'test_sftp_file.csv'
S3_MOCK_FILES = 'test_1_file.csv'
TEST_DAG_ID = 'unit_tests_sftp_tos3_op'
DEFAULT_DATE = datetime(2018, 1, 1)
class TestSFTPToS3Operator(unittest.TestCase):
@mock_s3
def setUp(self):
hook = SSHHook(ssh_conn_id='ssh_default')
s3_hook = S3Hook('aws_default')
hook.no_host_key_check = True
dag = DAG(
TEST_DAG_ID + 'test_schedule_dag_once',
schedule_interval="@once",
start_date=DEFAULT_DATE,
)
self.hook = hook
self.s3_hook = s3_hook
self.ssh_client = self.hook.get_conn()
self.sftp_client = self.ssh_client.open_sftp()
self.dag = dag
self.s3_bucket = BUCKET
self.sftp_path = SFTP_PATH
self.s3_key = S3_KEY
@mock_s3
@conf_vars({('core', 'enable_xcom_pickling'): 'True'})
def test_sftp_to_s3_operation(self):
# Setting
test_remote_file_content = (
"This is remote file content \n which is also multiline "
"another line here \n this is last line. EOF"
)
# create a test file remotely
create_file_task = SSHOperator(
task_id="test_create_file",
ssh_hook=self.hook,
command=f"echo '{test_remote_file_content}' > {self.sftp_path}",
do_xcom_push=True,
dag=self.dag,
)
assert create_file_task is not None
create_file_task.execute(None)
# Test for creation of s3 bucket
conn = boto3.client('s3')
conn.create_bucket(Bucket=self.s3_bucket)
assert self.s3_hook.check_for_bucket(self.s3_bucket)
# get remote file to local
run_task = SFTPToS3Operator(
s3_bucket=BUCKET,
s3_key=S3_KEY,
sftp_path=SFTP_PATH,
sftp_conn_id=SFTP_CONN_ID,
s3_conn_id=S3_CONN_ID,
task_id='test_sftp_to_s3',
dag=self.dag,
)
assert run_task is not None
run_task.execute(None)
# Check if object was created in s3
objects_in_dest_bucket = conn.list_objects(Bucket=self.s3_bucket, Prefix=self.s3_key)
# there should be object found, and there should only be one object found
assert len(objects_in_dest_bucket['Contents']) == 1
# the object found should be consistent with dest_key specified earlier
assert objects_in_dest_bucket['Contents'][0]['Key'] == self.s3_key
# Clean up after finishing with test
conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
conn.delete_bucket(Bucket=self.s3_bucket)
assert not self.s3_hook.check_for_bucket(self.s3_bucket)
|
# Import Kratos core and apps
from KratosMultiphysics import *
from KratosMultiphysics.ShapeOptimizationApplication import *
# Additional imports
from KratosMultiphysics.KratosUnittest import TestCase
from gid_output_process import GiDOutputProcess
import mapper_factory as mapper_factory
import KratosMultiphysics.kratos_utilities as kratos_utilities  # used for the cleanup at the bottom
import math
# =======================================================================================================
# Auxiliary functions
# =======================================================================================================
def OutputResults(model_part, file_name):
output_parameters = Parameters("""
{
"result_file_configuration" : {
"gidpost_flags" : {
"GiDPostMode" : "GiD_PostBinary",
"WriteDeformedMeshFlag" : "WriteDeformed",
"WriteConditionsFlag" : "WriteConditions",
"MultiFileFlag" : "SingleFile"
},
"file_label" : "step",
"output_control_type" : "step",
"output_frequency" : 1,
"nodal_results" : ["CONTROL_POINT_UPDATE","CONTROL_POINT_CHANGE","SHAPE_UPDATE","PRESSURE","AIR_PRESSURE","WATER_PRESSURE"]
},
"point_data_configuration" : []
}""")
gid_output_original = GiDOutputProcess(model_part, file_name, output_parameters)
gid_output_original.ExecuteInitialize()
gid_output_original.ExecuteBeforeSolutionLoop()
gid_output_original.ExecuteInitializeSolutionStep()
gid_output_original.PrintOutput()
gid_output_original.ExecuteFinalizeSolutionStep()
gid_output_original.ExecuteFinalize()
def Norm2OfVectorVariable(model_part, nodal_variable):
    norm_2 = 0
    for node in model_part.Nodes:
        temp_vec = node.GetSolutionStepValue(nodal_variable)
        norm_2 = norm_2 + temp_vec[0]*temp_vec[0] + temp_vec[1]*temp_vec[1] + temp_vec[2]*temp_vec[2]
    return math.sqrt(norm_2)
def Norm2OfScalarVariable(model_part, nodal_variable):
    norm_2 = 0
    for node in model_part.Nodes:
        temp_scalar = node.GetSolutionStepValue(nodal_variable)
        norm_2 = norm_2 + temp_scalar*temp_scalar
    return math.sqrt(norm_2)
# =======================================================================================================
# Set and read input data
# =======================================================================================================
# Import model parts
plate_with_trias = ModelPart("plate_with_trias")
plate_with_trias.AddNodalSolutionStepVariable(CONTROL_POINT_UPDATE)
plate_with_trias.AddNodalSolutionStepVariable(CONTROL_POINT_CHANGE)
plate_with_trias.AddNodalSolutionStepVariable(SHAPE_UPDATE)
plate_with_trias.AddNodalSolutionStepVariable(PRESSURE)
plate_with_trias.AddNodalSolutionStepVariable(WATER_PRESSURE)
plate_with_trias.AddNodalSolutionStepVariable(AIR_PRESSURE)
model_part_io = ModelPartIO("plate_with_trias")
model_part_io.ReadModelPart(plate_with_trias)
plate_with_quads = ModelPart("plate_with_quads")
plate_with_quads.AddNodalSolutionStepVariable(CONTROL_POINT_UPDATE)
plate_with_quads.AddNodalSolutionStepVariable(CONTROL_POINT_CHANGE)
plate_with_quads.AddNodalSolutionStepVariable(SHAPE_UPDATE)
model_part_io = ModelPartIO("plate_with_quads")
model_part_io.ReadModelPart(plate_with_quads)
# Set an input profile for the mapping variables (some saddle profile)
for node in plate_with_trias.Nodes:
tmp_vector = [0.1, 0, (0.5-node.X)*(0.5-node.Y)]
node.SetSolutionStepValue(CONTROL_POINT_UPDATE,tmp_vector)
for node in plate_with_trias.Nodes:
node.SetSolutionStepValue(PRESSURE,(0.5-node.X)*(0.5-node.Y))
# =======================================================================================================
# Perform tests
# =======================================================================================================
# Test matrix-free mapper
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 10000,
"matrix_free_filtering" : true
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_trias,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_result = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_result, 1.283132791556226, 12)
# Test matrix mapper
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_trias,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_result = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_result, 1.2831327915562258, 12)
# Test matrix mapper with consistent mapping
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000,
"consistent_mapping" : true
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_trias,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_result = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_result, 1.266374348187224, 12)
# Test rectangular matrix mapper
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_quads,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_results_quad = Norm2OfVectorVariable(plate_with_quads, CONTROL_POINT_CHANGE)
norm_2_results_tria = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_results_quad, 2.5408880662655733, 12)
TestCase().assertAlmostEqual(norm_2_results_tria, 4.48736454850266, 12)
# Test rectangular matrix mapper with matrix free mapper
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000,
"matrix_free_filtering" : true
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_quads,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_results_quad = Norm2OfVectorVariable(plate_with_quads, CONTROL_POINT_CHANGE)
norm_2_results_tria = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_results_quad, 2.5408880662655733, 12)
TestCase().assertAlmostEqual(norm_2_results_tria, 4.48736454850266, 12)
# Test improved integration
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000,
"improved_integration" : true,
"integration_method" : "gauss_integration",
"number_of_gauss_points" : 5
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_trias,mapper_settings)
matrix_mapper.Map(CONTROL_POINT_UPDATE,CONTROL_POINT_CHANGE)
matrix_mapper.InverseMap(CONTROL_POINT_CHANGE,SHAPE_UPDATE)
norm_2_result = Norm2OfVectorVariable(plate_with_trias, SHAPE_UPDATE)
TestCase().assertAlmostEqual(norm_2_result, 1.3164625011428233, 12)
# Test scalar mapping
mapper_settings = Parameters("""
{
"filter_function_type" : "linear",
"filter_radius" : 0.4,
"max_nodes_in_filter_radius" : 1000,
"matrix_free_filtering" : true
}""")
matrix_mapper = mapper_factory.CreateMapper(plate_with_trias,plate_with_trias,mapper_settings)
matrix_mapper.Map(PRESSURE,AIR_PRESSURE)
matrix_mapper.InverseMap(AIR_PRESSURE,WATER_PRESSURE)
norm_2_result = Norm2OfScalarVariable(plate_with_trias, WATER_PRESSURE)
TestCase().assertAlmostEqual(norm_2_result, 0.610521887077, 12)
# OutputResults(plate_with_trias,"results_tria_plate")
# OutputResults(plate_with_quads,"results_quad_plate")
# =======================================================================================================
# Clean folder
# =======================================================================================================
kratos_utilities.DeleteFileIfExisting("plate_with_trias.time")
kratos_utilities.DeleteFileIfExisting("plate_with_quads.time")
# =======================================================================================================
|
is_running_migration = False
|
# Generated by Django 3.2.1 on 2021-05-20 20:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_user_sub'),
]
operations = [
migrations.AddField(
model_name='user',
name='deactivated',
field=models.BooleanField(default=False, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='deactivated'),
),
]
|
# -*- coding: utf-8 -*-
import imagesource
import hashlib
import cv2
import tempfile
import os.path
import shutil
from nose.tools import eq_
files_template = 'tests/data/frames/%03d.jpg'
video = 'tests/data/MOV02522.MPG'
def test_files():
hashes_rgb = {}
hashes_bgr = {}
for i in range(10):
filename = files_template % i
img = cv2.imread(filename)
        assert img is not None, "Can't load " + filename
hashes_bgr[i] = hashlib.md5(img).hexdigest()
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.FilesSource(files_template)
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
images.color_conversion_from_bgr = None
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[0])
# tmp_dir = tempfile.TemporaryDirectory() # from Python 3.2
# tmp_dir.name
tmp_dir = tempfile.mkdtemp()
tmp_file_template = os.path.join(tmp_dir, '%03d.png')
images.write_images(tmp_file_template, 10)
for i in range(10):
filename = tmp_file_template % i
img = cv2.imread(filename)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[i])
shutil.rmtree(tmp_dir) # not needed with tempfile.TemporaryDirectory()
def test_video():
hashes_rgb = {}
hashes_bgr = {}
cap = cv2.VideoCapture(video)
for i in range(10):
retval, img = cap.read()
assert retval
hashes_bgr[i] = hashlib.md5(img).hexdigest()
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.VideoSource(video)
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
images.accurate_slow_seek = True
img = images.get_image(8)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8])
img = images.get_image(7)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[7])
images.accurate_slow_seek = False
img = images.get_image(8)
# eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8]) # the images may differ
img = images.get_image(7)
# eq_(hashlib.md5(img).hexdigest(), hashes_rgb[7]) # the images may differ
images.accurate_slow_seek = True
images.color_conversion_from_bgr = None
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[0])
# tmp_dir = tempfile.TemporaryDirectory() # from Python 3.2
# tmp_dir.name
tmp_dir = tempfile.mkdtemp()
tmp_file_template = os.path.join(tmp_dir, '%03d.png')
images.write_images(tmp_file_template, 10)
for i in range(10):
filename = tmp_file_template % i
img = cv2.imread(filename)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[i])
shutil.rmtree(tmp_dir) # not needed with tempfile.TemporaryDirectory()
def test_timedvideo():
images = imagesource.TimedVideoSource(video)
images.extract_timestamps()
assert images.timestamps_ms is not None
# def test_mass_timedvideo():
# import fnmatch
# import os
#
# matches = []
# for root, dirnames, filenames in os.walk('...somepath...'):
# for filename in fnmatch.filter(filenames, '*.avi'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.AVI'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.mp4'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.MP4'):
# matches.append(os.path.join(root, filename))
#
# for video_file in matches:
# print video_file
# images = imagesource.TimedVideoSource(video_file)
# images.extract_timestamps()
# assert images.timestamps_ms is not None
# print images.timestamps_ms[:30]
def test_synchronized():
hashes_rgb = {}
for i in range(10):
filename = files_template % i
img = cv2.imread(filename)
        assert img is not None, "Can't load " + filename
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.FilesSource(files_template)
frame_lookup_table = [0, 2, 4, 6, 8]
errors = [10, 20, 30, 40, 50]
images_synchronized = imagesource.SynchronizedSource(images, frame_lookup_table, errors)
img = images_synchronized.get_image(0)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
eq_(images_synchronized.get_synchronization_error(0), 10)
img = images_synchronized.get_image(1)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
eq_(images_synchronized.get_synchronization_error(1), 20)
img = images_synchronized.get_image(4)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8])
eq_(images_synchronized.get_synchronization_error(4), 50)
|
#!/usr/bin/env python
# Author: David Stelter, Andrew Jewett
# License: MIT License (See LICENSE.md)
# Copyright (c) 2017
# All rights reserved.
import os, sys, getopt
import datetime
__version__ = '0.3.0'
#################### UNITS ####################
# Only used with --units flag
econv = 1.0 # Additional factor for unit conversion if needed (energies)
lconv = 1.0 # Additional factor for unit conversion if needed (lengths)
dconv = 1.0 # Additional factor for unit conversion if needed (densities)
###############################################
sys.stderr.write('\nEMC 2 LT conversion tool: v'+str(__version__)+'\n\n')
def helpme():
sys.stderr.write('Help for the EMC 2 LT conversion tool\n\n')
sys.stderr.write('Input takes a list of files in EMC .prm format to be read.\n')
sys.stderr.write('Additional styles (bond, angle, etc) can be modified via the\n'+
'command line. Any valid LAMMPS style can be used.\n\n')
sys.stderr.write('Styles include:\n')
sys.stderr.write('--pair-style=\n')
sys.stderr.write('--bond-style=\n')
sys.stderr.write('--angle-style=\n')
sys.stderr.write('--dihedral-style=\n')
sys.stderr.write('--improper-style=\n\n')
sys.stderr.write('Default styles are lj/cut/coul/long, harmonic, harmonic, harmonic,\n'+
'harmonic \n\n')
sys.stderr.write('Other commands:\n')
sys.stderr.write('--name= provides basename for output file if desired\n\n')
sys.stderr.write('--units flag for manual units (no parameter needed)\n\n')
sys.stderr.write('Usage example:\n')
sys.stderr.write('emcprm2lt.py file1 file2 --bond-style=harmonic --angle-style=harmonic\n')
sys.stderr.write('\n')
def Abort():
sys.stderr.write('Aborting...\n')
sys.exit()
def WriteInit():
    # Write generic LAMMPS settings; additions will likely be needed on a per-force-field basis
foutput.write(' write_once("In Init") {\n')
foutput.write(' # Warning: This is a very generic "In Init" section, further\n')
foutput.write(' # modification prior to any simulation is extremely likely\n')
foutput.write(' units real\n')
foutput.write(' atom_style full\n')
foutput.write(' bond_style hybrid %s\n' % bstyle)
if angle_flag:
foutput.write(' angle_style hybrid %s\n' % astyle)
if torsion_flag:
foutput.write(' dihedral_style hybrid %s\n' % dstyle)
if improp_flag:
foutput.write(' improper_style hybrid %s\n' % istyle)
foutput.write(' pair_style hybrid %s %f %f\n' % (pstyle,
float(inner[0])*lconv, float(cutoff[0])*lconv))
if pair14[0] == 'OFF':
foutput.write(' special_bonds lj/coul 0.0 0.0 0.0\n')
else:
sys.stderr.write('Warning: special_bonds needed, add to "In Init" section\n\n')
foutput.write(' } # end init\n')
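# Illustrative WriteInit() output (styles and cutoff values assumed, not taken
# from any particular .prm file):
#   write_once("In Init") {
#     units real
#     atom_style full
#     bond_style hybrid harmonic
#     pair_style hybrid lj/cut/coul/long 9.000000 12.000000
#     special_bonds lj/coul 0.0 0.0 0.0
#   } # end init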
def Units(length_flag, energy_flag, density_flag):
# Check flags for all units, determine what conversions are needed, hard-coded for LAMMPS 'real'
sys.stderr.write('Attempting to auto-convert units... This should always be double-checked\n'+
' especially for unique potential styles\n')
global lconv; global econv; global dconv
if length_flag:
sys.stderr.write('Warning: Length scale does not match LAMMPS real units.\n'
' Attempting conversion to angstroms.\n')
if length[0] == 'NANOMETER':
lconv = 10.0
sys.stderr.write(' nanometer -> angstrom\n')
elif length[0] == 'MICROMETER':
lconv = 10000.0
sys.stderr.write(' micrometer -> angstrom\n')
elif length[0] == 'METER':
lconv = 10000000000.0
sys.stderr.write(' meter -> angstrom\n')
else:
sys.stderr.write('Length units NOT converted\n')
if energy_flag:
sys.stderr.write('Warning: Energy units do not match LAMMPS real units.\n'+
' Attempting conversion to kcal/mol.\n')
if energy[0] == 'KJ/MOL':
econv = 0.239006
sys.stderr.write(' kj/mol -> kcal/mol\n')
elif energy[0] == 'J/MOL':
econv = 0.000239006
sys.stderr.write(' j/mol -> kcal/mol\n')
elif energy[0] == 'CAL/MOL':
econv = 0.001
sys.stderr.write(' cal/mol -> kcal/mol\n')
else:
sys.stderr.write('Energy units NOT converted\n')
if density_flag:
sys.stderr.write('Warning: density units do not match LAMMPS real units.\n'+
' Attempting conversion to gram/cm^3\n')
if density[0] == 'KG/M^3':
dconv = 0.001
sys.stderr.write(' kg/m^3 -> g/cm^3\n')
else:
sys.stderr.write('Density units NOT converted\n')
return lconv, econv, dconv
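# Example: a .prm declaring LENGTH NANOMETER and ENERGY KJ/MOL yields
# lconv = 10.0 and econv = 0.239006, so all emitted coefficients end up in
# LAMMPS 'real' units (angstrom, kcal/mol).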
def ChkPotential(manual_flag, angle_flag, torsion_flag, improp_flag):
    # Check the type of potential and determine what unit conversion is necessary
global beconv
if angle_flag:
global aeconv
if torsion_flag:
global deconv
if improp_flag:
global ieconv
if manual_flag == False:
# Chk bond potential
if bstyle == '' or bstyle == 'harmonic':
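            # Assumed convention: the EMC harmonic bond constant carries units of
            # energy/length^2, hence the econv/lconv^2 rescaling; the extra factor
            # of 2 covers a 1/2 prefactor difference. Verify per force field.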
beconv = econv / (2*pow(lconv,2))
else:
sys.stderr.write('Cannot find bond potential type, use manual units\n')
Abort()
if angle_flag:
if astyle == '' or astyle == 'harmonic':
aeconv = econv
elif astyle == 'cosine/squared':
aeconv = econv / 2
elif astyle == 'sdk':
aeconv = econv
else:
sys.stderr.write('Cannot find angle potential type, use manual units\n')
Abort()
# torsion and improper not implemented fully
elif torsion_flag:
if dstyle == '' or dstyle == 'harmonic':
deconv = econv
else:
sys.stderr.write('Cannot find torsion potential type, use manual units\n')
Abort()
elif improp_flag:
if istyle == '' or istyle == 'harmonic':
ieconv = econv
else:
sys.stderr.write('Cannot find improper potential type, use manual units\n')
Abort()
else:
# Modify as needed
sys.stderr.write('Warning: Manual units used, set potential conversion units in script\n')
beconv = 1
if angle_flag:
aeconv = 1
if torsion_flag:
deconv = 1
if improp_flag:
ieconv = 1
### Parse input ###
if len(sys.argv) == 1:
helpme()
sys.exit()
manual_units = False # Turned on via command line
args = list(sys.argv[1:])
myopts, args = getopt.gnu_getopt(args, 'fh', ['pair-style=', 'bond-style=', 'angle-style=',
'dihedral-style=', 'improper-style=', 'name=', 'units'])
filenames = list(args)
pstyle = ''; bstyle = ''; astyle = ''; dstyle = ''; istyle = ''
name = ''
for opt, arg in myopts:
    if opt in ('-f',):
        filenames = arg
    elif opt in ('--pair-style',):
        pstyle = arg
    elif opt in ('--bond-style',):
        bstyle = arg
    elif opt in ('--angle-style',):
        astyle = arg
    elif opt in ('--dihedral-style',):
        dstyle = arg
    elif opt in ('--improper-style',):
        istyle = arg
    elif opt in ('--name',):
        name = arg
    elif opt in ('--units',):
        manual_units = True
        sys.stderr.write('Manual units enabled, modify python script accordingly\n')
elif opt in ('-h', '--help'):
helpme()
sys.exit()
### Check input filenames, make sure they exist ###
sys.stderr.write('Converting: \n')
for i in range(len(filenames)):
if os.path.isfile(filenames[i]):
sys.stderr.write('\n'+filenames[i]+'\n')
else:
sys.stderr.write('invalid filename:'+filenames[i]+'\n')
Abort()
sys.stderr.write('from EMC .prm to moltemplate .lt format\n\n')
### Open all files ###
f = [open(fname, 'r') for fname in filenames]
### All these settings from DEFINE should be lists of fixed size ###
ffname = [[] for i in range(len(f))]
fftype = [[] for i in range(len(f))]
version = [[] for i in range(len(f))]
created1 = [[] for i in range(len(f))]
created2 = [[] for i in range(len(f))]
length = [[] for i in range(len(f))]
energy = [[] for i in range(len(f))]
density = [[] for i in range(len(f))]
mix = [[] for i in range(len(f))]
nbonded = [[] for i in range(len(f))]
inner = [[] for i in range(len(f))]
cutoff = [[] for i in range(len(f))]
pair14 = [[] for i in range(len(f))]
angle_def = [[] for i in range(len(f))]
torsion_def = [[] for i in range(len(f))]
improp_def = [[] for i in range(len(f))] # not all prm have this
### Parse DEFINE section, save info for each file ###
for i in range(len(f)):
grab = False
for line in f[i]:
if line.strip() == 'ITEM DEFINE':
grab = True
elif line.strip() == 'ITEM END':
grab = False
elif grab:
if line.startswith('FFNAME'):
ffname[i] = line.split()[1].strip()
if line.startswith('FFTYPE'):
fftype[i] = line.split()[1].strip()
if line.startswith('VERSION'):
version[i] = line.split()[1].strip()
if line.startswith('CREATED'):
created1[i] = line.split()[1].strip()
created2[i] = line.split()[2].strip()
if line.startswith('LENGTH'):
length[i] = line.split()[1].strip()
if line.startswith('ENERGY'):
energy[i] = line.split()[1].strip()
if line.startswith('DENSITY'):
density[i] = line.split()[1].strip()
if line.startswith('MIX'):
mix[i] = line.split()[1].strip()
if line.startswith('NBONDED'):
nbonded[i] = line.split()[1].strip()
if line.startswith('INNER'):
inner[i] = line.split()[1].strip()
if line.startswith('CUTOFF'):
cutoff[i] = line.split()[1].strip()
if line.startswith('PAIR14'):
pair14[i] = line.split()[1].strip()
if line.startswith('ANGLE'):
angle_def[i] = line.split()[1].strip()
if line.startswith('TORSION'):
torsion_def[i] = line.split()[1].strip()
if line.startswith('IMPROP'):
improp_def[i] = line.split()[1].strip()
### Sanity Checks ###
for i in range(len(f)):
for j in range(len(f)):
if ffname[j] != ffname[i]:
sys.stderr.write('force field files do not match\n')
Abort()
if length[j] != length[i]:
sys.stderr.write('units not identical between files\n')
Abort()
if energy[j] != energy[i]:
sys.stderr.write('units not identical between files\n')
Abort()
if density[j] != density[i]:
sys.stderr.write('units not identical between files\n')
Abort()
if inner[j] != inner[i]:
sys.stderr.write('inner cutoff not identical between files\n')
Abort()
if cutoff[j] != cutoff[i]:
sys.stderr.write('cutoff not identical between files\n')
Abort()
if pair14[j] != pair14[i]:
sys.stderr.write('1-4 pair interaction not consistent between files\n')
Abort()
### Check if sections exist in PRM file ###
angle_flag = False; torsion_flag = False; improp_flag = False
for i in range(len(f)):
if angle_def[i] == 'WARN':
angle_flag = True
if torsion_def[i] == 'WARN':
torsion_flag = True
if improp_def[i] == 'WARN':
improp_flag = True
### Check which units to use, trip convert flags ###
length_flag = False; energy_flag = False; density_flag = False
if length[0] != 'ANGSTROM':
length_flag = True
if energy[0] != 'KCAL/MOL':
energy_flag = True
if density[0] != 'G/CC':
density_flag = True
if manual_units == True:
length_flag = False
energy_flag = False
density_flag = False
Units(length_flag, energy_flag, density_flag)
### Read Whole File, save to lists ###
# Non-crucial sections include
# BONDS, ANGLE, TORSION, IMPROP, NONBOND
# Read all sections every time, only output sections when flags tripped
f = [open(fname, 'r') for fname in filenames]
masses = []; nonbond = []; bond = []; angle = []; torsion = []; improp = []
equiv = []
for i in range(len(f)):
MASS = False
NONBOND = False
BOND = False
ANGLE = False
TORSION = False
IMPROP = False
EQUIV = False
for line in f[i]:
if line.strip() == 'ITEM MASS':
MASS = True
elif line.strip() == 'ITEM END':
MASS = False
elif MASS:
if not line.startswith('#'):
if not line.startswith('\n'):
masses.append(line.strip().split())
if line.strip() == 'ITEM NONBOND':
NONBOND = True
elif line.strip() == 'ITEM END':
NONBOND = False
elif NONBOND:
if not line.startswith('#'):
if not line.startswith('\n'):
nonbond.append(line.strip().split())
if line.strip() == 'ITEM BOND':
BOND = True
elif line.strip() == 'ITEM END':
BOND = False
elif BOND:
if not line.startswith('#'):
if not line.startswith('\n'):
bond.append(line.strip().split())
if line.strip() == 'ITEM ANGLE':
ANGLE = True
elif line.strip() == 'ITEM END':
ANGLE = False
elif ANGLE:
if not line.startswith('#'):
if not line.startswith('\n'):
angle.append(line.strip().split())
if line.strip() == 'ITEM TORSION':
TORSION = True
elif line.strip() == 'ITEM END':
TORSION = False
elif TORSION:
if not line.startswith('#'):
if not line.startswith('\n'):
torsion.append(line.strip().split())
if line.strip() == 'ITEM IMPROP':
IMPROP = True
elif line.strip() == 'ITEM END':
IMPROP = False
elif IMPROP:
if not line.startswith('#'):
if not line.startswith('\n'):
improp.append(line.strip().split())
if line.strip() == 'ITEM EQUIVALENCE':
EQUIV = True
elif line.strip() == 'ITEM END':
EQUIV = False
elif EQUIV:
if not line.startswith('#'):
if not line.startswith('\n'):
equiv.append(line.strip().split())
### Close prm files ###
for fname in f:
fname.close()
### Sanity checks before writing LT files ###
# Check Equiv
for i in range(len(equiv)):
for j in range(len(equiv)):
if (equiv[i][0] == equiv[j][0]) and (equiv[i] != equiv[j]):
sys.stderr.write('Error: Identical atom types with different equivalences\n')
Abort()
# Check Masses
for i in range(len(masses)):
for j in range(len(masses)):
if (masses[i][0] == masses[j][0]) and (masses[i][1] != masses[j][1]):
sys.stderr.write('Error: Identical types with different mass\n')
Abort()
# Check Nonbond
for i in range(len(nonbond)):
for j in range(len(nonbond)):
if (nonbond[i][0] == nonbond[j][0]) and (nonbond[i][1] == nonbond[j][1]) and ((nonbond[i][2] != nonbond[j][2]) or (nonbond[i][3] != nonbond[j][3])):
sys.stderr.write(str(nonbond[i])+'\n'+str(nonbond[j])+'\n')
sys.stderr.write('Error: Identical types with different pair-interactions\n')
Abort()
### Remove double equivalences ###
for i in range(len(equiv)):
once = True
for j in range(len(equiv)):
if (equiv[i][0] == equiv[j][0]) and once:
once = False
elif (equiv[i][0] == equiv[j][0]):
equiv[j][1] = None
equiv[j][2] = 'duplicate'
if len(equiv[i]) < 6:
sys.stderr.write(str(equiv[i])+'\n')
sys.stderr.write('Warning: Incorrect equivalence formatting for type '+
str(equiv[i][0])+'\n'+
' Skipping type. Topology may not be complete.\n\n')
equiv[i][1] = None
equiv[i][2] = 'invalid_format'
### Check Potential Styles and Set Units ###
ChkPotential(manual_units, angle_flag, torsion_flag, improp_flag)
### Set output LT file ###
fname = 'ff_output.lt'
if name == '':
fname = ffname[0] + '.lt'
else:
fname = name + '.lt'
foutput = open(fname, 'w')
### Output to LT format ###
foutput.write('# Autogenerated by EMC 2 LT tool v'+
__version__+' on '+
str(datetime.date.today())+'\n')
foutput.write('#\n# ')
for i in range(len(sys.argv)):
foutput.write('%s ' % sys.argv[i])
foutput.write('\n')
foutput.write('#\n')
foutput.write('# Adapted from EMC by Pieter J. in \'t Veld\n')
foutput.write('# Originally written as, FFNAME:%s STYLE:%s VERSION:%s on %s %s\n' %
(ffname[0], fftype[0], version[0], created1[0], created2[0]))
foutput.write('\n')
foutput.write('%s {\n' % ffname[0])
# Charges not necessary? EMC files assign charges in SMILES, which would
# be in the per-molecule files created by the moltemplate user... not here
### Mass Info ###
foutput.write(' write_once("Data Masses") {\n')
for i in range(len(masses)):
if equiv[i][1] != None:
foutput.write(' @atom:%s %f # %s\n' %
(masses[i][0], float(masses[i][1]), masses[i][0]))
foutput.write(' } # end of atom masses\n\n')
### Equiv Info ###
# Write Equivalence
foutput.write(' # ----- EQUIVALENCE CATEGORIES for bonded interaction lookup -----\n')
for i in range(len(equiv)):
if equiv[i][1] != None:
foutput.write(' replace{ @atom:%s @atom:%s_b%s_a%s_d%s_i%s}\n' %
(equiv[i][0], equiv[i][0], equiv[i][2], equiv[i][3], equiv[i][4], equiv[i][5]))
foutput.write(' # END EQUIVALENCE\n\n')
# Sanity check equivalences vs masses
for i in range(len(equiv)):
check = None
for j in range(len(masses)):
if equiv[i][0] == masses[j][0]:
check = 'success'
if check == None:
sys.stderr.write(str(equiv[i])+'\n'+str(masses[j])+'\n')
        sys.stderr.write('Atom defined in Equivalences, but not found in Masses\n')
Abort()
# Sanity check masses vs equivalences
for i in range(len(masses)):
check = None
    for j in range(len(equiv)):
if masses[i][0] == equiv[j][0]:
check = 'success'
if check == None:
sys.stderr.write(str(masses[i])+'\n'+str(equiv[j])+'\n')
        sys.stderr.write('Atom defined in Masses, but not found in Equivalences\n')
Abort()
### Nonbonded Info ###
if pstyle == '':
sys.stderr.write('Warning: no non-bonded potential provided, assuming lj/cut/coul/long\n')
pstyle = 'lj/cut/coul/long'
foutput.write(' write_once("In Settings") {\n')
foutput.write(' # ----- Non-Bonded interactions -----\n')
# Add new types from equivalence
for i in range(len(equiv)):
once = True
for j in range(len(nonbond)):
# Get terms for new types
if (equiv[i][0] != equiv[i][1]) and (equiv[i][1] == nonbond[j][0]):
if not equiv[i][1] == nonbond[j][1]:
line = '%s %s %s %s' % (equiv[i][0], nonbond[j][1], nonbond[j][2], nonbond[j][3])
nonbond.append(line.split())
if once:
once = False
line = '%s %s %s %s' % (equiv[i][0], equiv[i][0], nonbond[j][2], nonbond[j][3])
nonbond.append(line.split())
if (equiv[i][0] != equiv[i][1]) and (equiv[i][1] == nonbond[j][1]):
line = '%s %s %s %s' % (equiv[i][0], nonbond[j][0], nonbond[j][2], nonbond[j][3])
if line.split() != nonbond[-1]:
nonbond.append(line.split())
for i in range(len(nonbond)):
atom1name = None
atom2name = None
stylename = pstyle
if pstyle == 'lj/sdk' or pstyle == 'lj/sdk/coul/long':
stylename = 'lj%s_%s' % (nonbond[i][4], nonbond[i][5])
# Cross Terms + Diagonal, normal
for j in range(len(equiv)):
if nonbond[i][0] == equiv[j][0]:
atom1name = '%s_b%s_a%s_d%s_i%s' % (nonbond[i][0], equiv[j][2], equiv[j][3], equiv[j][4], equiv[j][5])
if nonbond[i][1] == equiv[j][0]:
atom2name = '%s_b%s_a%s_d%s_i%s' % (nonbond[i][1], equiv[j][2], equiv[j][3], equiv[j][4], equiv[j][5])
if atom1name == None or atom2name == None:
sys.stderr.write(str(atom1name)+'\n'+
str(atom2name)+'\n'+
str(nonbond[i])+'\n')
sys.stderr.write('Error: Atom in Nonbonded Pairs not found in Equivalences\n')
Abort()
foutput.write(' pair_coeff @atom:%s @atom:%s %s %f %f' %
(atom1name, atom2name, stylename, float(nonbond[i][3])*econv, float(nonbond[i][2])*lconv))
foutput.write(' # %s-%s\n' % (nonbond[i][0], nonbond[i][1]))
foutput.write(' } # end of nonbonded parameters\n\n')
### Bond Info ###
if bstyle == '':
sys.stderr.write('Warning: no bond potential provided, assuming harmonic\n')
    bstyle = 'harmonic'
foutput.write(' write_once("In Settings") {\n')
foutput.write(' # ----- Bonds -----\n')
for i in range(len(bond)):
foutput.write(' bond_coeff @bond:%s-%s %s %f %f' %
(bond[i][0], bond[i][1], bstyle, float(bond[i][2])*beconv, float(bond[i][3])*lconv))
foutput.write(' # %s-%s\n' % (bond[i][0], bond[i][1]))
foutput.write(' }\n\n')
foutput.write(' write_once("Data Bonds By Type") {\n')
for i in range(len(bond)):
foutput.write(' @bond:%s-%s @atom:*_b%s_a*_d*_i* @atom:*_b%s_a*_d*_i*\n' %
(bond[i][0], bond[i][1], bond[i][0], bond[i][1]))
foutput.write(' } # end of bonds\n\n')
### Angle Info ###
if angle_flag:
if astyle == '':
sys.stderr.write('Warning: no angle potential provided, assuming harmonic\n')
        astyle = 'harmonic'
foutput.write(' write_once("In Settings") {\n')
foutput.write(' # ----- Angles -----\n')
for i in range(len(angle)):
if (len(angle[i]) > 5): # Check if extra data in angle array
foutput.write(' angle_coeff @angle:%s-%s-%s %s %f %f' %
(angle[i][0], angle[i][1], angle[i][2], str(angle[i][5]), float(angle[i][3])*aeconv, float(angle[i][4])))
foutput.write(' # %s-%s-%s\n' % (angle[i][0], angle[i][1], angle[i][2]))
else:
foutput.write(' angle_coeff @angle:%s-%s-%s %s %f %f' %
(angle[i][0], angle[i][1], angle[i][2], astyle, float(angle[i][3])*aeconv, float(angle[i][4])))
foutput.write(' # %s-%s-%s\n' % (angle[i][0], angle[i][1], angle[i][2]))
foutput.write(' }\n\n')
foutput.write(' write_once("Data Angles By Type") {\n')
for i in range(len(angle)):
foutput.write(' @angle:%s-%s-%s @atom:*_b*_a%s_d*_i* @atom:*_b*_a%s_d*_i* @atom:*_b*_a%s_d*_i*\n' %
(angle[i][0], angle[i][1], angle[i][2], angle[i][0], angle[i][1], angle[i][2]))
foutput.write(' } # end of angles\n\n')
### Torsion/Dihedral Info ###
# Incomplete
if torsion_flag:
if dstyle == '':
sys.stderr.write('Warning: no dihedral/torsion potential provided, assuming harmonic\n')
        dstyle = 'harmonic'
foutput.write(' write_once("In Settings") {\n')
foutput.write(' # ----- Dihedrals -----\n')
for i in range(len(torsion)):
foutput.write(' dihedral_coeff @dihedral:%s-%s-%s-%s %s %f %f %f %f\n' %
(torsion[i][0], torsion[i][1], torsion[i][2], torsion[i][3], dstyle, float(torsion[i][4])*deconv, float(torsion[i][5]), float(torsion[i][6])))
foutput.write(' }\n\n')
foutput.write(' write_once("Data Dihedrals By Type") {\n')
for i in range(len(torsion)):
foutput.write(' @dihedral:%s-%s-%s-%s @atom:*_b*_a*_d%s_i* @atom:*_b*_a*_d%s_i* @atom:*_b*_a*_d%s_i* @atom:*_b*_a*_d%s_i*' %
(torsion[i][0], torsion[i][1], torsion[i][2], torsion[i][3], torsion[i][0], torsion[i][1], torsion[i][2], torsion[i][3]))
foutput.write(' } # end of dihedrals\n\n')
### Improper Info ###
# Incomplete
ieconv = econv # improper coeff conversion
if improp_flag:
if istyle == '':
sys.stderr.write('Warning: no improper potential provided, assuming harmonic\n')
        istyle = 'harmonic'
foutput.write(' write_once("In Settings") {\n')
foutput.write(' # ----- Impropers -----\n')
# As discussed, a check for convention of impropers is probably needed here
for i in range(len(improp)):
foutput.write(' improper_coeff @improper:%s-%s-%s-%s %s %f %f\n' %
(improp[i][0], improp[i][1], improp[i][2], improp[i][3], istyle,
float(improp[i][4]), float(improp[i][5])))
foutput.write(' }\n\n')
foutput.write(' write_once("Data Impropers By Type") {\n')
for i in range(len(improp)):
foutput.write(' @improper:%s-%s-%s-%s @atom:*_b*_a*_d*_i%s @atom:*_b*_a*_d*_i%s @atom:*_b*_a*_d*_i%s @atom:*_b*_a*_d*_i%s' %
(improp[i][0], improp[i][1], improp[i][2], improp[i][3], improp[i][0], improp[i][1], improp[i][2], improp[i][3]))
foutput.write(' } # end of impropers\n\n')
### Initialization Info ###
sys.stderr.write('Warning: Attempting to write generic "In Init" section,\n'+
' Further modification after this script is extremely likely.\n')
WriteInit()
sys.stderr.write('Warning: The EQUIVALENCES section of the PRM files may have been converted\n'
' incorrectly. Please check the "replace {}" statements for validity.\n'
' (This conversion script has only been tested on a couple force fields\n'
' which use a specific format. You must verify the conversion.)\n')
foutput.write('} # %s\n' % ffname[0])
sys.exit()
|
import numpy as np
from Autonomous.Sensors.LIDAR import LIDAR_Interface,Utils
from threading import *
class Obstacle_Detection(Thread):
def __init__(self, lidar: LIDAR_Interface.LIDAR_Interface, min_distance: int=0, max_distance: int=5000):
self._lidar = lidar
if not self._lidar.running:
self._lidar.start()
        self.__samples_per_rev = 360  # assumed samples per scan revolution; adjust for your sensor
        self._iter_scan = self._lidar.iter_scans(self.__samples_per_rev)
self._lidar.min_distance = min_distance
self._lidar.max_distance = max_distance
super(Obstacle_Detection, self).__init__()
self.__running = False
self.__obstacle_flag = False
self.__obstacle = np.array([0,0]) # this can be an np array if need be
    def stop_thread(self):
        self.__running = False
def exit_func(self):
self.stop_thread()
self._lidar.exit_func()
def zero_sensor(self):
# needs an algorithm to find the center of the sensor
pass
def clear_obstacle_flag(self):
self.__obstacle_flag = False
self.__obstacle = (0, 0) # reset the __obstacle var
    def range_filter(self, scans, min_distance, max_distance):
x = scans[:, 1]
for i in range(len(x)):
if x[i] < min_distance:
x[i] = 0
elif x[i] > max_distance:
x[i] = 0
x = np.asarray(x)
x = np.transpose(x)
return x
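    # e.g. range_filter(scans, 200, 5000) zeroes any range closer than 200 or
    # farther than 5000 (units assumed to be mm, as with RPLidar-style sensors)
    # so those rows can be dropped downstream.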
    def segmentation(self, scans, seg_threshold):
i = 1 # incremental num
temp_val = scans[:, 1]
segment = np.zeros((len(temp_val), 3))
segment[:, 0] = temp_val
x = [temp_val[len(temp_val) - 1]]
np.asarray(x)
temp_val = np.append(temp_val, x, axis=0)
segment[:, 1] = abs(np.diff(temp_val, axis=0))
# conditions where segment threshold > 20 mm, can be changed
cond_1 = segment[:, 1] > seg_threshold
check = np.where(cond_1, 2, 1) # check where its true or false
check = check.reshape(-1, 1)
iter_seg = 1
for k in range(len(check)):
if check[k] == 2: # true
iter_seg = iter_seg + 1 # iterate to next segment
check[k] = iter_seg
elif check[k] == 1: # false: diff between 2 distances is less than threshold
check[k] = iter_seg # same segment
segment[:, 2] = check[:, 0]
segment = segment[:, 2].reshape(-1, 1)
return segment
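    # Illustrative example: with seg_threshold=20 and ranges [100, 105, 400, 405],
    # the wrapped absolute diffs are [5, 295, 5, 0], so the returned labels are
    # [[1], [2], [2], [2]]: a jump larger than the threshold starts a new segment
    # at the point where the jump is detected.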
# class properties
# @property
# def obstacle_detected_flag(self):
# return self.__obstacle_flag
@property
def detected_obstacle(self): # returns [angle, dist, segment number]
return self.__obstacle
@property
def max_distance(self):
return self._lidar.max_distance
@max_distance.setter
def max_distance(self, distance):
self._lidar.max_distance = distance
@property
def min_distance(self):
return self._lidar.min_distance
@min_distance.setter
def min_distance(self, distance):
self._lidar.min_distance = distance
# thread functions
    def start(self) -> None:
        self.__running = True
        if not self._lidar.running:
            self._lidar.start()
        super(Obstacle_Detection, self).start()
def run(self) -> None:
while self.__running:
# run the obstacle detection algorithm
scan = next(self._iter_scan)
__obstacle = np.array([(np.radians(point[1]), point[2]) for point in scan])
x = self.range_filter(__obstacle, 0, 5000)
__obstacle[:, 1] = x
__obstacle = __obstacle[np.all(__obstacle != 0, axis=1)] # removes rows with 0s
segment = self.segmentation(__obstacle, 20) # distance threshold function
# add segment value column to offset array to plot
__obstacle = np.append(__obstacle, segment, axis=1)
            self.__obstacle = __obstacle  # publish the latest [angle, dist, segment] rows
|
from tests.modules.FlaskModule.API.BaseAPITest import BaseAPITest
from datetime import datetime
from websocket import create_connection
import ssl
class DeviceQueryStatusTest(BaseAPITest):
login_endpoint = '/api/device/login'
test_endpoint = '/api/device/status'
devices = []
def setUp(self):
# Query all enabled devices
params = {}
response = self._request_with_http_auth('admin', 'admin', params, '/api/user/devices')
self.assertEqual(response.status_code, 200)
self.devices = response.json()
self.assertGreater(len(self.devices), 0)
def tearDown(self):
pass
def _login_device(self, token: str, should_fail=False):
result = self._login_with_token(token)
if should_fail:
self.assertNotEqual(200, result.status_code)
else:
self.assertEqual(200, result.status_code)
return result.json()
def test_send_status_with_no_payload_should_fail(self):
for dev in self.devices:
self.assertTrue('device_onlineable' in dev)
self.assertTrue('device_enabled' in dev)
self.assertTrue('device_token' in dev)
if dev['device_enabled']:
answer = self._post_with_token(dev['device_token'], payload=None)
self.assertEqual(400, answer.status_code)
def test_malformed_status_should_fail(self):
for dev in self.devices:
self.assertTrue('device_onlineable' in dev)
self.assertTrue('device_enabled' in dev)
self.assertTrue('device_token' in dev)
if dev['device_enabled']:
device_status = {
'wrong_status': {'field': True},
'timestamp': datetime.now().timestamp()
}
answer = self._post_with_token(dev['device_token'], payload=device_status)
self.assertEqual(400, answer.status_code)
def test_send_status_with_disabled_devices_should_fail(self):
for dev in self.devices:
self.assertTrue('device_onlineable' in dev)
self.assertTrue('device_enabled' in dev)
self.assertTrue('device_token' in dev)
if not dev['device_enabled']:
device_status = {
'status': {'field': True},
'timestamp': datetime.now().timestamp()
}
answer = self._post_with_token(dev['device_token'], payload=device_status)
self.assertEqual(401, answer.status_code)
def test_send_status_with_offline_devices_should_fail(self):
for dev in self.devices:
self.assertTrue('device_token' in dev)
if dev['device_onlineable'] and dev['device_enabled']:
device_status = {
'status': {'field': True},
'timestamp': datetime.now().timestamp()
}
answer = self._post_with_token(dev['device_token'], payload=device_status)
self.assertNotEqual(200, answer.status_code)
def test_send_status_with_wrong_payload_online_device_should_fail(self):
for dev in self.devices:
self.assertTrue('device_token' in dev)
if dev['device_onlineable'] and dev['device_enabled']:
login_info = self._login_device(dev['device_token'])
self.assertTrue('websocket_url' in login_info)
# Connect websocket, not verifying ssl
ws = create_connection(login_info['websocket_url'], sslopt={'cert_reqs': ssl.CERT_NONE})
self.assertTrue(ws.connected)
device_status = {
'wrong_status': {'field': True},
'timestamp': datetime.now().timestamp()
}
answer = self._post_with_token(dev['device_token'], payload=device_status)
self.assertEqual(400, answer.status_code)
def test_send_status_with_good_payload_online_device_should_work(self):
for dev in self.devices:
self.assertTrue('device_token' in dev)
if dev['device_onlineable'] and dev['device_enabled']:
login_info = self._login_device(dev['device_token'])
self.assertTrue('websocket_url' in login_info)
# Connect websocket, not verifying ssl
ws = create_connection(login_info['websocket_url'], sslopt={'cert_reqs': ssl.CERT_NONE})
self.assertTrue(ws.connected)
device_status = {
'status': {'field': True},
'timestamp': datetime.now().timestamp()
}
answer = self._post_with_token(dev['device_token'], payload=device_status)
self.assertEqual(200, answer.status_code)
result = answer.json()
uuid = result['uuid']
self.assertEqual(dev['device_uuid'], uuid)
del result['uuid']
# self.assertEqual(result, device_status)
ws.close()
|
""" Contains the classes that manages Las PointRecords
Las PointRecords are represented using Numpy's structured arrays,
The PointRecord classes provide a few extra things to manage these arrays
in the context of Las point data
"""
import logging
from typing import NoReturn
import numpy as np
from . import dims
from .dims import ScaledArrayView
from .. import errors
from ..point import PointFormat
logger = logging.getLogger(__name__)
def scale_dimension(array_dim, scale, offset):
return (array_dim * scale) + offset
def unscale_dimension(array_dim, scale, offset):
return np.round((np.array(array_dim) - offset) / scale)
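# Round-trip sketch (values chosen for illustration):
#   scale_dimension(np.array([100, 200]), 0.01, 10.0)   -> array([11., 12.])
#   unscale_dimension(np.array([11., 12.]), 0.01, 10.0) -> array([100., 200.])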
def raise_not_enough_bytes_error(
expected_bytes_len, missing_bytes_len, point_data_buffer_len, points_dtype
) -> NoReturn:
raise errors.PylasError(
"The file does not contain enough bytes to store the expected number of points\n"
"expected {} bytes, read {} bytes ({} bytes missing == {} points) and it cannot be corrected\n"
"{} (bytes) / {} (point_size) = {} (points)".format(
expected_bytes_len,
point_data_buffer_len,
missing_bytes_len,
missing_bytes_len / points_dtype.itemsize,
point_data_buffer_len,
points_dtype.itemsize,
point_data_buffer_len / points_dtype.itemsize,
)
)
class PackedPointRecord:
"""
    In the PackedPointRecord, fields that are combinations of many sub-fields (fields stored on less than a byte)
    are still packed together and are only de-packed and re-packed when accessed.
    This uses less memory than if the sub-fields were unpacked.
>>> #return number is a sub-field
>>> from pylas import PointFormat
>>> packed_point_record = PackedPointRecord.zeros(PointFormat(0), 10)
>>> return_number = packed_point_record['return_number']
>>> return_number
<SubFieldView([0 0 0 0 0 0 0 0 0 0])>
>>> return_number[:] = 1
    >>> np.all(packed_point_record['return_number'] == 1)
True
"""
def __init__(self, data: np.ndarray, point_format: PointFormat):
self.array = data
self.point_format = point_format
self.sub_fields_dict = dims.get_sub_fields_dict(point_format.id)
@property
def point_size(self):
"""Returns the point size in bytes taken by each points of the record
Returns
-------
int
The point size in byte
"""
return self.array.dtype.itemsize
@classmethod
def zeros(cls, point_format, point_count):
"""Creates a new point record with all dimensions initialized to zero
Parameters
----------
        point_format: PointFormat
            The point format the point record should have
        point_count : int
            The number of points the point record should have
Returns
-------
PackedPointRecord
"""
data = np.zeros(point_count, point_format.dtype())
return cls(data, point_format)
@classmethod
def empty(cls, point_format):
"""Creates an empty point record.
Parameters
----------
        point_format: pylas.PointFormat
            The point format the point record should have
Returns
-------
PackedPointRecord
"""
return cls.zeros(point_format, point_count=0)
@classmethod
def from_point_record(
cls, other_point_record: "PackedPointRecord", new_point_format: PointFormat
) -> "PackedPointRecord":
"""Construct a new PackedPointRecord from an existing one with the ability to change
to point format while doing so
"""
array = np.zeros_like(other_point_record.array, dtype=new_point_format.dtype())
new_record = cls(array, new_point_format)
new_record.copy_fields_from(other_point_record)
return new_record
@classmethod
def from_buffer(cls, buffer, point_format, count, offset=0):
points_dtype = point_format.dtype()
data = np.frombuffer(buffer, dtype=points_dtype, offset=offset, count=count)
return cls(data, point_format)
def copy_fields_from(self, other_record: "PackedPointRecord") -> None:
"""Tries to copy the values of the current dimensions from other_record"""
for dim_name in self.point_format.dimension_names:
try:
self[dim_name] = np.array(other_record[dim_name])
except ValueError:
pass
def memoryview(self) -> memoryview:
return memoryview(self.array)
def resize(self, new_size: int) -> None:
size_diff = new_size - len(self.array)
if size_diff > 0:
self.array = np.append(
self.array, np.zeros(size_diff, dtype=self.array.dtype)
)
elif size_diff < 0:
            self.array = self.array[:new_size].copy()
def _append_zeros_if_too_small(self, value):
"""Appends zeros to the points stored if the value we are trying to
fit is bigger
"""
size_diff = len(value) - len(self.array)
if size_diff > 0:
            self.resize(len(value))
def __eq__(self, other):
return self.point_format == other.point_format and np.all(
self.array == other.array
)
def __len__(self):
return self.array.shape[0]
def __getitem__(self, item):
"""Gives access to the underlying numpy array
        Unpack the dimension if item is the name of a sub-field
"""
if isinstance(item, (int, slice, np.ndarray)):
return PackedPointRecord(self.array[item], self.point_format)
# 1) Is it a sub field ?
try:
composed_dim, sub_field = self.sub_fields_dict[item]
return dims.SubFieldView(self.array[composed_dim], sub_field.mask)
except KeyError:
pass
# 2) Is it a Scaled Extra Byte Dimension ?
try:
dim_info = self.point_format.dimension_by_name(item)
if dim_info.is_standard is False:
if dim_info.scales is not None or dim_info.offsets is not None:
scale = (
np.ones(dim_info.num_elements, np.float64)
if dim_info.scales is None
else dim_info.scales[: dim_info.num_elements]
)
offset = (
np.zeros(dim_info.num_elements, np.float64)
if dim_info.offsets is None
else dim_info.offsets[: dim_info.num_elements]
)
return ScaledArrayView(self.array[item], scale, offset)
except ValueError:
pass
return self.array[item]
def __setitem__(self, key, value):
"""Sets elements in the array"""
self._append_zeros_if_too_small(value)
if isinstance(key, str):
self[key][:] = value
else:
self.array[key] = value
def __getattr__(self, item):
try:
return self[item]
except ValueError:
raise AttributeError("{} is not a valid dimension".format(item)) from None
def __repr__(self):
return "<{}(fmt: {}, len: {}, point size: {})>".format(
self.__class__.__name__,
self.point_format,
len(self),
self.point_format.size,
)
def apply_new_scaling(record, scales: np.ndarray, offsets: np.ndarray) -> None:
record["X"] = unscale_dimension(np.asarray(record.x), scales[0], offsets[0])
record["Y"] = unscale_dimension(np.asarray(record.y), scales[1], offsets[1])
record["Z"] = unscale_dimension(np.asarray(record.x), scales[2], offsets[2])
class ScaleAwarePointRecord(PackedPointRecord):
def __init__(self, array, point_format, scales, offsets):
super().__init__(array, point_format)
self.scales = scales
self.offsets = offsets
    def change_scaling(self, scales=None, offsets=None) -> None:
        if scales is None:
            scales = self.scales
        if offsets is None:
            offsets = self.offsets
        apply_new_scaling(self, scales, offsets)
        self.scales = scales
        self.offsets = offsets
def __getitem__(self, item):
if isinstance(item, (slice, np.ndarray)):
return ScaleAwarePointRecord(
self.array[item], self.point_format, self.scales, self.offsets
)
if item == "x":
return ScaledArrayView(self.array["X"], self.scales[0], self.offsets[0])
elif item == "y":
return ScaledArrayView(self.array["Y"], self.scales[1], self.offsets[1])
elif item == "z":
return ScaledArrayView(self.array["Z"], self.scales[2], self.offsets[2])
else:
return super().__getitem__(item)
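# Minimal usage sketch (values assumed): with scales = [0.01, 0.01, 0.01] and
# offsets = [0.0, 0.0, 0.0], record["X"] exposes the raw stored integers
# (e.g. [100, 200]) while record["x"] yields the scaled view ([1.0, 2.0]).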
|
from typing import List
from wingedsheep.carcassonne.objects.connection import Connection
from wingedsheep.carcassonne.objects.farmer_connection import FarmerConnection
from wingedsheep.carcassonne.objects.farmer_side import FarmerSide
from wingedsheep.carcassonne.objects.side import Side
class SideModificationUtil:
@classmethod
def turn_side(cls, side: Side, times: int) -> Side:
result: Side
if times == 0:
return side
if side == Side.TOP:
result = Side.RIGHT
elif side == Side.RIGHT:
result = Side.BOTTOM
elif side == Side.BOTTOM:
result = Side.LEFT
elif side == Side.LEFT:
result = Side.TOP
elif side == Side.CENTER:
result = Side.CENTER
elif side == Side.TOP_LEFT:
result = Side.TOP_RIGHT
elif side == Side.TOP_RIGHT:
result = Side.BOTTOM_RIGHT
elif side == Side.BOTTOM_RIGHT:
result = Side.BOTTOM_LEFT
else: # side == Side.BOTTOM_LEFT
result = Side.TOP_LEFT
if times > 1:
return cls.turn_side(result, times - 1)
return result
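    # e.g. turn_side(Side.TOP, 1) == Side.RIGHT and
    # turn_side(Side.TOP_LEFT, 2) == Side.BOTTOM_RIGHT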
@classmethod
def opposite_side(cls, side: Side):
return cls.turn_side(side, 2)
@classmethod
    def turn_sides(cls, sides: List[Side], times: int) -> List[Side]:
return list(map(lambda side: cls.turn_side(side, times), sides))
@classmethod
def turn_farmer_side(cls, farmer_side: FarmerSide, times: int) -> FarmerSide:
result: FarmerSide
if times == 0:
return farmer_side
if farmer_side == FarmerSide.TLL:
result = FarmerSide.TRT
elif farmer_side == FarmerSide.TLT:
result = FarmerSide.TRR
elif farmer_side == FarmerSide.TRT:
result = FarmerSide.BRR
elif farmer_side == FarmerSide.TRR:
result = FarmerSide.BRB
elif farmer_side == FarmerSide.BRR:
result = FarmerSide.BLB
elif farmer_side == FarmerSide.BRB:
result = FarmerSide.BLL
elif farmer_side == FarmerSide.BLB:
result = FarmerSide.TLL
else: # farmer_side == FarmerSide.BLL:
result = FarmerSide.TLT
if times > 1:
return cls.turn_farmer_side(result, times - 1)
return result
@classmethod
    def turn_farmer_sides(cls, farmer_sides: List[FarmerSide], times: int) -> List[FarmerSide]:
return list(map(lambda farmer_side: cls.turn_farmer_side(farmer_side, times), farmer_sides))
@classmethod
def opposite_farmer_side(cls, farmer_side: FarmerSide) -> FarmerSide:
if farmer_side == FarmerSide.TLL:
return FarmerSide.TRR
elif farmer_side == FarmerSide.TLT:
return FarmerSide.BLB
elif farmer_side == FarmerSide.TRT:
return FarmerSide.BRR
elif farmer_side == FarmerSide.TRR:
return FarmerSide.TLL
elif farmer_side == FarmerSide.BRR:
return FarmerSide.BLL
elif farmer_side == FarmerSide.BRB:
return FarmerSide.TRT
elif farmer_side == FarmerSide.BLB:
return FarmerSide.TLT
else: # farmer_side == FarmerSide.BLL:
return FarmerSide.BRR
@classmethod
def turn_farmer_connection(cls, farmer_connection: FarmerConnection, times: int):
return FarmerConnection(
farmer_positions=cls.turn_sides(farmer_connection.farmer_positions, times),
tile_connections=cls.turn_farmer_sides(farmer_connection.tile_connections, times),
city_sides=cls.turn_sides(farmer_connection.city_sides, times)
)
@classmethod
def turn_connection(cls, connection: Connection, times: int) -> Connection:
return Connection(cls.turn_side(connection.a, times), cls.turn_side(connection.b, times))
|
from __future__ import unicode_literals
from datetime import datetime, date, timedelta
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import Group
from django.contrib.auth.tokens import default_token_generator
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.mail import send_mail
from django.db.models import Q, Max
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic import TemplateView, ListView, DetailView, CreateView, UpdateView, DeleteView, FormView
from django.urls import reverse_lazy
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from extra_views import ModelFormSetView
from itertools import chain
import pdfkit
import re
from actions.models import Action
from applications import forms as apps_forms
from applications.models import (
Application, Referral, Condition, Compliance, Vessel, Location, Record, PublicationNewspaper,
PublicationWebsite, PublicationFeedback, Communication, Delegate, OrganisationContact, OrganisationPending, OrganisationExtras, CommunicationAccount,CommunicationOrganisation, ComplianceGroup,CommunicationCompliance, StakeholderComms, ApplicationLicenceFee, Booking, DiscountReason,BookingInvoice)
from applications.workflow import Flow
from applications.views_sub import Application_Part5, Application_Emergency, Application_Permit, Application_Licence, Referrals_Next_Action_Check, FormsList
from applications.email import sendHtmlEmail, emailGroup, emailApplicationReferrals
from applications.validationchecks import Attachment_Extension_Check, is_json
from applications.utils import get_query, random_generator
from applications import utils
from ledger.accounts.models import EmailUser, Address, Organisation, Document, OrganisationAddress, PrivateDocument
from approvals.models import Approval, CommunicationApproval
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import math
from django.shortcuts import redirect
from django.template import RequestContext
from django.template.loader import get_template
from statdev.context_processors import template_context
import json
import os.path
from applications.views_pdf import PDFtool
import mimetypes
from django.middleware.csrf import get_token
from django.shortcuts import render, get_object_or_404, redirect
from decimal import Decimal
from ledger.payments.models import Invoice
from oscar.apps.order.models import Order
from ledger.basket.models import Basket
from applications.invoice_pdf import create_invoice_pdf_bytes
from ledger.payments.mixins import InvoiceOwnerMixin
from django.views.generic.base import View, TemplateView
import pathlib
from django.core.files.base import ContentFile
from django.utils.crypto import get_random_string
import base64
import requests
class HomePage(TemplateView):
    # preparation to replace the old homepage with screen designs.
template_name = 'applications/home_page.html'
def render_to_response(self, context):
        if self.request.user.is_authenticated:
            if not (len(self.request.user.first_name) > 0 and self.request.user.identification2 is not None):
                return HttpResponseRedirect(reverse('first_login_info_steps', args=(self.request.user.id, 1)))
template = get_template(self.template_name)
#context = RequestContext(self.request, context)
context['csrf_token_value'] = get_token(self.request)
return HttpResponse(template.render(context))
def get_context_data(self, **kwargs):
context = super(HomePage, self).get_context_data(**kwargs)
context = template_context(self.request)
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
# mypdf = MyPDF()
# mypdf.get_li()
#pdftool = PDFtool()
#pdftool.generate_part5()
#pdftool.generate_permit()
#pdftool.generate_section_84()
#pdftool.generate_licence()
context['referee'] = 'no'
referee = Group.objects.get(name='Statdev Referee')
if referee in self.request.user.groups.all():
context['referee'] = 'yes'
# Have to manually populate when using render_to_response()
context['messages'] = messages.get_messages(self.request)
context['request'] = self.request
context['user'] = self.request.user
fl = FormsList()
if 'action' in self.kwargs:
action = self.kwargs['action']
else:
action = ''
if self.request.user.is_authenticated:
if action == '':
context = fl.get_application(self,self.request.user.id,context)
context['home_nav_other_applications'] = 'active'
elif action == 'approvals':
context = fl.get_approvals(self,self.request.user.id,context)
context['home_nav_other_approvals'] = 'active'
elif action == 'clearance':
context = fl.get_clearance(self,self.request.user.id,context)
context['home_nav_other_clearance'] = 'active'
elif action == 'referrals':
context['home_nav_other_referral'] = 'active'
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
for se_wo in query_str_split:
search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
context['items'] = Referral.objects.filter(referee=self.request.user).exclude(status=5).order_by('-id')
#for i in Application.APP_TYPE_CHOICES:
# if i[0] in [4,5,6,7,8,9,10,11]:
# skip = 'yes'
# else:
# APP_TYPE_CHOICES.append(i)
# APP_TYPE_CHOICES_IDS.append(i[0])
#context['app_apptypes']= APP_TYPE_CHOICES
#applications = Application.objects.filter(app_type__in=APP_TYPE_CHOICES_IDS)
#print applications
return context
class HomePageOLD(LoginRequiredMixin, TemplateView):
# TODO: rename this view to something like UserDashboard.
template_name = 'applications/home_page.html'
def get_context_data(self, **kwargs):
        context = super(HomePageOLD, self).get_context_data(**kwargs)
if Application.objects.filter(assignee=self.request.user).exclude(state__in=[Application.APP_STATE_CHOICES.issued, Application.APP_STATE_CHOICES.declined]).exists():
applications_wip = Application.objects.filter(
assignee=self.request.user).exclude(state__in=[Application.APP_STATE_CHOICES.issued, Application.APP_STATE_CHOICES.declined])
context['applications_wip'] = self.create_applist(applications_wip)
#if Application.objects.filter(assignee=self.request.user).exclude(state__in=[Application.APP_STATE_CHOICES.issued, Application.APP_STATE_CHOICES.declined]).exists():
# userGroups = self.request.user.groups.all()
userGroups = []
for g in self.request.user.groups.all():
userGroups.append(g.name)
applications_groups = Application.objects.filter(group__name__in=userGroups).exclude(state__in=[Application.APP_STATE_CHOICES.issued, Application.APP_STATE_CHOICES.declined])
context['applications_groups'] = self.create_applist(applications_groups)
if Application.objects.filter(applicant=self.request.user).exists():
applications_submitted = Application.objects.filter(
applicant=self.request.user).exclude(assignee=self.request.user)
context['applications_submitted'] = self.create_applist(applications_submitted)
if Referral.objects.filter(referee=self.request.user).exists():
context['referrals'] = Referral.objects.filter(
referee=self.request.user, status=Referral.REFERRAL_STATUS_CHOICES.referred)
# TODO: any restrictions on who can create new applications?
context['may_create'] = True
# Processor users only: show unassigned applications.
processor = Group.objects.get(name='Statdev Processor')
if processor in self.request.user.groups.all() or self.request.user.is_superuser:
if Application.objects.filter(assignee__isnull=True, state=Application.APP_STATE_CHOICES.with_admin).exists():
applications_unassigned = Application.objects.filter(
assignee__isnull=True, state=Application.APP_STATE_CHOICES.with_admin)
context['applications_unassigned'] = self.create_applist(applications_unassigned)
# Rule: admin officers may self-assign applications.
context['may_assign_processor'] = True
return context
def create_applist(self, applications):
usergroups = self.request.user.groups.all()
app_list = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
if app.group in usergroups:
if app.group is not None:
row['may_assign_to_person'] = 'True'
app_list.append(row)
return app_list
class PopupNotification(TemplateView):
template_name = 'applications/popup-notification.html'
def get(self, request, *args, **kwargs):
#messages.error(self.request,"Please complete at least one phone number")
#messages.success(self.request,"Please complete at least one phone number")
#messages.warning(self.request,"Please complete at least one phone number")
return super(PopupNotification, self).get(request, *args, **kwargs)
class NotificationInsidePopup(TemplateView):
template_name = 'applications/popup-inside-notification.html'
def get(self, request, *args, **kwargs):
#messages.error(self.request,"Please complete at least one phone number")
return super(NotificationInsidePopup, self).get(request, *args, **kwargs)
class FirstLoginInfo(LoginRequiredMixin,CreateView):
template_name = 'applications/firstlogin.html'
model = EmailUser
form_class = apps_forms.FirstLoginInfoForm
def get(self, request, *args, **kwargs):
return super(FirstLoginInfo, self).get(request, *args, **kwargs)
def get_initial(self):
initial = super(FirstLoginInfo, self).get_initial()
#initial['action'] = self.kwargs['action']
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = self.get_object().application_set.first()
return HttpResponseRedirect(app.get_absolute_url())
return super(FirstLoginInfo, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
forms_data = form.cleaned_data
action = self.kwargs['action']
nextstep = ''
apply_on_behalf_of = 0
app = Application.objects.get(pk=self.object.pk)
        return HttpResponseRedirect(app.get_absolute_url())  # assumed redirect target; success_url was undefined
class setUrl():
value = ''
url = ''
path = ''
def __repr__(self):
return self.value
class FirstLoginInfoSteps(LoginRequiredMixin,UpdateView):
template_name = 'applications/firstlogin.html'
model = EmailUser
form_class = apps_forms.FirstLoginInfoForm
def get(self, request, *args, **kwargs):
pk = int(kwargs['pk'])
        if not (request.user.is_staff or request.user.is_superuser or request.user.id == pk):
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(FirstLoginInfoSteps, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(FirstLoginInfoSteps, self).get_context_data(**kwargs)
step = self.kwargs['step']
if step == '1':
context['step1'] = 'active'
context['step2'] = 'disabled'
context['step3'] = 'disabled'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '2':
context['step2'] = 'active'
context['step3'] = 'disabled'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '3':
context['step3'] = 'active'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '4':
context['step4'] = 'active'
context['step5'] = 'disabled'
elif step == '5':
context['step5'] = 'active'
return context
def get_initial(self):
initial = super(FirstLoginInfoSteps, self).get_initial()
person = self.get_object()
# initial['action'] = self.kwargs['action']
# print self.kwargs['step']
step = self.kwargs['step']
if person.identification2:
#person.identification2.upload.url = '/jason/jhaso'
url_data = setUrl()
url_data.url = "/private-ledger/view/"+str(person.identification2.id)+'-'+person.identification2.name+'.'+person.identification2.extension
url_data.value = str(person.identification2.id)+'-'+person.identification2.name+'.'+person.identification2.extension
initial['identification2'] = url_data
#initial['identification2'] = person.identification2.upload
if step == '3':
if self.object.postal_address is None:
initial['country'] = 'AU'
initial['state'] = 'WA'
else:
postal_address = Address.objects.get(id=self.object.postal_address.id)
initial['line1'] = postal_address.line1
initial['line2'] = postal_address.line2
initial['line3'] = postal_address.line3
initial['locality'] = postal_address.locality
initial['state'] = postal_address.state
initial['country'] = postal_address.country
initial['postcode'] = postal_address.postcode
initial['step'] = self.kwargs['step']
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = self.get_object().application_set.first()
return HttpResponseRedirect(app.get_absolute_url())
return super(FirstLoginInfoSteps, self).post(request, *args, **kwargs)
def form_valid(self, form):
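        """Persist the current step's data (postal address, phone numbers,
        identification upload via the ledger API), then redirect to the
        previous or next step; after the final step the user is sent home,
        to the company-creation wizard, or back to the application."""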
self.object = form.save(commit=False)
forms_data = form.cleaned_data
step = self.kwargs['step']
app_id = None
if 'application_id' in self.kwargs:
app_id = self.kwargs['application_id']
if step == '3':
if self.object.postal_address is None:
postal_address = Address.objects.create(line1=forms_data['line1'],
line2=forms_data['line2'],
line3=forms_data['line3'],
locality=forms_data['locality'],
state=forms_data['state'],
country=forms_data['country'],
postcode=forms_data['postcode'],
user=self.object
)
self.object.postal_address = postal_address
else:
postal_address = Address.objects.get(id=self.object.postal_address.id)
postal_address.line1 = forms_data['line1']
postal_address.line2 = forms_data['line2']
postal_address.line3 = forms_data['line3']
postal_address.locality = forms_data['locality']
postal_address.state = forms_data['state']
postal_address.country = forms_data['country']
postal_address.postcode = forms_data['postcode']
postal_address.save()
if step == '4':
if self.object.mobile_number is None:
self.object.mobile_number = ""
if self.object.phone_number is None:
self.object.phone_number = ""
if len(self.object.mobile_number) == 0 and len(self.object.phone_number) == 0:
messages.error(self.request,"Please complete at least one phone number")
if app_id is None:
return HttpResponseRedirect(reverse('first_login_info_steps',args=(self.object.pk, step)))
else:
return HttpResponseRedirect(reverse('first_login_info_steps_application',args=(self.object.pk, step, app_id)))
# Upload New Files
if self.request.FILES.get('identification2'): # Uploaded new file.
doc = Document()
if Attachment_Extension_Check('single', forms_data['identification2'], ['.jpg','.png','.pdf']) is False:
                raise ValidationError('Identification contains an unallowed attachment extension.')
identification2_file = self.request.FILES['identification2']
data = base64.b64encode(identification2_file.read())
filename=forms_data['identification2'].name
api_key = settings.LEDGER_API_KEY
url = settings.LEDGER_API_URL+'/ledgergw/remote/documents/update/'+api_key+'/'
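            # Derive the file extension (handles three- and four-character extensions).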
extension =''
if filename[-4:][:-3] == '.':
extension = filename[-3:]
if filename[-5:][:-4] == '.':
extension = filename[-4:]
base64_url = "data:"+mimetypes.types_map['.'+str(extension)]+";base64,"+data.decode()
myobj = {'emailuser_id' :self.object.pk,'filebase64': base64_url, 'extension': extension, 'file_group_id': 1}
try:
resp = requests.post(url, data = myobj)
# temporary until all EmailUser Updates go via api.
eu_obj = EmailUser.objects.get(id=self.object.pk)
self.object.identification2=eu_obj.identification2
            except Exception:
messages.error(self.request, 'Error Saving Identification File')
if app_id is None:
return HttpResponseRedirect(reverse('first_login_info_steps',args=(self.object.pk, step)))
else:
return HttpResponseRedirect(reverse('first_login_info_steps_application',args=(self.object.pk, step, app_id)))
# temporary until all EmailUser Updates go via api.
eu_obj = EmailUser.objects.get(id=self.object.pk)
self.object.identification2=eu_obj.identification2
#print (image_string)
#doc.file = forms_data['identification2']
#doc.name = forms_data['identification2'].name
#doc.save()
#self.object.identification2 = doc
self.object.save()
nextstep = 1
# action = self.kwargs['action']
if self.request.POST.get('prev-step'):
if step == '1':
nextstep = 1
elif step == '2':
nextstep = 1
elif step == '3':
nextstep = 2
elif step == '4':
nextstep = 3
elif step == '5':
nextstep = 4
else:
if step == '1':
nextstep = 2
elif step == '2':
nextstep = 3
elif step == '3':
nextstep = 4
elif step == '4':
nextstep = 5
else:
nextstep = 6
if nextstep == 6:
#print forms_data['manage_permits']
if forms_data['manage_permits'] == 'True':
                messages.success(self.request, 'Registration is complete. Please now complete the company form.')
#return HttpResponseRedirect(reverse('company_create_link', args=(self.request.user.id,'1')))
if app_id is None:
return HttpResponseRedirect(reverse('company_create_link', args=(self.object.pk,'1')))
else:
return HttpResponseRedirect(reverse('company_create_link_application', args=(self.object.pk,'1',app_id)))
else:
messages.success(self.request, 'Registration is now complete.')
if app_id is None:
return HttpResponseRedirect(reverse('home_page'))
else:
                    if self.request.user.is_staff:
app = Application.objects.get(id=app_id)
app.applicant = self.object
app.save()
return HttpResponseRedirect(reverse('application_update', args=(app_id,)))
else:
if app_id is None:
return HttpResponseRedirect(reverse('first_login_info_steps',args=(self.object.pk, nextstep)))
else:
return HttpResponseRedirect(reverse('first_login_info_steps_application',args=(self.object.pk, nextstep, app_id)))
class CreateLinkCompany(LoginRequiredMixin,CreateView):
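    """Multi-step wizard to link the user to an existing Organisation (via
    PINs) or to request creation of a new one (OrganisationPending)."""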
template_name = 'applications/companycreatelink.html'
model = EmailUser
form_class = apps_forms.CreateLinkCompanyForm
def get(self, request, *args, **kwargs):
return super(CreateLinkCompany, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(CreateLinkCompany, self).get_context_data(**kwargs)
step = self.kwargs['step']
context['user_id'] = self.kwargs['pk']
if 'po_id' in self.kwargs:
context['po_id'] = self.kwargs['po_id']
else:
context['po_id'] = 0
if step == '1':
context['step1'] = 'active'
context['step2'] = 'disabled'
context['step3'] = 'disabled'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '2':
context['step2'] = 'active'
context['step3'] = 'disabled'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '3':
context['step3'] = 'active'
context['step4'] = 'disabled'
context['step5'] = 'disabled'
elif step == '4':
context['step4'] = 'active'
context['step5'] = 'disabled'
elif step == '5':
context['step5'] = 'active'
context['messages'] = messages.get_messages(self.request)
return context
def get_initial(self):
initial = super(CreateLinkCompany, self).get_initial()
step = self.kwargs['step']
initial['step'] = self.kwargs['step']
initial['company_exists'] = ''
pending_org = None
if 'po_id' in self.kwargs:
po_id = self.kwargs['po_id']
if po_id:
pending_org = OrganisationPending.objects.get(id=po_id)
initial['company_name'] = pending_org.name
initial['abn'] = pending_org.abn
initial['pin1'] = pending_org.pin1
initial['pin2'] = pending_org.pin2
if step == '2':
if 'abn' in initial:
abn = initial['abn']
try:
if Organisation.objects.filter(abn=abn).exists():
                        company = Organisation.objects.get(abn=abn)
if OrganisationExtras.objects.filter(organisation=company.id).exists():
companyextras = OrganisationExtras.objects.get(organisation=company.id)
initial['company_id'] = company.id
initial['company_exists'] = 'yes'
listusers = Delegate.objects.filter(organisation__id=company.id)
delegate_people = ''
for lu in listusers:
if delegate_people == '':
delegate_people = lu.email_user.first_name + ' '+ lu.email_user.last_name
else:
delegate_people = delegate_people + ', ' + lu.email_user.first_name + ' ' + lu.email_user.last_name
initial['company_delegates'] = delegate_people
else:
initial['company_exists'] = 'no'
else:
initial['company_exists'] = 'no'
except Organisation.DoesNotExist:
initial['company_exists'] = 'no'
# try:
# companyextras = OrganisationExtras.objects.get(id=company.id)
# except OrganisationExtras.DoesNotExist:
# initial['company_exists'] = 'no'
if pending_org is not None:
if pending_org.identification:
initial['identification'] = pending_org.identification.upload
if step == '3':
if pending_org.pin1 and pending_org.pin2:
if Organisation.objects.filter(abn=pending_org.abn).exists():
company = Organisation.objects.get(abn=pending_org.abn)
if OrganisationExtras.objects.filter(organisation=company, pin1=pending_org.pin1,pin2=pending_org.pin2).exists():
initial['postal_line1'] = company.postal_address.line1
initial['postal_line2'] = company.postal_address.line2
initial['postal_line3'] = company.postal_address.line3
initial['postal_locality'] = company.postal_address.locality
initial['postal_state'] = company.postal_address.state
initial['postal_country'] = company.postal_address.country
initial['postal_postcode'] = company.postal_address.postcode
initial['billing_line1'] = company.billing_address.line1
initial['billing_line2'] = company.billing_address.line2
initial['billing_line3'] = company.billing_address.line3
initial['billing_locality'] = company.billing_address.locality
initial['billing_state'] = company.billing_address.state
initial['billing_country'] = company.billing_address.country
initial['billing_postcode'] = company.billing_address.postcode
else:
                    if pending_org.postal_address is not None:
                        postal_address = OrganisationAddress.objects.get(id=pending_org.postal_address.id)
initial['postal_line1'] = postal_address.line1
initial['postal_line2'] = postal_address.line2
initial['postal_line3'] = postal_address.line3
initial['postal_locality'] = postal_address.locality
initial['postal_state'] = postal_address.state
initial['postal_country'] = postal_address.country
initial['postal_postcode'] = postal_address.postcode
else:
initial['postal_state'] = 'WA'
initial['postal_country'] = 'AU'
                    if pending_org.billing_address is not None:
                        billing_address = OrganisationAddress.objects.get(id=pending_org.billing_address.id)
                        initial['billing_line1'] = billing_address.line1
initial['billing_line2'] = billing_address.line2
initial['billing_line3'] = billing_address.line3
initial['billing_locality'] = billing_address.locality
initial['billing_state'] = billing_address.state
initial['billing_country'] = billing_address.country
initial['billing_postcode'] = billing_address.postcode
else:
initial['billing_state'] = 'WA'
initial['billing_country'] = 'AU'
if step == '4':
initial['company_exists'] = 'no'
if pending_org.pin1 and pending_org.pin2:
if Organisation.objects.filter(abn=pending_org.abn).exists():
initial['company_exists'] = 'yes'
return initial
def post(self, request, *args, **kwargs):
#messages.error(self.request, 'Invalid Pins ')
#print request.path
step = self.kwargs['step']
if step == '2':
company_exists = 'no'
if 'company_exists' in request.POST:
company_exists = request.POST['company_exists']
if company_exists == 'yes':
company_id = request.POST['company_id']
pin1 = request.POST['pin1']
pin2 = request.POST['pin2']
pin1 = pin1.replace(" ", "")
pin2 = pin2.replace(" ", "")
comp = Organisation.objects.get(id=company_id)
if OrganisationExtras.objects.filter(organisation=comp, pin1=pin1,pin2=pin2).exists():
messages.success(self.request, 'Company Pins Correct')
else:
messages.error(self.request, 'Incorrect Company Pins')
return HttpResponseRedirect(request.path)
else:
if 'identification' in request.FILES:
if Attachment_Extension_Check('single', request.FILES['identification'], ['.pdf','.png','.jpg']) is False:
                    messages.error(self.request, 'Identification contains an unallowed attachment extension.')
return HttpResponseRedirect(request.path)
if request.POST.get('cancel'):
app = self.get_object().application_set.first()
return HttpResponseRedirect(app.get_absolute_url())
return super(CreateLinkCompany, self).post(request, *args, **kwargs)
def form_valid(self, form):
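        """Persist the current step's data on the OrganisationPending record
        (name/ABN, PINs or identification, addresses); at the final step the
        company is either linked automatically when PINs were verified, or
        submitted for staff approval."""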
self.object = form.save(commit=False)
forms_data = form.cleaned_data
pk = self.kwargs['pk']
step = self.kwargs['step']
pending_org = None
if 'po_id' in self.kwargs:
po_id = self.kwargs['po_id']
if po_id:
pending_org = OrganisationPending.objects.get(id=po_id)
if step == '1':
abn = self.request.POST.get('abn')
company_name = self.request.POST.get('company_name')
if pending_org:
pending_org.name = company_name
pending_org.abn = abn
pending_org.save()
else:
user = EmailUser.objects.get(pk=pk)
pending_org = OrganisationPending.objects.create(name=company_name,abn=abn,email_user=user)
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.create,
action='Organisation Link/Creation Started')
action.save()
if step == '2':
company_exists = forms_data['company_exists']
if company_exists == 'yes':
# print "COMP"
company_id = forms_data['company_id']
pin1 = forms_data['pin1']
pin2 = forms_data['pin2']
pin1 = pin1.replace(" ", "")
pin2 = pin2.replace(" ", "")
comp = Organisation.objects.get(id=company_id)
if OrganisationExtras.objects.filter(organisation=comp, pin1=pin1,pin2=pin2).exists():
pending_org.pin1 = pin1
pending_org.pin2 = pin2
pending_org.company_exists = True
pending_org.save()
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Organisation Pins Verified')
action.save()
#else:
#print "INCORR"
#,id=company_id)
# print "YESYY"
# print forms_data['pin1']
# print forms_data['pin2']
else:
if forms_data['identification']:
doc = Record()
if Attachment_Extension_Check('single', forms_data['identification'], ['.pdf','.png','.jpg']) is False:
                        raise ValidationError('Identification contains an unallowed attachment extension.')
doc.upload = forms_data['identification']
doc.name = forms_data['identification'].name
doc.save()
pending_org.identification = doc
pending_org.company_exists = False
pending_org.save()
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Identification Added')
action.save()
if step == '3':
if pending_org.postal_address is None or pending_org.billing_address is None:
print ("FORMS")
print (forms_data)
postal_address = OrganisationAddress.objects.create(line1=forms_data['postal_line1'],
line2=forms_data['postal_line2'],
line3=forms_data['postal_line3'],
locality=forms_data['postal_locality'],
state=forms_data['postal_state'],
country=forms_data['postal_country'],
postcode=forms_data['postal_postcode']
)
billing_address = OrganisationAddress.objects.create(line1=forms_data['billing_line1'],
line2=forms_data['billing_line2'],
line3=forms_data['billing_line3'],
locality=forms_data['billing_locality'],
state=forms_data['billing_state'],
country=forms_data['billing_country'],
postcode=forms_data['billing_postcode']
)
pending_org.postal_address = postal_address
pending_org.billing_address = billing_address
pending_org.save()
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Address Details Added')
action.save()
else:
postal_address = OrganisationAddress.objects.get(id=pending_org.postal_address.id)
billing_address = OrganisationAddress.objects.get(id=pending_org.billing_address.id)
postal_address.line1=forms_data['postal_line1']
postal_address.line2=forms_data['postal_line2']
postal_address.line3=forms_data['postal_line3']
postal_address.locality=forms_data['postal_locality']
postal_address.state=forms_data['postal_state']
postal_address.country=forms_data['postal_country']
postal_address.postcode=forms_data['postal_postcode']
postal_address.save()
billing_address.line1=forms_data['billing_line1']
billing_address.line2=forms_data['billing_line2']
billing_address.line3=forms_data['billing_line3']
billing_address.locality=forms_data['billing_locality']
billing_address.state=forms_data['billing_state']
billing_address.country=forms_data['billing_country']
                billing_address.postcode=forms_data['billing_postcode']
billing_address.save()
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Address Details Updated')
action.save()
#pending_org.identification
# try:
# company = Organisation.objects.get(abn=abn)
# initial['company_exists'] = 'yes'
# except Organisation.DoesNotExist:
# initial['company_exists'] = 'no'
# pending_org = OrganisationPending.objects.create(name=company_name,abn=abn)
# print pending_org
nextstep = 1
if self.request.POST.get('prev-step'):
if step == '1':
nextstep = 1
elif step == '2':
nextstep = 1
elif step == '3':
nextstep = 2
elif step == '4':
nextstep = 3
elif step == '5':
nextstep = 4
else:
if step == '1':
nextstep = 2
elif step == '2':
nextstep = 3
elif step == '3':
nextstep = 4
elif step == '4':
nextstep = 5
else:
nextstep = 6
app_id = None
if 'application_id' in self.kwargs:
app_id = self.kwargs['application_id']
if nextstep == 5:
# print pending_org.company_exists
            if pending_org.company_exists:
pending_org.status = 2
comp = Organisation.objects.get(abn=pending_org.abn)
Delegate.objects.create(email_user=pending_org.email_user,organisation=comp)
#print "Approved"
messages.success(self.request, 'Your company has now been linked.')
pending_org.save()
action = Action(
content_object=pending_org, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Organisation Approved (Automatically)')
action.save()
OrganisationContact.objects.create(
email=pending_org.email_user.email,
first_name=pending_org.email_user.first_name,
last_name=pending_org.email_user.last_name,
phone_number=pending_org.email_user.phone_number,
mobile_number=pending_org.email_user.mobile_number,
fax_number=pending_org.email_user.fax_number,
organisation=comp
)
else:
if self.request.user.is_staff is True:
pass
else:
                    messages.success(self.request, 'Your company has been submitted for approval and is now pending attention from our staff.')
action = Action(
content_object=pending_org, user=self.request.user,
action='Organisation is pending approval')
action.save()
emailcontext = {'pending_org': pending_org, }
emailGroup('Organisation pending approval ', emailcontext, 'pending_organisation_approval.html', None, None, None, 'Statdev Assessor')
if self.request.user.groups.filter(name__in=['Statdev Processor']).exists():
if app_id is None:
return HttpResponseRedirect(reverse('home_page'))
else:
return HttpResponseRedirect(reverse('organisation_access_requests_change_applicant', args=(pending_org.id,'approve',app_id)))
else:
if pending_org:
#return HttpResponseRedirect(reverse('company_create_link_steps',args=(self.request.user.id, nextstep,pending_org.id)))
if app_id is None:
return HttpResponseRedirect(reverse('company_create_link_steps',args=(pk, nextstep,pending_org.id)))
else:
return HttpResponseRedirect(reverse('company_create_link_steps_application',args=(pk, nextstep,pending_org.id,app_id)))
else:
if app_id is None:
return HttpResponseRedirect(reverse('company_create_link',args=(pk,nextstep)))
else:
return HttpResponseRedirect(reverse('company_create_link_application',args=(pk,nextstep,app_id)))
return HttpResponseRedirect(reverse('home_page'))
class ApplicationApplicantChange(LoginRequiredMixin,DetailView):
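    """Search page for selecting a different applicant (person) for an
    application."""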
# form_class = apps_forms.ApplicationCreateForm
template_name = 'applications/applicant_applicantsearch.html'
model = Application
def get_queryset(self):
qs = super(ApplicationApplicantChange, self).get_queryset()
return qs
def get_context_data(self, **kwargs):
#listusers = EmailUser.objects.all()
listorgs = []
context = super(ApplicationApplicantChange, self).get_context_data(**kwargs)
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
search_filter = Q(first_name__icontains=query_str) | Q(last_name__icontains=query_str) | Q(email__icontains=query_str)
listusers = EmailUser.objects.filter(search_filter).exclude(is_staff=True)[:100]
else:
listusers = EmailUser.objects.all().exclude(is_staff=True)[:100]
context['acc_list'] = []
for lu in listusers:
row = {}
row['acc_row'] = lu
lu.organisations = []
lu.organisations = Delegate.objects.filter(email_user=lu.id)
context['acc_list'].append(row)
context['applicant_id'] = self.object.pk
context['person_tab'] = 'active'
return context
class ApplicationApplicantCompanyChange(LoginRequiredMixin,DetailView):
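    """Search page for selecting a different applicant company for an
    application."""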
# form_class = apps_forms.ApplicationCreateForm
template_name = 'applications/applicant_applicant_company_search.html'
model = Application
def get_queryset(self):
qs = super(ApplicationApplicantCompanyChange, self).get_queryset()
return qs
def get_context_data(self, **kwargs):
listorgs = []
context = super(ApplicationApplicantCompanyChange, self).get_context_data(**kwargs)
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
list_orgs = OrganisationExtras.objects.filter(organisation__name__icontains=query_str)
#, organisation__postal_address__icontains=query_str)
else:
list_orgs = OrganisationExtras.objects.all()
context['item_list'] = []
for lu in list_orgs:
row = {}
row['item_row'] = lu
context['item_list'].append(row)
context['company_id'] = self.object.pk
context['company_tab'] = 'active'
return context
class ApplicationFlows(LoginRequiredMixin,TemplateView):
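    """Staff-only listing of the configured application workflow types."""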
#model = Application
template_name = 'applications/application_flows.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationFlows, self).get(request, *args, **kwargs)
# def get_queryset(self):
# qs = super(ApplicationFlows, self).get_queryset()
#
# # Did we pass in a search string? If so, filter the queryset and return
# # it.
# if 'q' in self.request.GET and self.request.GET['q']:
# query_str = self.request.GET['q']
# # Replace single-quotes with double-quotes
# query_str = query_str.replace("'", r'"')
# # Filter by pk, title, applicant__email, organisation__name,
# # assignee__email
# query = get_query(
# query_str, ['pk', 'title', 'applicant__email', 'organisation__name', 'assignee__email'])
# qs = qs.filter(query).distinct()
# return qs
def get_context_data(self, **kwargs):
context = super(ApplicationFlows, self).get_context_data(**kwargs)
context['query_string'] = ''
context['application_types'] = Application.APP_TYPE_CHOICES._identifier_map
context['application_choices'] = Application.APP_TYPE_CHOICES
processor = Group.objects.get(name='Statdev Processor')
for b in Application.APP_TYPE_CHOICES._identifier_map:
print(b)
print(Application.APP_TYPE_CHOICES._identifier_map[b])
# Rule: admin officers may self-assign applications.
if processor in self.request.user.groups.all() or self.request.user.is_superuser:
context['may_assign_processor'] = True
return context
class ApplicationFlowRoutes(LoginRequiredMixin,TemplateView):
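    """Staff-only view of the workflow steps and route configuration for a
    given application type."""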
#model = Application
template_name = 'applications/application_flow_routes.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationFlowRoutes, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationFlowRoutes, self).get_context_data(**kwargs)
context['query_string'] = ''
context['application_types'] = Application.APP_TYPE_CHOICES._identifier_map
context['application_choices'] = Application.APP_TYPE_CHOICES
route = self.request.GET.get('route',1)
context['route'] = route
#processor = Group.objects.get(name='Processor')
pk = kwargs['pk']
print (pk)
app_type = None
for b in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[b] == int(pk):
app_type = b
print(b)
if app_type:
flow = Flow()
flow.get(app_type)
#print (kwargs)
#print (flow.json_obj)
#context['workflow'] = flow.json_obj
workflow_steps = flow.json_obj
if 'options' in workflow_steps:
del workflow_steps['options']
#context['workflow'] = sorted(workflow_steps.items(), key=lambda dct: float(dct[0]))
context['workflow'] = workflow_steps
context['workflow_route'] = flow.getAllRouteConf(app_type,route)
            if 'condition-based-actions' in context['workflow_route']:
                context['workflow_route']['condition_based_actions'] = context['workflow_route']['condition-based-actions']
else:
context['workflow_route']['condition_based_actions'] = ''
print (context['workflow_route']['condition_based_actions'])
# for b in Application.APP_TYPE_CHOICES._identifier_map:
# print(b)
# print(Application.APP_TYPE_CHOICES._identifier_map[b])
# Rule: admin officers may self-assign applications.
#if self.request.user.groups.all() or self.request.user.is_superuser:
# context['may_assign_processor'] = True
return context
class ApplicationFlowDiagrams(LoginRequiredMixin,TemplateView):
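    """Staff-only diagram view of the workflow steps for a given application
    type."""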
#model = Application
template_name = 'applications/application_flow_diagrams.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationFlowDiagrams, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationFlowDiagrams, self).get_context_data(**kwargs)
context['query_string'] = ''
context['application_types'] = Application.APP_TYPE_CHOICES._identifier_map
context['application_choices'] = Application.APP_TYPE_CHOICES
print ('DIA')
route = self.request.GET.get('route',1)
context['route'] = route
print (vars(Application.APP_TYPE_CHOICES))
#processor = Group.objects.get(name='Processor')
pk = kwargs['pk']
print (pk)
context['app_type'] = pk
context['application_type_name'] = Application.APP_TYPE_CHOICES._display_map[int(pk)]
app_type = None
for b in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[b] == int(pk):
app_type = b
print(b)
if app_type:
print ("APP TYPE")
flow = Flow()
flow.get(app_type)
#print (kwargs)
#print (flow.json_obj)
workflow_steps = flow.json_obj
print (workflow_steps["1"]["title"])
for i in workflow_steps.keys():
if i == "options":
pass
else:
#print (i)
#print(workflow_steps[i]["title"])
workflow_steps[i]['step_id'] = str(i).replace(".","_")
#print (workflow_steps[i]['step_id'])
for a in workflow_steps[i]['actions']:
print (a)
a['route_id'] = str(a['route']).replace(".","_")
print (a)
if 'options' in workflow_steps:
del workflow_steps['options']
# context['workflow'] = sorted(workflow_steps.items(), key=lambda dct: float(dct[0]))
context['workflow'] = workflow_steps.items()
#context['workflow'][1][0] = '2-22'
#print (context['workflow'][1][0])
#b = i[0].replace(".","-")
#print (b)
print (context['workflow'])
context['workflow_route'] = flow.getAllRouteConf(app_type,route)
            if 'condition-based-actions' in context['workflow_route']:
                context['workflow_route']['condition_based_actions'] = context['workflow_route']['condition-based-actions']
else:
context['workflow_route']['condition_based_actions'] = ''
#print (context['workflow'])
return context
class ApplicationList(LoginRequiredMixin,ListView):
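    """Staff-only, filterable list of applications (search string, type,
    applicant, workflow status, status and submit-date range)."""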
model = Application
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationList, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(ApplicationList, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return
# it.
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by pk, title, applicant__email, organisation__name,
# assignee__email
query = get_query(
query_str, ['pk', 'title', 'applicant__email', 'organisation__name', 'assignee__email'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(ApplicationList, self).get_context_data(**kwargs)
context['query_string'] = ''
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
        for i in Application.APP_TYPE_CHOICES:
            if i[0] not in [7, 8, 9, 10, 11]:
                APP_TYPE_CHOICES.append(i)
                APP_TYPE_CHOICES_IDS.append(i[0])
context['app_apptypes'] = APP_TYPE_CHOICES
context['app_appstatus'] = Application.APP_STATUS
if 'action' in self.request.GET and self.request.GET['action']:
query_str = self.request.GET['q']
            query_obj = (Q(pk__contains=query_str) | Q(title__icontains=query_str) |
                         Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) |
                         Q(assignee__email__icontains=query_str) | Q(description__icontains=query_str) |
                         Q(related_permits__icontains=query_str) | Q(jetties__icontains=query_str) |
                         Q(drop_off_pick_up__icontains=query_str) | Q(sullage_disposal__icontains=query_str) |
                         Q(waste_disposal__icontains=query_str) | Q(refuel_location_method__icontains=query_str) |
                         Q(berth_location__icontains=query_str) | Q(anchorage__icontains=query_str) |
                         Q(operating_details__icontains=query_str) | Q(proposed_development_description__icontains=query_str))
print ("APP TTPE")
print (self.request.GET['apptype'])
if self.request.GET['apptype'] != '':
query_obj &= Q(app_type=int(self.request.GET['apptype']))
else:
query_obj &= Q(app_type__in=APP_TYPE_CHOICES_IDS)
if self.request.GET['applicant'] != '':
query_obj &= Q(applicant=int(self.request.GET['applicant']))
if self.request.GET['wfstatus'] != '':
#query_obj &= Q(state=int(self.request.GET['appstatus']))
query_obj &= Q(route_status=self.request.GET['wfstatus'])
if self.request.GET['appstatus'] != '' and self.request.GET['appstatus'] != 'all':
query_obj &= Q(status=self.request.GET['appstatus'])
if 'from_date' in self.request.GET:
context['from_date'] = self.request.GET['from_date']
context['to_date'] = self.request.GET['to_date']
if self.request.GET['from_date'] != '':
from_date_db = datetime.strptime(self.request.GET['from_date'], '%d/%m/%Y').date()
query_obj &= Q(submit_date__gte=from_date_db)
if self.request.GET['to_date'] != '':
to_date_db = datetime.strptime(self.request.GET['to_date'], '%d/%m/%Y').date()
query_obj &= Q(submit_date__lte=to_date_db)
applications = Application.objects.filter(query_obj).order_by('-id')
context['query_string'] = self.request.GET['q']
if self.request.GET['apptype'] != '':
context['apptype'] = int(self.request.GET['apptype'])
if self.request.GET['applicant'] != '':
context['applicant'] = int(self.request.GET['applicant'])
            context['appstatus'] = self.request.GET['appstatus']
if 'wfstatus' in self.request.GET:
if self.request.GET['wfstatus'] != '':
#context['appstatus'] = int(self.request.GET['appstatus'])
context['wfstatus'] = self.request.GET['wfstatus']
else:
to_date = datetime.today()
from_date = datetime.today() - timedelta(days=10)
context['from_date'] = from_date.strftime('%d/%m/%Y')
context['to_date'] = to_date.strftime('%d/%m/%Y')
context['appstatus'] = 1
applications = Application.objects.filter(app_type__in=APP_TYPE_CHOICES_IDS, submit_date__gte=from_date, submit_date__lte=to_date, status=1).order_by('-id')
context['app_applicants'] = {}
context['app_applicants_list'] = []
# context['app_apptypes'] = list(Application.APP_TYPE_CHOICES)
#context['app_appstatus'] = list(Application.APP_STATE_CHOICES)
        context['app_wfstatus'] = list(Application.objects.values_list('route_status', flat=True).distinct())
context['app_appstatus'] = Application.APP_STATUS
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['may_assign_to_officer'] = 'False'
row['app'] = app
# Create a distinct list of applicants
            if app.applicant:
                if app.applicant.id not in context['app_applicants']:
                    context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
                    context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name})
# end of creation
            if app.group is not None and app.group in usergroups:
                row['may_assign_to_person'] = 'True'
                row['may_assign_to_officer'] = 'True'
context['app_list'].append(row)
# TODO: any restrictions on who can create new applications?
context['may_create'] = True
processor = Group.objects.get(name='Statdev Processor')
# Rule: admin officers may self-assign applications.
if processor in self.request.user.groups.all() or self.request.user.is_superuser:
context['may_assign_processor'] = True
return context
class EmergencyWorksList(ListView):
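    """Staff-only list of emergency works applications (app_type=4)."""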
model = Application
template_name = 'applications/emergencyworks_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(EmergencyWorksList, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(EmergencyWorksList, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return
# it.
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by pk, title, applicant__email, organisation__name,
# assignee__email
query = get_query(
query_str, ['pk', 'title', 'applicant__email', 'organisation__name', 'assignee__email'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(EmergencyWorksList, self).get_context_data(**kwargs)
context['query_string'] = ''
applications = Application.objects.filter(app_type=4)
context['app_applicants'] = {}
context['app_applicants_list'] = []
context['app_apptypes'] = list(Application.APP_TYPE_CHOICES)
APP_STATUS_CHOICES = []
for i in Application.APP_STATE_CHOICES:
if i[0] in [1,11,16]:
APP_STATUS_CHOICES.append(i)
context['app_appstatus'] = list(APP_STATUS_CHOICES)
if 'action' in self.request.GET and self.request.GET['action']:
query_str = self.request.GET['q']
query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) | Q(assignee__email__icontains=query_str)
query_obj &= Q(app_type=4)
if self.request.GET['applicant'] != '':
query_obj &= Q(applicant=int(self.request.GET['applicant']))
if self.request.GET['appstatus'] != '':
query_obj &= Q(state=int(self.request.GET['appstatus']))
applications = Application.objects.filter(query_obj)
context['query_string'] = self.request.GET['q']
if 'applicant' in self.request.GET:
if self.request.GET['applicant'] != '':
context['applicant'] = int(self.request.GET['applicant'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
# Create a distinct list of applicants
            if app.applicant:
                if app.applicant.id not in context['app_applicants']:
                    context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
                    context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name})
# end of creation
            if app.group is not None and app.group in usergroups:
                row['may_assign_to_person'] = 'True'
context['app_list'].append(row)
# TODO: any restrictions on who can create new applications?
context['may_create'] = True
processor = Group.objects.get(name='Statdev Processor')
# Rule: admin officers may self-assign applications.
if processor in self.request.user.groups.all() or self.request.user.is_superuser:
context['may_assign_processor'] = True
return context
class ComplianceList(TemplateView):
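    """Staff-only list of compliance items filtered by due-date range and
    search string."""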
#model = Compliance
template_name = 'applications/compliance_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        staff = context_processor['staff']
        if not staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceList, self).get(request, *args, **kwargs)
#def get_queryset(self):
# qs = super(ComplianceList, self).get_queryset()
# # Did we pass in a search string? If so, filter the queryset and return
# # it.
# if 'q' in self.request.GET and self.request.GET['q']:
# query_str = self.request.GET['q']
# # Replace single-quotes with double-quotes
# query_str = query_str.replace("'", r'"')
# # Filter by pk, title, applicant__email, organisation__name,
# # assignee__email
# query = get_query(
# query_str, ['pk', 'title', 'applicant__email', 'assignee__email','approval_id'])
# qs = qs.filter(query).distinct()
# return qs
def get_context_data(self, **kwargs):
context = super(ComplianceList, self).get_context_data(**kwargs)
context['query_string'] = ''
#items = ComplianceGroup.objects.filter().order_by('due_date')
context['app_applicants'] = {}
context['app_applicants_list'] = []
#context['app_apptypes'] = list(Application.APP_TYPE_CHOICES)
#applications = ComplianceGroup.objects.filter(query_obj)
APP_STATUS_CHOICES = []
for i in Application.APP_STATE_CHOICES:
if i[0] in [1,11,16]:
APP_STATUS_CHOICES.append(i)
context['app_appstatus'] = list(APP_STATUS_CHOICES)
query_obj = Q()
if 'action' in self.request.GET and self.request.GET['action']:
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_obj = Q(Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(assignee__email__icontains=query_str))
#query_obj &= Q(app_type=4)
context['query_string'] = self.request.GET['q']
#if self.request.GET['applicant'] != '':
# query_obj &= Q(applicant=int(self.request.GET['applicant']))
#if self.request.GET['appstatus'] != '':
# query_obj &= Q(state=int(self.request.GET['appstatus']))
if 'from_date' in self.request.GET:
context['from_date'] = self.request.GET['from_date']
context['to_date'] = self.request.GET['to_date']
if self.request.GET['from_date'] != '':
from_date_db = datetime.strptime(self.request.GET['from_date'], '%d/%m/%Y').date()
query_obj &= Q(due_date__gte=from_date_db)
if self.request.GET['to_date'] != '':
to_date_db = datetime.strptime(self.request.GET['to_date'], '%d/%m/%Y').date()
query_obj &= Q(due_date__lte=to_date_db)
else:
to_date = datetime.today()
from_date = datetime.today() - timedelta(days=100)
context['from_date'] = from_date.strftime('%d/%m/%Y')
context['to_date'] = to_date.strftime('%d/%m/%Y')
query_obj &= Q(due_date__gte=from_date)
query_obj &= Q(due_date__lte=to_date)
items = Compliance.objects.filter(query_obj).order_by('due_date')
context['items'] = items
if 'applicant' in self.request.GET:
if self.request.GET['applicant'] != '':
context['applicant'] = int(self.request.GET['applicant'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
usergroups = self.request.user.groups.all()
context['app_list'] = []
for item in items:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = item
# Create a distinct list of applicants
# if app.applicant:
# if app.applicant.id in context['app_applicants']:
# donothing = ''
# else:
# context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
# context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name })
# # end of creation
# if app.group is not None:
# if app.group in usergroups:
# row['may_assign_to_person'] = 'True'
# context['app_list'].append(row)
# TODO: any restrictions on who can create new applications?
context['may_create'] = True
processor = Group.objects.get(name='Statdev Processor')
# Rule: admin officers may self-assign applications.
if processor in self.request.user.groups.all() or self.request.user.is_superuser:
context['may_assign_processor'] = True
return context
class SearchMenu(ListView):
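    """Admin-staff landing page for the search tools."""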
model = Compliance
template_name = 'applications/search_menu.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(SearchMenu, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(SearchMenu, self).get_context_data(**kwargs)
return context
class OrganisationAccessRequest(ListView):
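    """Admin-staff list of pending organisation access requests."""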
model = OrganisationPending
template_name = 'applications/organisation_pending.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationAccessRequest, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(OrganisationAccessRequest, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return
# it.
if self.request.user.groups.filter(name__in=['Statdev Processor']).exists():
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by pk, title, applicant__email, organisation__name,
# assignee__email
query = get_query(
query_str, ['pk'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(OrganisationAccessRequest, self).get_context_data(**kwargs)
context['orgs_pending_status'] = OrganisationPending.STATUS_CHOICES
context['orgs_pending_applicants'] = OrganisationPending.objects.all().values('email_user','email_user__first_name','email_user__last_name').distinct('email_user')
query = Q()
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str = query_str.replace("'", r'"')
query &= Q(Q(name__icontains=query_str) | Q(abn__icontains=query_str))
if 'applicant' in self.request.GET:
if self.request.GET['applicant'] != '':
query |= Q(email_user=self.request.GET['applicant'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
query &= Q(status=self.request.GET['appstatus'])
context['orgs_pending'] = OrganisationPending.objects.filter(query)[:200]
if 'applicant' in self.request.GET:
if self.request.GET['applicant'] != '':
context['applicant'] = int(self.request.GET['applicant'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
context['query_string'] = ''
if 'q' in self.request.GET and self.request.GET['q']:
context['query_string'] = self.request.GET['q']
return context
class OrganisationAccessRequestUpdate(LoginRequiredMixin,UpdateView):
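    """Admin-staff approve/decline form for an organisation access request;
    approval creates the Organisation, its extras, delegate and contact."""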
form_class = apps_forms.OrganisationAccessRequestForm
model = OrganisationPending
template_name = 'applications/organisation_pending_update.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationAccessRequestUpdate, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(OrganisationAccessRequestUpdate, self).get_queryset()
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str = query_str.replace("'", r'"')
query = get_query(
query_str, ['pk'])
qs = qs.filter(query).distinct()
return qs
def get_initial(self):
initial = super(OrganisationAccessRequestUpdate, self).get_initial()
status = self.kwargs['action']
if status == 'approve':
initial['status'] = 2
if status == 'decline':
initial['status'] = 3
return initial
def post(self, request, *args, **kwargs):
pk = self.kwargs['pk']
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('organisation_access_requests_view', args=(pk,) ))
return super(OrganisationAccessRequestUpdate, self).post(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationAccessRequestUpdate, self).get_context_data(**kwargs)
return context
def form_valid(self, form):
self.object = form.save(commit=False)
forms_data = form.cleaned_data
status = self.kwargs['action']
app_id = None
if 'application_id' in self.kwargs:
app_id = self.kwargs['application_id']
if status == 'approve':
# print self.object.name
# print self.object.abn
# print self.object.identification
# print self.object.postal_address
# print self.object.billing_address
doc_identification = Record(id=self.object.identification.id)
new_org = Organisation.objects.create(name=self.object.name,
abn=self.object.abn,
identification=None,
postal_address=self.object.postal_address,
billing_address=self.object.billing_address
)
OrganisationExtras.objects.create(organisation=new_org,
pin1=random_generator(),
pin2=random_generator(),
identification=doc_identification
)
Delegate.objects.create(email_user=self.object.email_user,organisation=new_org)
if self.request.user.is_staff is True:
if app_id:
app = Application.objects.get(id=app_id)
app.organisation = new_org
app.save()
# random_generator
#OrganisationExtras.objects.create()
self.object.status = 2
OrganisationContact.objects.create(
email=self.object.email_user.email,
first_name=self.object.email_user.first_name,
last_name=self.object.email_user.last_name,
phone_number=self.object.email_user.phone_number,
mobile_number=self.object.email_user.mobile_number,
fax_number=self.object.email_user.fax_number,
organisation=new_org
)
action = Action(
content_object=self.object, user=self.request.user,
action='Organisation Access Request Approved')
action.save()
elif status == 'decline':
self.object.status = 3
action = Action(
content_object=self.object, user=self.request.user,
action='Organisation Access Request Declined')
action.save()
self.object.save()
if app_id is None:
success_url = reverse('organisation_access_requests')
else:
success_url = reverse('application_update',args=(app_id,))
return HttpResponseRedirect(success_url)
class OrganisationAccessRequestView(LoginRequiredMixin,DetailView):
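    """Admin-staff read-only view of an organisation access request."""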
model = OrganisationPending
template_name = 'applications/organisation_pending_view.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationAccessRequestView, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationAccessRequestView, self).get_context_data(**kwargs)
app = self.get_object()
        try:
            context['org'] = Organisation.objects.get(abn=app.abn)
        except Exception:
            pass
# context['conditions'] = Compliance.objects.filter(approval_id=app.id)
return context
class SearchPersonList(ListView):
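    """Admin-staff search across non-staff EmailUser accounts, including
    matches on linked organisation names."""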
model = Compliance
template_name = 'applications/search_person_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(SearchPersonList, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(SearchPersonList, self).get_context_data(**kwargs)
context['query_string'] = ''
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
listorgs = Delegate.objects.filter(organisation__name__icontains=query_str)
orgs = []
            for d in listorgs:
                orgs.append(d.email_user.id)
for se_wo in query_str_split:
                search_filter = Q(pk__contains=se_wo) | Q(email__icontains=se_wo) | Q(first_name__icontains=se_wo) | Q(last_name__icontains=se_wo)
# Add Organsations Results , Will also filter out duplicates
search_filter |= Q(pk__in=orgs)
# Get all applicants
listusers = EmailUser.objects.filter(search_filter).exclude(is_staff=True)[:200]
else:
listusers = EmailUser.objects.all().exclude(is_staff=True).order_by('-id')[:200]
context['acc_list'] = []
for lu in listusers:
row = {}
row['acc_row'] = lu
lu.organisations = []
lu.organisations = Delegate.objects.filter(email_user=lu.id)
#for o in lu.organisations:
# print o.organisation
context['acc_list'].append(row)
if 'q' in self.request.GET and self.request.GET['q']:
context['query_string'] = self.request.GET['q']
return context
class SearchCompanyList(ListView):
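    """Admin-staff search across organisations (OrganisationExtras)."""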
model = Compliance
template_name = 'applications/search_company_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(SearchCompanyList, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(SearchCompanyList, self).get_context_data(**kwargs)
context['query_string'] = ''
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
#listorgs = Delegate.objects.filter(organisation__name__icontains=query_str)
#orgs = []
#for d in listorgs:
# d.email_user.id
# orgs.append(d.email_user.id)
#for se_wo in query_str_split:
# search_filter= Q(pk__contains=se_wo) | Q(email__icontains=se_wo) | Q(first_name__icontains=se_wo) | Q(last_name__icontains=se_wo)
# Add Organsations Results , Will also filter out duplicates
#search_filter |= Q(pk__in=orgs)
# Get all applicants
# listusers = Delegate.objects.filter(organisation__name__icontains=query_str)
listusers = OrganisationExtras.objects.filter(organisation__name__icontains=query_str)[:200]
else:
# listusers = Delegate.objects.all()
listusers = OrganisationExtras.objects.all().order_by('-id')[:200]
context['acc_list'] = []
for lu in listusers:
row = {}
# print lu.organisation.name
row['acc_row'] = lu
# lu.organisations = []
# lu.organisations = Delegate.objects.filter(email_user=lu.id)
#for o in lu.organisations:
# print o.organisation
context['acc_list'].append(row)
if 'q' in self.request.GET and self.request.GET['q']:
context['query_string'] = self.request.GET['q']
return context
class SearchKeywords(ListView):
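    """Admin-staff keyword search across applications, approvals, emergency
    works and compliance, with <b>-highlighted snippets of matched text."""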
model = Compliance
template_name = 'applications/search_keywords_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(SearchKeywords, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(SearchKeywords, self).get_context_data(**kwargs)
context['APP_TYPES'] = Application.APP_TYPE_CHOICES
context['query_string'] = ''
APP_TYPE_CHOICES = [{"key":"applications", "value":"Applications"},{"key":"approvals","value":"Approvals"},{"key":"emergency","value":"Emergency Works"},{"key":"compliance","value":"Compliance"}]
app_list_filter = []
context['app_type_checkboxes'] = {}
if len(self.request.GET) == 0:
context['app_type_checkboxes'] = {'applications': 'checked', 'approvals': 'checked', 'emergency': 'checked','compliance': 'checked'}
# print app_list_filter
if "filter-applications" in self.request.GET:
app_list_filter.append(1)
app_list_filter.append(2)
app_list_filter.append(3)
context['app_type_checkboxes']['applications'] = 'checked'
# print app_list_filter
if "filter-emergency" in self.request.GET:
app_list_filter.append(4)
context['app_type_checkboxes']['emergency'] = 'checked'
if "filter-approvals" in self.request.GET:
context['app_type_checkboxes']['approvals'] = 'checked'
if "filter-compliance" in self.request.GET:
context['app_type_checkboxes']['compliance'] = 'checked'
# print app_list_filter
context['APP_TYPES'] = list(APP_TYPE_CHOICES)
query_str_split = ''
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
search_filter = Q()
search_filter_app = Q(app_type__in=app_list_filter)
# Applications:
for se_wo in query_str_split:
search_filter = Q(pk__contains=se_wo)
search_filter |= Q(title__icontains=se_wo)
search_filter |= Q(description__icontains=se_wo)
search_filter |= Q(related_permits__icontains=se_wo)
search_filter |= Q(address__icontains=se_wo)
search_filter |= Q(jetties__icontains=se_wo)
search_filter |= Q(drop_off_pick_up__icontains=se_wo)
search_filter |= Q(sullage_disposal__icontains=se_wo)
search_filter |= Q(waste_disposal__icontains=se_wo)
search_filter |= Q(refuel_location_method__icontains=se_wo)
search_filter |= Q(berth_location__icontains=se_wo)
search_filter |= Q(anchorage__icontains=se_wo)
search_filter |= Q(operating_details__icontains=se_wo)
search_filter |= Q(proposed_development_current_use_of_land__icontains=se_wo)
search_filter |= Q(proposed_development_description__icontains=se_wo)
# Add Organsations Results , Will also filter out duplicates
# search_filter |= Q(pk__in=orgs)
# Get all applicants
apps = Application.objects.filter(search_filter_app & search_filter)
search_filter = Q()
for se_wo in query_str_split:
search_filter = Q(pk__contains=se_wo)
search_filter |= Q(title__icontains=se_wo)
approvals = []
if "filter-approvals" in self.request.GET:
approvals = Approval.objects.filter(search_filter)
compliance = []
if "filter-compliance" in self.request.GET:
compliance = Compliance.objects.filter()
else:
#apps = Application.objects.filter(app_type__in=[1,2,3,4])
#approvals = Approval.objects.all()
apps = []
approvals = []
compliance = []
context['apps_list'] = []
for lu in apps:
row = {}
lu.text_found = ''
if len(query_str_split) > 0:
for se_wo in query_str_split:
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.title)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.related_permits)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.address)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.description)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.jetties)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.drop_off_pick_up)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.sullage_disposal)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.waste_disposal)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.refuel_location_method)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.berth_location)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.anchorage)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.operating_details)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.proposed_development_current_use_of_land)
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.proposed_development_description)
if lu.app_type in [1,2,3]:
lu.app_group = 'application'
elif lu.app_type in [4]:
lu.app_group = 'emergency'
row['row'] = lu
context['apps_list'].append(row)
for lu in approvals:
row = {}
lu.text_found = ''
if len(query_str_split) > 0:
for se_wo in query_str_split:
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.title)
lu.app_group = 'approval'
row['row'] = lu
context['apps_list'].append(row)
for lu in compliance:
row = {}
lu.text_found = ''
if len(query_str_split) > 0:
for se_wo in query_str_split:
lu.text_found += self.slice_keyword(" "+se_wo+" ", lu.title)
lu.app_group = 'compliance'
row['row'] = lu
context['apps_list'].append(row)
if 'q' in self.request.GET and self.request.GET['q']:
context['query_string'] = self.request.GET['q']
return context
def slice_keyword(self,keyword,text_string):
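        """Return a short HTML snippet for search results: up to 20 characters
        of context around each case-insensitive occurrence of `keyword` in
        `text_string`, with the keyword itself wrapped in <b> tags.
        Returns '' when the keyword does not occur.
        """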
if text_string is None:
return ''
if len(text_string) < 1:
return ''
text_string = " "+ text_string.lower() + " "
        splitr = text_string.split(keyword.lower())
splitr_len = len(splitr)
text_found = ''
loopcount = 0
if splitr_len < 2:
return ''
for t in splitr:
loopcount = loopcount + 1
text_found += t[-20:]
if loopcount > 1:
if loopcount == splitr_len:
break
text_found += "<b>"+keyword+"</b>"
if len(text_found) > 2:
text_found = text_found + '...'
return text_found
class SearchReference(ListView):
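    """Looks up a record from a typed reference such as WO-123 or AP-45 and
    redirects to the matching detail page based on the three-character prefix.
    """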
model = Compliance
template_name = 'applications/search_reference_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(SearchReference, self).get(request, *args, **kwargs)
def render_to_response(self, context):
# print "YESS"
# print context['form_prefix']
# print context['form_no']
# form = form_class(request.POST)
if len(context['form_prefix']) > 0:
if context['form_no'] > 0:
                if context['form_prefix'] in ('EW-', 'WO-'):
apps = Application.objects.filter(id=context['form_no'])
if len(apps) > 0:
return HttpResponseRedirect(reverse('application_detail', args=(context['form_no'],)))
else:
if context['form_prefix'] == 'EW-':
messages.error(self.request, 'Emergency Works does not exist.')
if context['form_prefix'] == 'WO-':
messages.error(self.request, 'Application does not exist.')
return HttpResponseRedirect(reverse('search_reference'))
elif context['form_prefix'] == 'AP-':
approval = Approval.objects.filter(id=context['form_no'])
if len(approval) > 0:
return HttpResponseRedirect(reverse('approval_detail', args=(context['form_no'],)))
else:
messages.error(self.request, 'Approval does not exist.')
elif context['form_prefix'] == 'CO-':
comp = Compliance.objects.filter(approval_id=context['form_no'])
if len(comp) > 0:
return HttpResponseRedirect(reverse('compliance_approval_detail', args=(context['form_no'],)))
else:
messages.error(self.request, 'Compliance does not exist.')
elif context['form_prefix'] == 'AC-':
person = EmailUser.objects.filter(id=context['form_no'])
if len(person) > 0:
return HttpResponseRedirect(reverse('person_details_actions', args=(context['form_no'],'personal')))
else:
messages.error(self.request, 'Person account does not exist.')
elif context['form_prefix'] == 'OG-':
org = Organisation.objects.filter(id=context['form_no'])
if len(org) > 0:
return HttpResponseRedirect(reverse('organisation_details_actions', args=(context['form_no'],'company')))
else:
messages.error(self.request, 'Organisation does not exist.')
elif context['form_prefix'] == 'AR-':
org_pend = OrganisationPending.objects.filter(id=context['form_no'])
if len(org_pend) > 0:
                        return HttpResponseRedirect(reverse('organisation_access_requests_view', args=(context['form_no'],)))
else:
messages.error(self.request, 'Company Access Request does not exist.')
else:
                    messages.error(self.request, 'Invalid prefix provided. Valid prefixes are EW- WO- AP- CO- AC- OG- AR-')
return HttpResponseRedirect(reverse('search_reference'))
else:
                messages.error(self.request, 'Invalid prefix provided. Valid prefixes are EW- WO- AP- CO- AC- OG- AR-')
template = get_template(self.template_name)
return HttpResponse(template.render(context))
def get_context_data(self, **kwargs):
context = super(SearchReference, self).get_context_data(**kwargs)
context = template_context(self.request)
context['messages'] = messages.get_messages(self.request)
context['query_string'] = ''
context['form_prefix'] = ''
context['form_no'] = ''
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
            form_prefix = query_str[:3]
            form_no = query_str[3:]
            context['form_prefix'] = form_prefix
            if form_no.isdigit():
                context['form_no'] = int(form_no)
            else:
                context['form_no'] = 0
context['query_string'] = self.request.GET['q']
return context
class ApplicationCreateEW(LoginRequiredMixin, CreateView):
form_class = apps_forms.ApplicationCreateForm
template_name = 'applications/application_form.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationCreateEW, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationCreateEW, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new application'
return context
def get_form_kwargs(self):
kwargs = super(ApplicationCreateEW, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_initial(self):
initial = {}
initial['app_type'] = 4
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(ApplicationCreateEW, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
# If this is not an Emergency Works set the applicant as current user
if not (self.object.app_type == Application.APP_TYPE_CHOICES.emergency):
self.object.applicant = self.request.user
self.object.assignee = self.request.user
self.object.submitted_by = self.request.user
self.object.submit_date = date.today()
self.object.state = self.object.APP_STATE_CHOICES.draft
self.object.app_type = 4
processor = Group.objects.get(name='Statdev Processor')
self.object.group = processor
self.object.save()
success_url = reverse('application_update', args=(self.object.pk,))
return HttpResponseRedirect(success_url)
class ApplicationCreate(LoginRequiredMixin, CreateView):
form_class = apps_forms.ApplicationCreateForm
template_name = 'applications/application_form.html'
def get_context_data(self, **kwargs):
context = super(ApplicationCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new application'
return context
def get_form_kwargs(self):
kwargs = super(ApplicationCreate, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(ApplicationCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
# If this is not an Emergency Works set the applicant as current user
if not (self.object.app_type == Application.APP_TYPE_CHOICES.emergency):
self.object.applicant = self.request.user
self.object.assignee = self.request.user
self.object.submitted_by = self.request.user
self.object.submit_date = date.today()
self.object.state = self.object.APP_STATE_CHOICES.new
self.object.save()
success_url = reverse('application_update', args=(self.object.pk,))
return HttpResponseRedirect(success_url)
class CreateAccount(LoginRequiredMixin, CreateView):
form_class = apps_forms.CreateAccountForm
template_name = 'applications/create_account_form.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(CreateAccount, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(CreateAccount, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new account'
return context
def get_form_kwargs(self):
kwargs = super(CreateAccount, self).get_form_kwargs()
#kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(CreateAccount, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
self.object.save()
# success_url = reverse('first_login_info', args=(self.object.pk,1))
app_id = None
if 'application_id' in self.kwargs:
app_id = self.kwargs['application_id']
if app_id is None:
success_url = "/first-login/"+str(self.object.pk)+"/1/"
else:
success_url = "/first-login/"+str(self.object.pk)+"/1/"+str(app_id)+"/"
return HttpResponseRedirect(success_url)
class ApplicationApply(LoginRequiredMixin, CreateView):
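    """Entry point for lodging a new application. Staff in the Statdev
    Processor/Assessor groups skip the form: an application record is created
    immediately and they are sent straight to the application-type step.
    """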
form_class = apps_forms.ApplicationApplyForm
template_name = 'applications/application_apply_form.html'
def get(self, request, *args, **kwargs):
if self.request.user.groups.filter(name__in=['Statdev Processor', 'Statdev Assessor']).exists():
app = Application.objects.create(submitted_by=self.request.user
,submit_date=date.today()
,state=Application.APP_STATE_CHOICES.new
,status=3
#,assignee=self.request.user
)
return HttpResponseRedirect("/applications/"+str(app.id)+"/apply/apptype/")
return super(ApplicationApply, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationApply, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new application'
return context
def get_form_kwargs(self):
kwargs = super(ApplicationApply, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(ApplicationApply, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
forms_data = form.cleaned_data
# If this is not an Emergency Works set the applicant as current user
if not (self.object.app_type == Application.APP_TYPE_CHOICES.emergency):
self.object.applicant = self.request.user
self.object.assignee = self.request.user
self.object.submitted_by = self.request.user
self.object.submit_date = date.today()
self.object.state = self.object.APP_STATE_CHOICES.draft
self.object.status = 3
self.object.save()
apply_on_behalf_of = forms_data['apply_on_behalf_of']
if apply_on_behalf_of == '1':
nextstep = 'apptype'
else:
nextstep = 'info'
success_url = reverse('application_apply_form', args=(self.object.pk,nextstep))
return HttpResponseRedirect(success_url)
class ApplicationApplyUpdate(LoginRequiredMixin, UpdateView):
model = Application
form_class = apps_forms.ApplicationApplyUpdateForm
def get(self, request, *args, **kwargs):
return super(ApplicationApplyUpdate, self).get(request, *args, **kwargs)
def get_initial(self):
initial = super(ApplicationApplyUpdate, self).get_initial()
initial['action'] = self.kwargs['action']
        initial['organisations_list'] = [
            (i.organisation.id, i.organisation.name)
            for i in Delegate.objects.filter(email_user=self.request.user)
        ]
        initial['is_staff'] = self.request.user.is_staff
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = self.get_object().application_set.first()
return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationApplyUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
forms_data = form.cleaned_data
action = self.kwargs['action']
nextstep = ''
apply_on_behalf_of = 0
if 'apply_on_behalf_of' in forms_data:
apply_on_behalf_of = forms_data['apply_on_behalf_of']
if action == 'new':
if apply_on_behalf_of == '1':
nextstep = 'apptype'
else:
nextstep = 'info'
elif action == 'info':
nextstep = 'apptype'
if self.object.app_type == 4:
self.object.group = Group.objects.get(name='Statdev Assessor')
self.object.assignee = self.request.user
self.object.save()
if action == 'apptype':
            if self.request.user.groups.filter(name__in=['Statdev Processor', 'Statdev Assessor']).exists():
success_url = reverse('applicant_change', args=(self.object.pk,))
else:
success_url = reverse('application_update', args=(self.object.pk,))
else:
success_url = reverse('application_apply_form', args=(self.object.pk,nextstep))
return HttpResponseRedirect(success_url)
class ApplicationDetail(DetailView):
model = Application
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
user_id = None
if app.state == 18:
return HttpResponseRedirect(reverse('application_booking', args=(app.id,)))
if request.user:
user_id = request.user.id
# start
            if request.user.is_staff:
                pass
            elif request.user.is_superuser:
                pass
elif app.submitted_by.id == user_id:
pass
elif app.applicant:
if app.applicant.id == user_id:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
elif Delegate.objects.filter(email_user=request.user).count() > 0:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
# Rule: if the application status is 'draft', it can be updated.
return super(ApplicationDetail, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationDetail, self).get_context_data(**kwargs)
app = self.get_object()
context['may_update'] = "False"
context['allow_admin_side_menu'] = "False"
context['is_staff'] = self.request.user.is_staff
if Location.objects.filter(application_id=self.object.id).exists():
context['location'] = Location.objects.get(application_id=self.object.id)
else:
context['location'] = ''
if app.assignee is not None:
context['application_assignee_id'] = app.assignee.id
context['may_assign_to_person'] = 'False'
context['stakeholder_communication'] = StakeholderComms.objects.filter(application=app)
#print ("STAKE HOLDER")
#print (context['stakeholder_communication'])
# print app.group
#if app.group in usergroups:
# if float(app.routeid) > 1:
# context['may_assign_to_person'] = 'True'
if app.app_type == app.APP_TYPE_CHOICES.part5:
self.template_name = 'applications/application_details_part5_new_application.html'
part5 = Application_Part5()
context = part5.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.part5cr:
self.template_name = 'applications/application_part5_ammendment_request.html'
part5 = Application_Part5()
context = part5.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.part5amend:
self.template_name = 'applications/application_part5_amend.html'
part5 = Application_Part5()
context = part5.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.emergency:
self.template_name = 'applications/application_detail_emergency.html'
emergency = Application_Emergency()
context = emergency.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.permit:
self.template_name = 'applications/application_detail_permit.html'
permit = Application_Permit()
context = permit.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.licence:
self.template_name = 'applications/application_detail_license.html'
licence = Application_Licence()
context = licence.get(app, self, context)
else:
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(self.request,context,app.routeid,workflowtype)
context = flow.getCollapse(context,app.routeid,workflowtype)
context = flow.getHiddenAreas(context,app.routeid,workflowtype,self.request)
context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
context['formcomponent'] = flow.getFormComponent(app.routeid,workflowtype)
# may_update has extra business rules
        if float(app.routeid) > 0:
            if app.assignee is None:
                context['may_update'] = "False"
                context['workflow_actions'] = []
            if context['may_update'] == "True" and app.assignee != self.request.user:
                context['may_update'] = "False"
            if app.assignee != self.request.user:
                # Only the current assignee may action the workflow.
                context['workflow_actions'] = []
context['may_update_vessels_list'] = "False"
context['application_history'] = self.get_application_history(app, [])
return context
def get_application_history(self,app,ah):
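        """Collect the applications linked to `app` through old_application,
        walking both forward to newer copies and back through predecessors.
        """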
ah = self.get_application_history_up(app,ah)
ah = self.get_application_history_down(app,ah)
return ah
def get_application_history_up(self,app,ah):
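        """Walk forward: append any application whose old_application points
        at `app`, i.e. the copy that superseded it.
        """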
if app:
application = Application.objects.filter(old_application=app)
if application.count() > 0:
ah.append({'id': application[0].id, 'title': application[0].title})
ah = self.get_application_history_up(application[0],ah)
return ah
def get_application_history_down(self,app,ah):
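        """Walk backward: append the application that `app` was copied from.
        """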
if app.old_application:
ah.append({'id': app.old_application.id, 'title': app.old_application.title})
ah = self.get_application_history_down(app.old_application,ah)
return ah
class ApplicationDetailPDF(LoginRequiredMixin,ApplicationDetail):
"""This view is a proof of concept for synchronous, server-side PDF generation.
Depending on performance and resource constraints, this might need to be
refactored to use an asynchronous task.
"""
template_name = 'applications/application_detail_pdf.html'
def get(self, request, *args, **kwargs):
response = super(ApplicationDetailPDF, self).get(request)
options = {
'page-size': 'A4',
'encoding': 'UTF-8',
}
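        # pdfkit wraps the wkhtmltopdf binary (which must be installed on the
        # host); passing False as the output path makes from_string() return
        # the generated PDF as bytes instead of writing a file.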
# Generate the PDF as a string, then use that as the response body.
output = pdfkit.from_string(
response.rendered_content, False, options=options)
# TODO: store the generated PDF as a Record object.
response = HttpResponse(output, content_type='application/pdf')
obj = self.get_object()
response['Content-Disposition'] = 'attachment; filename=application_{}.pdf'.format(
obj.pk)
return response
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(id=kwargs['pk'])
if app.state == app.APP_STATE_CHOICES.new:
app.delete()
return HttpResponseRedirect(reverse('application_list'))
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationDetailPDF, self).post(request, *args, **kwargs)
class AccountActions(LoginRequiredMixin,DetailView):
model = EmailUser
template_name = 'applications/account_actions.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(AccountActions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(AccountActions, self).get_context_data(**kwargs)
obj = self.get_object()
        # TODO: define a GenericRelation field on the EmailUser model.
context['actions'] = Action.objects.filter(
content_type=ContentType.objects.get_for_model(obj), object_id=obj.pk).order_by('-timestamp')
return context
class OrganisationActions(LoginRequiredMixin,DetailView):
model = Organisation
template_name = 'applications/organisation_actions.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationActions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationActions, self).get_context_data(**kwargs)
obj = self.get_object()
        # TODO: define a GenericRelation field on the Organisation model.
context['actions'] = Action.objects.filter(
content_type=ContentType.objects.get_for_model(obj), object_id=obj.pk).order_by('-timestamp')
return context
class OrganisationARActions(LoginRequiredMixin,DetailView):
model = OrganisationPending
template_name = 'applications/organisation_ar_actions.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationARActions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationARActions, self).get_context_data(**kwargs)
obj = self.get_object()
        # TODO: define a GenericRelation field on the OrganisationPending model.
context['actions'] = Action.objects.filter(
content_type=ContentType.objects.get_for_model(obj), object_id=obj.pk).order_by('-timestamp')
return context
class ApplicationActions(LoginRequiredMixin,DetailView):
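    """Shows the action (audit) log for an application. Access is gated by
    the workflow's may_view_action_log right rather than by staff status.
    """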
model = Application
template_name = 'applications/application_actions.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
app = self.get_object()
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context_processor = flow.getAccessRights(request, context_processor, app.routeid, workflowtype)
        may_view_action_log = context_processor['may_view_action_log']
        if may_view_action_log != 'True':
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ApplicationActions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationActions, self).get_context_data(**kwargs)
app = self.get_object()
# TODO: define a GenericRelation field on the Application model.
context['actions'] = Action.objects.filter(
content_type=ContentType.objects.get_for_model(app), object_id=app.pk).order_by('-timestamp')
return context
class ApplicationComms(LoginRequiredMixin,DetailView):
model = Application
template_name = 'applications/application_comms.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
app = self.get_object()
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context_processor = flow.getAccessRights(request, context_processor, app.routeid, workflowtype)
print ("COMMS LOG")
print (context_processor['may_view_comm_log'])
may_view_comm_log = context_processor['may_view_comm_log']
if may_view_comm_log== 'True':
pass
#elif request.user.is_staff == True:
# pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ApplicationComms, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationComms, self).get_context_data(**kwargs)
app = self.get_object()
context['communications'] = Communication.objects.filter(application_id=app.pk).order_by('-created')
return context
class ApplicationCommsView(LoginRequiredMixin,TemplateView):
model = Application
template_name = 'applications/application_comms_view.html'
def get_context_data(self, **kwargs):
context = super(ApplicationCommsView, self).get_context_data(**kwargs)
context['page_heading'] = 'View communication'
context['file_group'] = '2003'
context['file_group_ref_id'] = self.kwargs['pk']
context['communication_entry'] = Communication.objects.get(pk=self.kwargs['comms_id'])
return context
class ApplicationCommsCreate(LoginRequiredMixin,CreateView):
model = Communication
form_class = apps_forms.CommunicationCreateForm
template_name = 'applications/application_comms_create.html'
def get_context_data(self, **kwargs):
context = super(ApplicationCommsCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new communication'
context['file_group'] = '2003'
context['file_group_ref_id'] = self.kwargs['pk']
return context
def get_initial(self):
initial = {}
initial['application'] = self.kwargs['pk']
return initial
def get_form_kwargs(self):
kwargs = super(ApplicationCommsCreate, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('home_page'))
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationCommsCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
app_id = self.kwargs['pk']
application = Application.objects.get(id=app_id)
self.object.application = application
self.object.save()
        if 'records_json' in self.request.POST:
            if is_json(self.request.POST['records_json']) is True:
                json_data = json.loads(self.request.POST['records_json'])
                # Replace the existing record set with the posted selection.
                self.object.records.clear()
                for i in json_data:
                    doc = Record.objects.get(id=i['doc_id'])
                    self.object.records.add(doc)
self.object.save()
success_url = reverse('application_comms', args=(app_id,))
return HttpResponseRedirect(success_url)
class AccountComms(LoginRequiredMixin,DetailView):
model = EmailUser
template_name = 'applications/account_comms.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(AccountComms, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(AccountComms, self).get_context_data(**kwargs)
u = self.get_object()
context['communications'] = CommunicationAccount.objects.filter(user=u.pk).order_by('-created')
return context
class AccountCommsCreate(LoginRequiredMixin,CreateView):
model = CommunicationAccount
form_class = apps_forms.CommunicationAccountCreateForm
template_name = 'applications/application_comms_create.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(AccountCommsCreate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(AccountCommsCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new account communication'
context['file_group'] = '2004'
context['file_group_ref_id'] = self.kwargs['pk']
return context
def get_initial(self):
initial = {}
initial['application'] = self.kwargs['pk']
return initial
def get_form_kwargs(self):
kwargs = super(AccountCommsCreate, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(AccountCommsCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
user_id = self.kwargs['pk']
user = EmailUser.objects.get(id=user_id)
self.object.user = user
self.object.save()
if self.request.FILES.get('records'):
if Attachment_Extension_Check('multi', self.request.FILES.getlist('records'), None) is False:
                raise ValidationError('Documents attached contain an unallowed attachment extension.')
for f in self.request.FILES.getlist('records'):
doc = Record()
doc.upload = f
doc.name = f.name
doc.save()
self.object.records.add(doc)
self.object.save()
success_url = reverse('account_comms', args=(user_id,))
return HttpResponseRedirect(success_url)
class ComplianceComms(LoginRequiredMixin,DetailView):
model = Compliance
template_name = 'applications/compliance_comms.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceComms, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceComms, self).get_context_data(**kwargs)
c = self.get_object()
context['communications'] = CommunicationCompliance.objects.filter(compliance=c.pk).order_by('-created')
return context
class ComplianceCommsCreate(LoginRequiredMixin,CreateView):
model = CommunicationCompliance
form_class = apps_forms.CommunicationComplianceCreateForm
template_name = 'applications/compliance_comms_create.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceCommsCreate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceCommsCreate, self).get_context_data(**kwargs)
        context['page_heading'] = 'Create new compliance communication'
return context
def get_initial(self):
initial = {}
initial['compliance'] = self.kwargs['pk']
return initial
def get_form_kwargs(self):
kwargs = super(ComplianceCommsCreate, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(ComplianceCommsCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
c_id = self.kwargs['pk']
c = Compliance.objects.get(id=c_id)
self.object.compliance = c
self.object.save()
if self.request.FILES.get('records'):
if Attachment_Extension_Check('multi', self.request.FILES.getlist('records'), None) is False:
                raise ValidationError('Documents attached contain an unallowed attachment extension.')
for f in self.request.FILES.getlist('records'):
doc = Record()
doc.upload = f
doc.name = f.name
doc.save()
self.object.records.add(doc)
self.object.save()
success_url = reverse('compliance_comms', args=(c_id,))
return HttpResponseRedirect(success_url)
class OrganisationComms(LoginRequiredMixin,DetailView):
model = Organisation
template_name = 'applications/organisation_comms.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationComms, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationComms, self).get_context_data(**kwargs)
org = self.get_object()
context['communications'] = CommunicationOrganisation.objects.filter(org=org.pk).order_by('-created')
return context
class ReferralList(LoginRequiredMixin,ListView):
model = Application
template_name = 'applications/referral_list.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ReferralList, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralList, self).get_context_data(**kwargs)
        if 'q' in self.request.GET and self.request.GET['q']:
            query_str = self.request.GET['q']
            query_str_split = query_str.split()
            search_filter = Q()
            for se_wo in query_str_split:
                search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
        # TODO: search_filter is built above but not yet applied to this queryset.
        context['items'] = Referral.objects.filter(referee=self.request.user)
return context
class ReferralConditions(UpdateView):
"""A view for updating a referrals condition feedback.
"""
model = Application
form_class = apps_forms.ApplicationReferralConditionsPart5
template_name = 'public/application_form.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
refcount = Referral.objects.filter(application=app,referee=self.request.user).exclude(status=5).count()
if refcount == 1:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ReferralConditions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralConditions, self).get_context_data(**kwargs)
app_id = self.kwargs['pk']
context['page_heading'] = 'Application for new Part 5 - '+app_id
context['left_sidebar'] = 'yes'
#context['action'] = self.kwargs['action']
app = self.get_object()
referral = Referral.objects.get(application=app,referee=self.request.user)
multifilelist = []
a1 = referral.records.all()
for b1 in a1:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['extension'] = b1.extension
fileitem['file_url'] = b1.file_url()
fileitem['file_name'] = b1.name
multifilelist.append(fileitem)
context['records'] = multifilelist
if Location.objects.filter(application_id=self.object.id).exists():
context['location'] = Location.objects.get(application_id=app.id)
else:
context['location'] = {}
        context['referral'] = referral
return context
def get_initial(self):
initial = super(ReferralConditions, self).get_initial()
app = self.get_object()
referral = Referral.objects.get(application=app,referee=self.request.user)
initial['application_id'] = self.kwargs['pk']
initial['application_app_type'] = app.app_type
initial['organisation'] = app.organisation
initial['referral_email'] = referral.referee.email
initial['referral_name'] = referral.referee.first_name + ' ' + referral.referee.last_name
initial['referral_status'] = referral.status
initial['proposed_conditions'] = referral.proposed_conditions
initial['comments'] = referral.feedback
initial['response_date'] = referral.response_date
initial['state'] = app.state
multifilelist = []
a1 = referral.records.all()
for b1 in a1:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['records'] = multifilelist
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(id=kwargs['pk'])
if app.state == app.APP_STATE_CHOICES.new:
app.delete()
return HttpResponseRedirect(reverse('application_list'))
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ReferralConditions, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the state to draft is this is a new application.
"""
forms_data = form.cleaned_data
self.object = form.save(commit=False)
app_id = self.kwargs['pk']
application = Application.objects.get(id=app_id)
referral = Referral.objects.get(application_id=app_id,referee=self.request.user)
referral.feedback = forms_data['comments']
referral.proposed_conditions = forms_data['proposed_conditions']
referral.response_date = date.today()
referral.status = Referral.REFERRAL_STATUS_CHOICES.responded
        if 'records_json' in self.request.POST:
            json_data = json.loads(self.request.POST['records_json'])
            # Replace the referral's record set with the posted selection.
            referral.records.clear()
            for i in json_data:
                doc = Record.objects.get(id=i['doc_id'])
                referral.records.add(doc)
referral.save()
refnextaction = Referrals_Next_Action_Check()
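        # Referrals_Next_Action_Check.get() reports whether the application
        # has no outstanding referrals left; if so, move it to its next
        # workflow state and log the transition.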
refactionresp = refnextaction.get(application)
if refactionresp == True:
app_updated = refnextaction.go_next_action(application)
# Record an action.
action = Action(
content_object=application,
action='No outstanding referrals, application status set to "{}"'.format(app_updated.get_state_display()))
action.save()
return HttpResponseRedirect('/')
class OrganisationCommsCreate(LoginRequiredMixin,CreateView):
model = CommunicationOrganisation
form_class = apps_forms.CommunicationOrganisationCreateForm
template_name = 'applications/organisation_comms_create.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if not admin_staff:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(OrganisationCommsCreate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationCommsCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new organisation communication'
context['org_id'] = self.kwargs['pk']
return context
def get_initial(self):
initial = {}
initial['org_id'] = self.kwargs['pk']
return initial
def get_form_kwargs(self):
kwargs = super(OrganisationCommsCreate, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('home_page'))
return super(OrganisationCommsCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the assignee as the object creator.
"""
self.object = form.save(commit=False)
org_id = self.kwargs['pk']
org = Organisation.objects.get(id=org_id)
self.object.org_id = org.id
self.object.save()
if self.request.FILES.get('records'):
if Attachment_Extension_Check('multi', self.request.FILES.getlist('records'), None) is False:
                raise ValidationError('Documents attached contain an unallowed attachment extension.')
for f in self.request.FILES.getlist('records'):
doc = Record()
doc.upload = f
doc.name = f.name
doc.save()
self.object.records.add(doc)
self.object.save()
success_url = reverse('organisation_comms', args=(org_id,))
return HttpResponseRedirect(success_url)
class ApplicationChange(LoginRequiredMixin, CreateView):
"""This view is for changes or ammendents to existing applications
"""
#@model = Application
form_class = apps_forms.ApplicationChange
template_name = 'applications/application_change_form.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
action = self.kwargs['action']
approval = Approval.objects.get(id=self.kwargs['approvalid'])
application = Application.objects.get(id=approval.application.id)
if action == 'requestamendment':
app = Application.objects.create(applicant=self.request.user,
assignee=self.request.user,
submitted_by=self.request.user,
app_type=5,
submit_date=date.today(),
state=Application.APP_STATE_CHOICES.new,
approval_id=approval.id,
title=approval.title,
old_approval_id = approval.id
)
return HttpResponseRedirect(reverse('application_update', args=(app.id,)))
if action == 'amend':
if approval.app_type == 3:
if approval.ammendment_application:
app = self.copy_application(approval, application)
app.app_type=6
app.save()
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.create, user=self.request.user,
action='Application copied from application : WO-{}, Approval : AP-{}'.format(str(approval.application.id), str(approval.id)))
action.save()
return HttpResponseRedirect(reverse('application_update', args=(app.id,)))
elif approval.app_type == 1:
app = self.copy_application(approval, application)
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.create, user=self.request.user,
action='Application copied from application : WO-{}, Approval : AP-{}'.format(str(approval.application.id), str(approval.id)))
action.save()
return HttpResponseRedirect(reverse('application_update', args=(app.id,)))
elif approval.app_type == 2:
app = self.copy_application(approval, application)
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.create, user=self.request.user,
action='Application copied from application : WO-{}, Approval : AP-{}'.format(str(approval.application.id), str(approval.id)))
action.save()
return HttpResponseRedirect(reverse('application_update', args=(app.id,)))
return super(ApplicationChange, self).get(request, *args, **kwargs)
def copy_application(self, approval, application):
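        """Create a new Application (state 'new') copied from the application
        behind `approval`, carrying across its fields plus attached records,
        vessels, location, conditions and referrals.
        """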
app = Application.objects.create(applicant=approval.application.applicant,
title=approval.application.title,
assignee=self.request.user,
description=approval.application.description,
proposed_commence=approval.application.proposed_commence,
proposed_end=approval.application.proposed_end,
cost=approval.application.cost,
project_no=approval.application.project_no,
related_permits=approval.application.related_permits,
over_water=approval.application.over_water,
vessel_or_craft_details=approval.application.vessel_or_craft_details,
purpose=approval.application.purpose,
max_participants=approval.application.max_participants,
proposed_location=approval.application.proposed_location,
address=approval.application.address,
jetties=approval.application.jetties,
jetty_dot_approval=approval.application.jetty_dot_approval,
jetty_dot_approval_expiry=approval.application.jetty_dot_approval_expiry,
drop_off_pick_up=approval.application.drop_off_pick_up,
food=approval.application.food,
beverage=approval.application.beverage,
liquor_licence=approval.application.liquor_licence,
byo_alcohol=approval.application.byo_alcohol,
sullage_disposal=approval.application.sullage_disposal,
waste_disposal=approval.application.waste_disposal,
refuel_location_method=approval.application.refuel_location_method,
berth_location=approval.application.berth_location,
anchorage=approval.application.anchorage,
operating_details=approval.application.operating_details,
river_lease_require_river_lease=approval.application.river_lease_require_river_lease,
river_lease_reserve_licence=approval.application.river_lease_reserve_licence,
river_lease_application_number=approval.application.river_lease_application_number,
proposed_development_current_use_of_land=approval.application.proposed_development_current_use_of_land,
proposed_development_description=approval.application.proposed_development_description,
type_of_crafts=approval.application.type_of_crafts,
number_of_crafts=approval.application.number_of_crafts,
landowner=approval.application.landowner,
land_description=approval.application.land_description,
submitted_by=self.request.user,
app_type=approval.application.app_type,
submit_date=date.today(),
state=Application.APP_STATE_CHOICES.new,
approval_id=approval.id,
old_application=approval.application,
old_approval_id=approval.id
)
a1 = approval.application.records.all()
for b1 in a1:
app.records.add(b1)
a1 = approval.application.vessels.all()
for b1 in a1:
app.vessels.add(b1)
a1 = approval.application.location_route_access.all()
for b1 in a1:
app.location_route_access.add(b1)
a1 = approval.application.cert_public_liability_insurance.all()
for b1 in a1:
app.cert_public_liability_insurance.add(b1)
a1 = approval.application.risk_mgmt_plan.all()
for b1 in a1:
app.risk_mgmt_plan.add(b1)
a1 = approval.application.safety_mgmt_procedures.all()
for b1 in a1:
app.safety_mgmt_procedures.add(b1)
a1 = approval.application.brochures_itineries_adverts.all()
for b1 in a1:
app.brochures_itineries_adverts.add(b1)
a1 = approval.application.other_relevant_documents.all()
for b1 in a1:
app.other_relevant_documents.add(b1)
a1 = approval.application.land_owner_consent.all()
for b1 in a1:
app.land_owner_consent.add(b1)
a1 = approval.application.deed.all()
for b1 in a1:
app.deed.add(b1)
a1 = approval.application.river_lease_scan_of_application.all()
for b1 in a1:
app.river_lease_scan_of_application.add(b1)
a1 = approval.application.proposed_development_plans.all()
for b1 in a1:
app.proposed_development_plans.add(b1)
app.save()
locobj = Location.objects.get(application_id=application.id)
new_loc = Location()
new_loc.application_id = app.id
new_loc.title_volume = locobj.title_volume
new_loc.folio = locobj.folio
new_loc.dpd_number = locobj.dpd_number
new_loc.location = locobj.location
new_loc.reserve = locobj.reserve
new_loc.street_number_name = locobj.street_number_name
new_loc.suburb = locobj.suburb
new_loc.lot = locobj.lot
new_loc.intersection = locobj.intersection
new_loc.local_government_authority = locobj.local_government_authority
new_loc.save()
conditions = Condition.objects.filter(application_id=application.id)
for c in conditions:
copied_condition=Condition.objects.create(application=app,
condition_no=c.condition_no,
condition=c.condition,
referral=c.referral,
status=c.status,
due_date=c.due_date,
recur_pattern=c.recur_pattern,
recur_freq=c.recur_freq,
suspend=c.suspend,
advise_no=c.advise_no,
advise=c.advise,
)
a1 = c.records.all()
for b1 in a1:
copied_condition.records.add(b1)
copied_condition.save()
referrals=Referral.objects.filter(application=application)
for r in referrals:
copied_referral=Referral.objects.create(application=app,
referee=r.referee,
details=r.details,
period=r.period
)
return app
def get_context_data(self, **kwargs):
context = super(ApplicationChange, self).get_context_data(**kwargs)
context['page_heading'] = 'Update application details'
return context
def get_form_kwargs(self):
kwargs = super(ApplicationChange, self).get_form_kwargs()
return kwargs
def get_initial(self):
initial = {}
action = self.kwargs['action']
approval = Approval.objects.get(id=self.kwargs['approvalid'])
application = Application.objects.get(id=approval.application.id)
initial['title'] = application.title
initial['description'] = application.description
# initial['cost'] = application.cost
if action == "amend":
if approval.app_type == 3:
if approval.ammendment_application:
initial['app_type'] = 6
else:
                    raise ValidationError('There was an error raising your Application Change.')
elif approval.app_type == 1:
initial['app_type'] = 1
elif approval.app_type == 2:
initial['app_type'] = 2
elif action == 'requestamendment':
initial['app_type'] = 5
        elif action == 'renewlicence':
            initial['app_type'] = 11
elif action == 'renewpermit':
initial['app_type'] = 10
else:
            raise ValidationError('There was an error raising your Application Change.')
return initial
    def post(self, request, *args, **kwargs):
        if request.POST.get('cancel'):
            # This is a CreateView, so there is no existing object to return
            # to; fall back to the home page on cancel.
            return HttpResponseRedirect(reverse('home_page'))
        return super(ApplicationChange, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the state to draft is this is a new application.
"""
self.object = form.save(commit=False)
action = self.kwargs['action']
forms_data = form.cleaned_data
approval = Approval.objects.get(id=self.kwargs['approvalid'])
application = Application.objects.get(id=approval.application.id)
if action == "amend":
if approval.ammendment_application:
self.object.app_type = 6
else:
                raise ValidationError('There was an error raising your Application Change.')
elif action == 'requestamendment':
self.object.app_type = 5
elif action == 'renewlicence':
self.object.app_type = 11
elif action == 'renewpermit':
self.object.app_type = 10
else:
            raise ValidationError('There was an error raising your Application Change.')
self.object.proposed_development_description = forms_data['proposed_development_description']
self.object.applicant = self.request.user
self.object.assignee = self.request.user
self.object.submitted_by = self.request.user
self.object.submit_date = date.today()
self.object.state = self.object.APP_STATE_CHOICES.new
self.object.approval_id = approval.id
self.object.save()
if self.request.FILES.get('proposed_development_plans'):
if Attachment_Extension_Check('multi', self.request.FILES.getlist('proposed_development_plans'), None) is False:
                raise ValidationError('Proposed Development Plans contain an unallowed attachment extension.')
for f in self.request.FILES.getlist('proposed_development_plans'):
doc = Record()
doc.upload = f
doc.name = f.name
doc.save()
self.object.proposed_development_plans.add(doc)
return HttpResponseRedirect(self.get_success_url())
class ApplicationConditionTable(LoginRequiredMixin, DetailView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
template_name = 'applications/application_conditions_table.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
context = {}
if app.routeid is None:
app.routeid = 1
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
return super(ApplicationConditionTable, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationConditionTable, self).get_context_data(**kwargs)
app = self.get_object()
if app.routeid is None:
app.routeid = 1
request = self.request
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
context['mode'] = 'update'
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
if context['application_assignee_id']:
context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
return context
def get_success_url(self,app):
return HttpResponseRedirect(app.get_absolute_url())
class ApplicationReferTable(LoginRequiredMixin, DetailView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
template_name = 'applications/application_referrals_table.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
context = {}
if app.routeid is None:
app.routeid = 1
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
return super(ApplicationReferTable, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationReferTable, self).get_context_data(**kwargs)
app = self.get_object()
if app.routeid is None:
app.routeid = 1
request = self.request
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
context['mode'] = 'update'
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
if context['application_assignee_id']:
context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
#part5 = Application_Part5()
#context = part5.get(app, self, context)
return context
    def get_success_url(self, app):
        return app.get_absolute_url()
class ApplicationVesselTable(LoginRequiredMixin, DetailView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
template_name = 'applications/application_vessels_table.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
context = {}
        user_id = None
        if request.user:
            user_id = request.user.id
        # Only staff, superusers, the submitter, the applicant or an
        # organisation delegate may view this page.
        may_view = (
            request.user.is_staff or
            request.user.is_superuser or
            app.submitted_by.id == user_id or
            (app.applicant is not None and app.applicant.id == user_id) or
            Delegate.objects.filter(email_user=request.user).exists()
        )
        if not may_view:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect('/')
if app.routeid is None:
app.routeid = 1
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
context['application_assignee_id'] = self.request.user.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
#if self.request.user.groups.filter(name__in=['Processor']).exists():
# donothing = ''
#if context['may_update_vessels_list'] != "True":
# messages.error(self.request, 'Forbidden from updating vessels')
# return HttpResponseRedirect(reverse('popup_error'))
return super(ApplicationVesselTable, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationVesselTable, self).get_context_data(**kwargs)
#context['page_heading'] = 'Update application details'
#context['left_sidebar'] = 'yes'
app = self.get_object()
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
request = self.request
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
context['application_assignee_id'] = self.request.user.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
#context = flow.getCollapse(context,app.routeid,workflowtype)
#context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
#context['condactions'] = flow.getAllConditionBasedRouteActions(app.routeid)
#context['workflow'] = flow.getAllRouteConf(workflowtype,app.routeid)
return context
    def get_success_url(self, app):
        return app.get_absolute_url()
class NewsPaperPublicationTable(LoginRequiredMixin, DetailView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
template_name = 'applications/application_publication_newspaper_table.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
context = {}
if app.routeid is None:
app.routeid = 1
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
return super(NewsPaperPublicationTable, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(NewsPaperPublicationTable, self).get_context_data(**kwargs)
app = self.get_object()
if app.routeid is None:
app.routeid = 1
request = self.request
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
part5 = Application_Part5()
context = part5.get(app, self, context)
return context
    def get_success_url(self, app):
        return app.get_absolute_url()
class FeedbackTable(LoginRequiredMixin, DetailView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
template_name = 'applications/application_feedback_draft_table.html'
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
context = {}
if app.routeid is None:
app.routeid = 1
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
return super(FeedbackTable, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(FeedbackTable, self).get_context_data(**kwargs)
app = self.get_object()
if app.routeid is None:
app.routeid = 1
request = self.request
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
context['action'] = self.kwargs['action']
if context['action'] == 'review':
self.template_name = 'applications/application_feedback_draft_review.html'
elif context['action'] == 'draft':
self.template_name = 'applications/application_feedback_draft_table.html'
elif context['action'] == 'final':
self.template_name = 'applications/application_feedback_final_table.html'
elif context['action'] == 'determination':
self.template_name = 'applications/application_feedback_determination_table.html'
part5 = Application_Part5()
context = part5.get(app, self, context)
return context
    def get_success_url(self, app):
        return app.get_absolute_url()
class ApplicationUpdate(LoginRequiredMixin, UpdateView):
"""A view for updating a draft (non-lodged) application.
"""
model = Application
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be changed.
app = self.get_object()
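        # State 18 means the application is at the payment step; send the user
        # straight to the booking flow (see ApplicationLodge.form_valid).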
if app.state == 18:
return HttpResponseRedirect(reverse('application_booking', args=(app.id,)))
        # Only staff, superusers, the submitter, the applicant or an
        # organisation delegate may open this view.
        may_view = (
            request.user.is_staff or
            request.user.is_superuser or
            app.submitted_by == request.user or
            (app.applicant is not None and app.applicant.id == request.user.id) or
            Delegate.objects.filter(email_user=request.user).exists()
        )
        if not may_view:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
        if app.status not in (1, 3):
            messages.error(self.request, 'Application is not active')
            return HttpResponseRedirect("/")
if app.assignee is None and float(app.routeid) > 1:
messages.error(self.request, 'Must be assigned to you before any changes can be made.')
return HttpResponseRedirect("/")
# Rule: if the application status is 'draft', it can be updated.
context = {}
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
context['application_assignee_id'] = request.user.id
else:
context['application_assignee_id'] = None
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
if float(app.routeid) > 0:
if app.assignee is None:
context['may_update'] = "False"
if context['may_update'] == "True":
if app.assignee != self.request.user:
context['may_update'] = "False"
#if context['may_update'] != "True":
# messages.error(self.request, 'This application cannot be updated!')
# return HttpResponseRedirect(app.get_absolute_url())
# else:
# if app.state != app.APP_STATE_CHOICES.draft and app.state != app.APP_STATE_CHOICES.new:
# messages.error(self.request, 'This application cannot be updated!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationUpdate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ApplicationUpdate, self).get_context_data(**kwargs)
context['page_heading'] = 'Update application details'
context['left_sidebar'] = 'yes'
context['mode'] = 'update'
app = self.get_object()
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
context['application_assignee_id'] = self.request.user.id
else:
context['application_assignee_id'] = None
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
request = self.request
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context['workflowoptions'] = flow.getWorkflowOptions()
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
context = flow.getCollapse(context,app.routeid,workflowtype)
context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
context['condactions'] = flow.getAllConditionBasedRouteActions(app.routeid)
context['workflow'] = flow.getAllRouteConf(workflowtype,app.routeid)
context['stakeholder_communication'] = StakeholderComms.objects.filter(application=app)
if app.assignee is None:
context['workflow_actions'] = []
if app.app_type == app.APP_TYPE_CHOICES.part5:
part5 = Application_Part5()
context = part5.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.part5cr:
part5 = Application_Part5()
context = part5.get(app, self, context)
#flow = Flow()
#workflowtype = flow.getWorkFlowTypeFromApp(app)
#flow.get(workflowtype)
#context = flow.getAccessRights(self.request,context,app.routeid,workflowtype)
#context = flow.getCollapse(context,app.routeid,workflowtype)
#context = flow.getHiddenAreas(context,app.routeid,workflowtype)
#context['workflow_actions'] = flow.getAllRouteActions(app.routeid,workflowtype)
#context['formcomponent'] = flow.getFormComponent(app.routeid,workflowtype)
elif app.app_type == app.APP_TYPE_CHOICES.part5amend:
part5 = Application_Part5()
context = part5.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.emergency:
emergency = Application_Emergency()
context = emergency.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.permit:
permit = Application_Permit()
context = permit.get(app, self, context)
elif app.app_type == app.APP_TYPE_CHOICES.licence:
licence = Application_Licence()
context = licence.get(app, self, context)
try:
LocObj = Location.objects.get(application_id=app.id)
if LocObj:
context['certificate_of_title_volume'] = LocObj.title_volume
context['folio'] = LocObj.folio
context['diagram_plan_deposit_number'] = LocObj.dpd_number
context['location'] = LocObj.location
context['reserve_number'] = LocObj.reserve
context['street_number_and_name'] = LocObj.street_number_name
context['town_suburb'] = LocObj.suburb
context['lot'] = LocObj.lot
context['nearest_road_intersection'] = LocObj.intersection
context['local_government_authority'] = LocObj.local_government_authority
        except ObjectDoesNotExist:
            pass
        # Default to None; replaced below when an approval record exists.
        context['application_approval'] = None
        if Approval.objects.filter(application=app).count() > 0:
            context['application_approval'] = Approval.objects.filter(application=app)[0]
return context
    def get_success_url(self, app):
        # Return a URL (not a response); callers wrap it in a redirect.
        return app.get_absolute_url()
def get_form_class(self):
if self.object.app_type == self.object.APP_TYPE_CHOICES.licence:
return apps_forms.ApplicationLicencePermitForm
elif self.object.app_type == self.object.APP_TYPE_CHOICES.permit:
return apps_forms.ApplicationPermitForm
elif self.object.app_type == self.object.APP_TYPE_CHOICES.part5:
return apps_forms.ApplicationPart5Form
elif self.object.app_type == self.object.APP_TYPE_CHOICES.emergency:
return apps_forms.ApplicationEmergencyForm
elif self.object.app_type == self.object.APP_TYPE_CHOICES.permitamend:
return apps_forms.ApplicationPermitForm
elif self.object.app_type == self.object.APP_TYPE_CHOICES.licenceamend:
return apps_forms.ApplicationLicencePermitForm
else:
# Add default forms.py and use json workflow to filter and hide fields
return apps_forms.ApplicationPart5Form
def get_initial(self):
initial = super(ApplicationUpdate, self).get_initial()
initial['application_id'] = self.kwargs['pk']
app = self.get_object()
initial['organisation'] = app.organisation
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
request = self.request
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
flowcontent = {}
if app.assignee:
flowcontent['application_assignee_id'] = app.assignee.id
else:
flowcontent['application_assignee_id'] = None
flowcontent = flow.getFields(flowcontent, app.routeid, workflowtype)
flowcontent = flow.getAccessRights(request, flowcontent, app.routeid, workflowtype)
flowcontent = flow.getHiddenAreas(flowcontent,app.routeid,workflowtype,request)
flowcontent['condactions'] = flow.getAllConditionBasedRouteActions(app.routeid)
initial['disabledfields'] = flow.getDisabled(flowcontent,app.routeid,workflowtype)
flowcontent['formcomponent'] = flow.getFormComponent(app.routeid, workflowtype)
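        # flowcontent now holds the per-route field visibility, access rights
        # and form component configuration; the pieces the form needs are
        # copied into ``initial`` below.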
initial['fieldstatus'] = []
if "fields" in flowcontent:
initial['fieldstatus'] = flowcontent['fields']
initial['fieldrequired'] = []
flowcontent = flow.getRequired(flowcontent, app.routeid, workflowtype)
if "formcomponent" in flowcontent:
if "update" in flowcontent['formcomponent']:
if "required" in flowcontent['formcomponent']['update']:
initial['fieldrequired'] = flowcontent['formcomponent']['update']['required']
initial["workflow"] = flowcontent
if float(app.routeid) > 1:
if app.assignee is None:
initial["workflow"]['may_update'] = "False"
if initial["workflow"]['may_update'] == "True":
if app.assignee != self.request.user:
initial["workflow"]['may_update'] = "False"
initial["may_change_application_applicant"] = flowcontent["may_change_application_applicant"]
if app.route_status == 'Draft':
initial['submitter_comment'] = app.submitter_comment
initial['state'] = app.state
        # Each multi-file Record relation is serialised into the list-of-dicts
        # structure the multi-file upload widgets expect.
        multifile_fields = [
            'land_owner_consent', 'proposed_development_plans',
            'other_relevant_documents', 'brochures_itineries_adverts',
            'location_route_access', 'document_new_draft', 'document_memo',
            'document_memo_2', 'document_new_draft_v3', 'document_draft_signed',
            'document_draft', 'document_final_signed', 'document_briefing_note',
            'document_determination_approved', 'deed',
            'swan_river_trust_board_feedback', 'document_final',
            'document_determination', 'document_completion', 'cert_survey',
            'cert_public_liability_insurance', 'risk_mgmt_plan',
            'safety_mgmt_procedures', 'river_lease_scan_of_application',
            'supporting_info_demonstrate_compliance_trust_policies',
        ]
        for field_name in multifile_fields:
            initial[field_name] = [
                {'fileid': rec.id,
                 'path': rec.upload.name,
                 'name': rec.name,
                 'extension': rec.extension}
                for rec in getattr(app, field_name).all()
            ]
if app.approval_document_signed:
initial['approval_document_signed'] = app.approval_document_signed
if app.approval_document:
initial['approval_document'] = app.approval_document
# if Approval.objects.filter(application=app).count() > 1:
# initial['application_approval'] = Approval.objects.filter(application=app)
#initial['publication_newspaper'] = PublicationNewspaper.objects.get(application_id=self.object.id)
####### Record FK fields:
try:
LocObj = Location.objects.get(application_id=self.object.id)
if LocObj:
initial['certificate_of_title_volume'] = LocObj.title_volume
initial['folio'] = LocObj.folio
initial['diagram_plan_deposit_number'] = LocObj.dpd_number
initial['location'] = LocObj.location
initial['reserve_number'] = LocObj.reserve
initial['street_number_and_name'] = LocObj.street_number_name
initial['town_suburb'] = LocObj.suburb
initial['lot'] = LocObj.lot
initial['nearest_road_intersection'] = LocObj.intersection
initial['local_government_authority'] = LocObj.local_government_authority
        except ObjectDoesNotExist:
            pass
return initial
def post(self, request, *args, **kwargs):
app = self.get_object()
context = {}
if app.assignee:
context['application_assignee_id'] = app.assignee.id
else:
context['application_assignee_id'] = None
# if app.app_type == app.APP_TYPE_CHOICES.part5:
# if app.routeid is None:
# app.routeid = 1
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(request, context, app.routeid, workflowtype)
if float(app.routeid) > 1:
if app.assignee is None:
context['may_update'] = "False"
if context['may_update'] == "True":
if app.assignee != self.request.user:
context['may_update'] = "False"
if context['may_update'] != 'True':
messages.error(self.request, 'You do not have permissions to update this form.')
return HttpResponseRedirect(self.get_object().get_absolute_url())
if request.POST.get('cancel'):
app = Application.objects.get(id=kwargs['pk'])
if app.state == app.APP_STATE_CHOICES.new:
app.delete()
return HttpResponseRedirect(reverse('application_list'))
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationUpdate, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        """Override form_valid to set the state to draft if this is a new application.
        """
        forms_data = form.cleaned_data
        self.object = form.save(commit=False)
        # TODO: remove duplicate lookups of this application below; it doesn't
        # need to be fetched multiple times.
        application = Application.objects.get(id=self.object.id)
        try:
            new_loc = Location.objects.get(application_id=self.object.id)
        except Location.DoesNotExist:
            new_loc = Location()
            new_loc.application_id = self.object.id
        # Each of these Record M2M relations is posted back as a JSON list of
        # {'doc_id': ...} objects. Replace the stored set with the posted set,
        # skipping any payload that fails the is_json guard.
        m2m_json_fields = [
            'other_relevant_documents', 'brochures_itineries_adverts',
            'land_owner_consent', 'proposed_development_plans',
            'supporting_info_demonstrate_compliance_trust_policies',
            'location_route_access', 'document_final', 'safety_mgmt_procedures',
            'risk_mgmt_plan', 'cert_public_liability_insurance', 'cert_survey',
            'document_determination_approved', 'document_determination',
            'document_briefing_note', 'document_new_draft_v3', 'document_memo',
            'document_memo_2', 'deed', 'swan_river_trust_board_feedback',
            'river_lease_scan_of_application', 'document_draft_signed',
            'document_final_signed', 'document_draft',
        ]
        for field_name in m2m_json_fields:
            post_key = field_name + '_json'
            if post_key in self.request.POST and is_json(self.request.POST[post_key]) is True:
                json_data = json.loads(self.request.POST[post_key])
                relation = getattr(self.object, field_name)
                relation.clear()
                for i in json_data:
                    relation.add(Record.objects.get(id=i['doc_id']))
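        # Note: ``is_json`` is defined elsewhere in this module. A minimal
        # sketch of the guard relied on above (an assumption about its
        # behaviour, not the module's actual definition) would be:
        #
        #     def is_json(value):
        #         try:
        #             json.loads(value)
        #         except ValueError:
        #             return False
        #         return True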
if 'approval_document_json' in self.request.POST:
self.object.approval_document = None
if is_json(self.request.POST['approval_document_json']) is True:
json_data = json.loads(self.request.POST['approval_document_json'])
new_doc = Record.objects.get(id=json_data['doc_id'])
self.object.approval_document = new_doc
if 'approval_document_signed_json' in self.request.POST:
self.object.approval_document_signed = None
if is_json(self.request.POST['approval_document_signed_json']) is True:
json_data = json.loads(self.request.POST['approval_document_signed_json'])
new_doc = Record.objects.get(id=json_data['doc_id'])
self.object.approval_document_signed = new_doc
        # Copy the submitted location form fields onto the Location record.
        location_field_map = {
            'certificate_of_title_volume': 'title_volume',
            'folio': 'folio',
            'diagram_plan_deposit_number': 'dpd_number',
            'location': 'location',
            'reserve_number': 'reserve',
            'street_number_and_name': 'street_number_name',
            'town_suburb': 'suburb',
            'lot': 'lot',
            'nearest_road_intersection': 'intersection',
            'local_government_authority': 'local_government_authority',
        }
        for form_field, loc_attr in location_field_map.items():
            if form_field in forms_data:
                setattr(new_loc, loc_attr, forms_data[form_field])
        if self.object.state == Application.APP_STATE_CHOICES.new:
            self.object.state = Application.APP_STATE_CHOICES.draft
        # Normalise empty-string submissions on optional fields to None.
        for blankable in ('vessel_or_craft_details', 'beverage', 'byo_alcohol', 'liquor_licence'):
            if getattr(self.object, blankable) == '':
                setattr(self.object, blankable, None)
self.object.save()
new_loc.save()
if self.object.app_type == self.object.APP_TYPE_CHOICES.licence:
form.save_m2m()
# if self.request.POST.get('nextstep') or self.request.POST.get('prevstep'):
# print self.request.POST['nextstep']
# if self.request.POST.get('prevstep'):
# print self.request.POST['nextstep']
# print "CONDITION ROUTING"
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(application)
flow.get(workflowtype)
conditionactions = flow.getAllConditionBasedRouteActions(application.routeid)
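        # Condition-based actions move the application to a different workflow
        # route when a submitted field matches the value configured in the
        # workflow JSON; lodge/issue actions redirect immediately, everything
        # else saves the new route and state before redirecting.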
if conditionactions:
for ca in conditionactions:
for fe in self.request.POST:
if ca == fe:
for ro in conditionactions[ca]['routeoptions']:
if ro['field'] in self.request.POST:
if ro['fieldvalue'] == self.request.POST[ro['field']]:
if "routeurl" in ro:
if ro["routeurl"] == "application_lodge":
return HttpResponseRedirect(reverse(ro["routeurl"],kwargs={'pk':self.object.id}))
if ro["routeurl"] == "application_issue":
return HttpResponseRedirect(reverse(ro["routeurl"],kwargs={'pk':self.object.id}))
self.object.routeid = ro['route']
self.object.state = ro['state']
self.object.route_status = flow.json_obj[ro['route']]['title']
self.object.save()
routeurl = "application_update"
if "routeurl" in ro:
routeurl = ro["routeurl"]
return HttpResponseRedirect(reverse(routeurl,kwargs={'pk':self.object.id}))
self.object.save()
return HttpResponseRedirect(self.object.get_absolute_url()+'update/')
#return HttpResponseRedirect(self.get_success_url(self.object))
class ApplicationLodge(LoginRequiredMixin, UpdateView):
model = Application
form_class = apps_forms.ApplicationLodgeForm
template_name = 'applications/application_lodge.html'
def get_context_data(self, **kwargs):
context = super(ApplicationLodge, self).get_context_data(**kwargs)
app = self.get_object()
if app.app_type == app.APP_TYPE_CHOICES.part5:
self.template_name = 'applications/application_lodge_part5.html'
if app.routeid is None:
app.routeid = 1
return context
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be lodged.
# Rule: application state must be 'draft'.
app = self.get_object()
flowcontext = {}
error_messages = False
if app.assignee:
flowcontext['application_assignee_id'] = app.assignee.id
else:
flowcontext['application_assignee_id'] = None
workflowtype = ''
if app.routeid is None:
app.routeid = 1
request = self.request
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext['may_lodge'] == "True":
route = flow.getNextRouteObj('lodge', app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
            if route is not None:
                if 'required' in route:
                    for fielditem in route["required"]:
                        if hasattr(app, fielditem):
                            if getattr(app, fielditem) is None:
                                messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
                                error_messages = True
                            appattr = getattr(app, fielditem)
                            # ``unicode`` only exists on Python 2; on Python 3
                            # only ``str`` needs checking.
                            try:
                                string_types = (str, unicode)
                            except NameError:
                                string_types = (str,)
                            if isinstance(appattr, string_types) and len(appattr) == 0:
                                messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
                                error_messages = True
                if error_messages is True:
                    return HttpResponseRedirect(app.get_absolute_url()+'update/')
else:
messages.error(self.request, 'This application has no matching routes.')
return HttpResponseRedirect(app.get_absolute_url())
else:
messages.error(self.request, 'This application cannot be lodged!')
return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationLodge, self).get(request, *args, **kwargs)
def get_success_url(self):
#return reverse('application_list')
return reverse('home_page')
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
#return HttpResponseRedirect(self.get_object().get_absolute_url())
return HttpResponseRedirect(self.get_object().get_absolute_url()+'update/')
return super(ApplicationLodge, self).post(request, *args, **kwargs)
def form_valid(self, form):
"""Override form_valid to set the submit_date and status of the new application.
"""
print ("FORM VALID")
app = self.get_object()
flowcontext = {}
error_messages = False
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
nextroute = flow.getNextRoute('lodge', app.routeid, workflowtype)
route = flow.getNextRouteObj('lodge', app.routeid, workflowtype)
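        # For orientation: a route object from the workflow JSON carries at
        # least the keys used below. This shape is inferred from usage in this
        # module, and the values shown are purely hypothetical examples:
        #
        #     {
        #         "route": "5",
        #         "state": "2",
        #         "title": "With Admin Officer",
        #         "lodgegroup": "admin",
        #         "required": ["title"],
        #         "payment_redirect": "False",
        #     }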
app.routeid = nextroute
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
if "required" in route:
for fielditem in route["required"]:
if hasattr(app, fielditem):
if getattr(app, fielditem) is None:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
error_messages = True
#return HttpResponseRedirect(app.get_absolute_url()+'update/')
appattr = getattr(app, fielditem)
python3 = False
try:
unicode('test')
except:
python3 = True
if python3 is True:
if isinstance(appattr, str):
if len(appattr) == 0:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
error_messages = True
else:
if isinstance(appattr, unicode) or isinstance(appattr, str):
if len(appattr) == 0:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
error_messages = True
if error_messages is True:
return HttpResponseRedirect(app.get_absolute_url()+'update/')
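        # Hand the application to the workflow group configured for this lodge
        # step and clear the individual assignee.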
groupassignment = Group.objects.get(name=DefaultGroups['grouplink'][route['lodgegroup']])
app.group = groupassignment
#app.state = app.APP_STATE_CHOICES.with_admin
app.state = route['state']
app.status = 1
self.object.submit_date = date.today()
app.assignee = None
app.save()
# this get uses the new route id to get title of new route and updates the route_status.
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
app.route_status = flow.json_obj[app.routeid]['title']
app.save()
# Generate a 'lodge' action:
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.lodge,
user=self.request.user, action='Application lodgement')
action.save()
# Success message.
#msg = """Your {0} application has been successfully submitted. The application
#number is: <strong>WO-{1}</strong>.<br>
#Please note that routine applications take approximately 4-6 weeks to process.<br>
#If any information is unclear or missing, Parks and Wildlife may return your
#application to you to amend or complete.<br>
#The assessment process includes a 21-day external referral period. During this time
#your application may be referred to external departments, local government
#agencies or other stakeholders. Following this period, an internal report will be
#produced by an officer for approval by the Manager, Rivers and Estuaries Division,
#to determine the outcome of your application.<br>
#You will be notified by email once your {0} application has been determined and/or
#further action is required.""".format(app.get_app_type_display(), app.pk)
#messages.success(self.request, msg)
#emailcontext = {}
#emailcontext['app'] = self.object
#emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
#emailcontext['person'] = app.submitted_by
#emailcontext['body'] = msg
#sendHtmlEmail([app.submitted_by.email], emailcontext['application_name'] + ' application submitted ', emailcontext, 'application-lodged.html', None, None, None)
        # State 18 is the payment step; send the user to the booking flow when
        # the route requests a payment redirect.
        if float(route['state']) == 18:
            if "payment_redirect" in route:
                if route["payment_redirect"] == "True":
                    return HttpResponseRedirect(reverse('application_booking', args=(app.id,)))
return HttpResponseRedirect(self.get_success_url())
class ApplicationRefer(LoginRequiredMixin, CreateView):
"""A view to create a Referral object on an Application (if allowed).
"""
model = Referral
form_class = apps_forms.ReferralForm
def get(self, request, *args, **kwargs):
# TODO: business logic to check the application may be referred.
# Rule: application state must be 'with admin' or 'with referee'
app = Application.objects.get(pk=self.kwargs['pk'])
flowcontext = {}
# if app.app_type == app.APP_TYPE_CHOICES.part5:
if app.routeid is None:
app.routeid = 1
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext['may_refer'] != "True":
            messages.error(self.request, 'Cannot modify referrals on this application!')
return HttpResponseRedirect(app.get_absolute_url())
# else:
# if app.state not in [app.APP_STATE_CHOICES.with_admin, app.APP_STATE_CHOICES.with_referee]:
# # TODO: better/explicit error response.
# messages.error(
# self.request, 'This application cannot be referred!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationRefer, self).get(request, *args, **kwargs)
def get_success_url(self):
"""Override to redirect to the referral's parent application detail view.
"""
#messages.success(self.request, 'Referral has been added! ')
return reverse('application_refer', args=(self.object.application.pk,))
def get_context_data(self, **kwargs):
context = super(ApplicationRefer, self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['pk'])
context['application_referrals'] = Referral.objects.filter(application=self.kwargs['pk'])
app = Application.objects.get(pk=self.kwargs['pk'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
context = flow.getAccessRights(self.request, context, app.routeid, workflowtype)
return context
def get_initial(self):
initial = super(ApplicationRefer, self).get_initial()
# TODO: set the default period value based on application type.
initial['period'] = 21
return initial
def get_form_kwargs(self):
kwargs = super(ApplicationRefer, self).get_form_kwargs()
kwargs['application'] = Application.objects.get(pk=self.kwargs['pk'])
return kwargs
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationRefer, self).post(request, *args, **kwargs)
def form_valid(self, form):
app = Application.objects.get(pk=self.kwargs['pk'])
# if app.app_type == app.APP_TYPE_CHOICES.part5:
# flow = Flow()
# flow.get('part5')
# nextroute = flow.getNextRoute('referral',app.routeid,"part5")
# app.routeid = nextroute
self.object = form.save(commit=False)
self.object.application = app
#self.object.sent_date = date.today()
self.object.save()
# Set the application status to 'with referee'.
# app.state = app.APP_STATE_CHOICES.with_referee
# app.save()
# TODO: the process of sending the application to the referee.
# Generate a 'refer' action on the application:
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.refer,
user=self.request.user, action='Added Referral {}'.format(self.object.referee))
action.save()
return super(ApplicationRefer, self).form_valid(form)
class ApplicationAssignNextAction(LoginRequiredMixin, UpdateView):
"""A view to allow an application to be assigned to an internal user or back to the customer.
The ``action`` kwarg is used to define the new state of the application.
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
if app.assignee is None:
messages.error(self.request, 'Please Allocate an Assigned Person First')
return HttpResponseRedirect(app.get_absolute_url())
action = self.kwargs['action']
actionid = self.kwargs['actionid']
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
#route = flow.getNextRouteObj(action, app.routeid, workflowtype)
route = flow.getNextRouteObjViaId(int(actionid), app.routeid, workflowtype)
#allow_email_attachment
if action == "creator":
if flowcontext['may_assign_to_creator'] != "True":
messages.error(self.request, 'This application cannot be reassigned, Unknown Error')
return HttpResponseRedirect(app.get_absolute_url())
else:
# nextroute = flow.getNextRoute(action,app.routeid,"part5")
assign_action = flow.checkAssignedAction(action, flowcontext)
if assign_action != True:
if action in DefaultGroups['grouplink']:
messages.error(self.request, 'This application cannot be reassign to ' + DefaultGroups['grouplink'][action])
return HttpResponseRedirect(app.get_absolute_url())
else:
messages.error(self.request, 'This application cannot be reassign, Unknown Error')
return HttpResponseRedirect(app.get_absolute_url())
if action == 'referral':
app_refs = Referral.objects.filter(application=app).count()
#Referral.objects.filter(application=app).update(status=5)
Referral.objects.filter(application=app).update(status=Referral.REFERRAL_STATUS_CHOICES.referred, response_date=None)
if app_refs == 0:
messages.error(self.request, 'Unable to complete action as you have no referrals! ')
return HttpResponseRedirect(app.get_absolute_url())
if "required" in route:
for fielditem in route["required"]:
if hasattr(app, fielditem):
if getattr(app, fielditem) is None:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
return HttpResponseRedirect(reverse('application_update', args=(app.pk,)))
appattr = getattr(app, fielditem)
try:
if isinstance(appattr, unicode) or isinstance(appattr, str):
if len(appattr) == 0:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
return HttpResponseRedirect(reverse('application_update', args=(app.pk,)))
except:
if isinstance(appattr, str):
if len(appattr) == 0:
messages.error(self.request, 'Required Field ' + fielditem + ' is empty, Please Complete')
return HttpResponseRedirect(reverse('application_update', args=(app.pk,)))
return super(ApplicationAssignNextAction, self).get(request, *args, **kwargs)
def get_initial(self):
initial = super(ApplicationAssignNextAction, self).get_initial()
app = self.get_object()
action = self.kwargs['action']
actionid = self.kwargs['actionid']
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
route = flow.getNextRouteObjViaId(int(actionid), app.routeid, workflowtype)
print ("ROUTE")
print ("flow.getNextRouteObjViaId")
print (route)
#allow_email_attachment
allow_email_attachment = False
if 'allow_email_attachment' in route:
if route['allow_email_attachment'] == 'True':
allow_email_attachment = True
initial['allow_email_attachment'] = allow_email_attachment
initial['action'] = self.kwargs['action']
initial['records'] = None
return initial
def get_form_class(self):
return apps_forms.ApplicationAssignNextAction
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssignNextAction, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_list')
def form_valid(self, form):
self.object = form.save(commit=False)
forms_data = form.cleaned_data
app = self.get_object()
action = self.kwargs['action']
actionid = self.kwargs['actionid']
# Upload New Files
# doc = None
# if self.request.FILES.get('records'): # Uploaded new file.
# doc = Record()
# doc.upload = forms_data['records']
# doc.name = forms_data['records'].name
# doc.save()
# print doc
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
FriendlyGroupList = flow.FriendlyGroupList()
flow.get(workflowtype)
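        # Resolve the next assignee: back to the form creator, to the assess
        # group for referrals, or to the workflow group mapped to the action
        # (preferring the currently assigned officer when they belong to that
        # group).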
assessed_by = None
if action == "creator":
groupassignment = None
assignee = app.submitted_by
elif action == 'referral':
groupassignment = Group.objects.get(name=DefaultGroups['grouplink']['assess'])
assignee = None
else:
assignee = None
assessed_by = self.request.user
groupassignment = Group.objects.get(name=DefaultGroups['grouplink'][action])
if app.assigned_officer:
if app.assigned_officer.groups.filter(name__in=[groupassignment.name]).exists():
assignee = app.assigned_officer
#route = flow.getNextRouteObj(action, app.routeid, workflowtype)
route = flow.getNextRouteObjViaId(int(actionid), app.routeid, workflowtype)
        if route is None or route["route"] is None:
            messages.error(self.request, 'Error In Assigning Next Route, No routes Found')
            return HttpResponseRedirect(app.get_absolute_url())
self.object.routeid = route["route"]
self.object.state = route["state"]
self.object.group = groupassignment
self.object.assignee = assignee
self.object.save()
# this get uses the new route id to get title of new route and updates the route_status.
workflowtype = flow.getWorkFlowTypeFromApp(self.object)
flow.get(workflowtype)
self.object.route_status = flow.json_obj[self.object.routeid]['title']
self.object.save()
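        # Log this reassignment as a Communication record against the
        # application.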
comms = Communication()
comms.application = app
comms.comms_from = str(self.request.user.email)
if action == 'creator':
comms.comms_to = "Form Creator"
else:
comms.comms_to = FriendlyGroupList['grouplink'][action]
        # State 8 is the decline route (see decline_notification below); no
        # subject is recorded for it.
        if self.object.state != '8':
            comms.subject = route["title"]
comms.details = forms_data['details']
comms.state = route["state"]
comms.comms_type = 4
comms.save()
print ("COMMS")
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
print (self.request.POST['records_json'])
json_data = json.loads(self.request.POST['records_json'])
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
comms.records.add(doc)
#comms.save_m2m()
comms.save()
# if self.request.FILES.get('records'):
# if Attachment_Extension_Check('multi', self.request.FILES.getlist('other_relevant_documents'), None) is False:
# raise ValidationError('Other relevant documents contains and unallowed attachment extension.')
#
# for f in self.request.FILES.getlist('records'):
# doc = Record()
# doc.upload = f
# doc.name = f.name
# doc.save()
# comms.records.add(doc)
# if doc:
# comms.records.add(doc)
if "stake_holder_communication" in route:
self.send_stake_holder_comms(app)
emailcontext = {}
emailcontext['app'] = self.object
if action != "creator" and action != 'referral':
emailcontext['groupname'] = DefaultGroups['grouplink'][action]
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailGroup('Application Assignment to Group ' + DefaultGroups['grouplink'][action], emailcontext, 'application-assigned-to-group.html', None, None, None, DefaultGroups['grouplink'][action])
if self.object.state != '14' and self.object.state != '19':
if app.assignee:
                emailcontext = {}
                emailcontext['app'] = self.object
                emailcontext['person'] = app.assignee
                emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
sendHtmlEmail([app.assignee.email], emailcontext['application_name'] + ' application assigned to you ', emailcontext, 'application-assigned-to-person.html', None, None, None)
elif action == "creator":
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailcontext['person'] = assignee
emailcontext['admin_comment'] = forms_data['submitter_comment']
sendHtmlEmail([assignee.email], emailcontext['application_name'] + ' application requires more information ', emailcontext, 'application-assigned-to-submitter.html', None, None, None)
elif action == "referral":
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailApplicationReferrals(app.id, 'Application for Feedback ', emailcontext, 'application-assigned-to-referee.html', None, None, None)
if self.object.state == '14' or self.object.state == '19':
# Form Commpleted & Create Approval
self.complete_application(app, self.object.state)
#if self.object.state == 19:
# self.complete_application_part5_not_supported(app)
if self.object.state == '10':
self.ammendment_approved(app)
if self.object.state == '8':
self.decline_notification(app, forms_data)
if 'process' in route:
if 'draft_completed' in route['process']:
self.draft_completed(app)
if 'final_completed' in route['process']:
self.final_completed(app)
if 'temp_approval' in route['process']:
self.temp_approval(app)
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.action, user=self.request.user,
action='Next Step Application Assigned to group ({}) with action title ({}) and route id ({}) '.format(groupassignment, route['title'], self.object.routeid))
action.save()
#if app.app_type == 4:
# return HttpResponseRedirect(reverse('emergencyworks_list'))
return HttpResponseRedirect(self.get_success_url())
def send_stake_holder_comms(self,app):
# application-stakeholder-comms.html
# get applicant contact emails
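        # Stakeholder role codes recorded below (inferred from this method):
        # role=1 applicant/organisation delegate, role=2 submitter,
        # role=3 referee, role=4 public-feedback respondent. comm_type=1 is
        # assumed to denote a progress notification.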
if app.organisation:
org_dels = Delegate.objects.filter(organisation=app.organisation)
for od in org_dels:
# get all organisation contact emails and names
StakeholderComms.objects.create(application=app,
email=od.email_user.email,
name=od.email_user.first_name + ' '+ od.email_user.last_name,
sent_date=date.today(),
role=1,
comm_type=1
)
emailcontext = {'person': od.email_user.first_name + ' '+ od.email_user.last_name}
                sendHtmlEmail([od.email_user.email], 'Application has progressed', emailcontext, 'application-stakeholder-comms.html', None, None, None)
elif app.applicant:
StakeholderComms.objects.create(application=app,
email=app.applicant.email,
name=app.applicant.first_name + ' '+ app.applicant.last_name,
sent_date=date.today(),
role=1,
comm_type=1
)
emailcontext = {'person': app.applicant.first_name + ' '+ app.applicant.last_name}
            sendHtmlEmail([app.applicant.email], 'Application has progressed', emailcontext, 'application-stakeholder-comms.html', None, None, None)
# get only applicant name and email
        # Get Submitter information
# submitter = app.submitted_by
if app.applicant != app.submitted_by:
StakeholderComms.objects.create(application=app,
email=app.submitted_by.email,
name=app.submitted_by.first_name + ' '+ app.submitted_by.last_name,
sent_date=date.today(),
role=2,
comm_type=1
)
emailcontext = {'person': app.submitted_by.first_name + ' '+ app.submitted_by.last_name}
            sendHtmlEmail([app.submitted_by.email], 'Application has progressed', emailcontext, 'application-stakeholder-comms.html', None, None, None)
public_feedback = PublicationFeedback.objects.filter(application=app)
for pf in public_feedback:
StakeholderComms.objects.create(application=app,
email=pf.email,
name=pf.name,
sent_date=date.today(),
role=4,
comm_type=1
)
emailcontext = {'person': pf.name}
            sendHtmlEmail([pf.email], 'Application has progressed', emailcontext, 'application-stakeholder-comms.html', None, None, None)
# Get feedback
# PublicationFeedback
refs = Referral.objects.filter(application=app)
for ref in refs:
StakeholderComms.objects.create(application=app,
email=ref.referee.email,
name=ref.referee.first_name + ' ' + ref.referee.last_name,
sent_date=date.today(),
role=3,
comm_type=1
)
emailcontext = {'person': ref.referee.first_name + ' ' + ref.referee.last_name}
            sendHtmlEmail([ref.referee.email], 'Application has progressed', emailcontext, 'application-stakeholder-comms.html', None, None, None)
# Get Referrals
# Referral
def draft_completed(self,app):
emailcontext = {}
emailcontext['app'] = app
# if app.app_type == 3:
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailcontext['person'] = app.submitted_by
emailcontext['EXTERNAL_URL'] = settings.EXTERNAL_URL
sendHtmlEmail([app.submitted_by.email], 'Draft Report - Part 5 - '+str(app.id), emailcontext, 'application-part5-draft-report.html', None, None, None)
def final_completed(self,app):
emailcontext = {}
emailcontext['app'] = app
if app.app_type == 3:
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailcontext['person'] = app.submitted_by
emailcontext['EXTERNAL_URL'] = settings.EXTERNAL_URL
        sendHtmlEmail([app.submitted_by.email], 'Final Report - Part 5 - '+str(app.id), emailcontext, 'application-part5-final-report.html', None, None, None)
def decline_notification(self,app,forms_data):
attachment1 = None
if 'attach_to_email_json' in self.request.POST:
if is_json(self.request.POST['attach_to_email_json']) is True:
json_data = json.loads(self.request.POST['attach_to_email_json'])
doc = Record.objects.get(id=json_data['doc_id'])
attachment1 = doc.upload.path
print ("ATTACHMENT")
print (attachment1)
emailcontext = {}
emailcontext['app'] = app
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
emailcontext['person'] = app.submitted_by
emailcontext['communication_details'] = forms_data['details']
sendHtmlEmail([app.submitted_by.email], Application.APP_TYPE_CHOICES[app.app_type]+' application declined - '+str(app.id), emailcontext, 'application-declined.html', None, None, None, attachment1)
def temp_approval(self,app):
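        # Create a provisional Approval mirroring the application's details;
        # status=7 is assumed to denote a temporary approval awaiting
        # finalisation.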
approval = Approval.objects.create(
app_type=app.app_type,
title=app.title,
applicant=app.applicant,
organisation=app.organisation,
application=app,
start_date=app.assessment_start_date,
expiry_date=app.expire_date,
status=7
)
def complete_application_part5(self,app):
if Approval.objects.filter(application=app).count() > 0:
approval = Approval.objects.filter(application=app)[0]
approval.approval_document = app.approval_document_signed
approval.save()
def complete_application(self,app, state):
"""Once and application is complete and approval needs to be created in the approval model.
"""
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
approval = None
if Approval.objects.filter(application=app).count() > 0:
from django.core.files.base import ContentFile
from django.core.files.base import File
approval = Approval.objects.filter(application=app)[0]
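            # Duplicate the signed approval document into a new Record owned by
            # the approval (category=9 and file_group=2005 are assumed to be
            # the approval-document classifiers used elsewhere in this app).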
r = Record.objects.create(upload=app.approval_document_signed.upload.path,
name=app.approval_document_signed.name,
category=9,
metadata=app.approval_document_signed.metadata,
text_content=app.approval_document_signed.text_content,
file_group=2005,
file_group_ref_id=approval.id,
extension=app.approval_document_signed.extension
)
import pathlib
app_file_bytes = pathlib.Path(os.path.join(app.approval_document_signed.upload.path)).read_bytes()
#app_file = open(os.path.join(app.approval_document_signed.upload.path), 'rb' )
#with open(os.path.join(app.approval_document_signed.upload.path), "rb") as f:
#f = open(os.path.join(app.approval_document_signed.upload.path), "rb")
# app_file_bytes = f.read(1)
# while app_file_bytes != "":
# # Do stuff with byte.
# app_file_bytes = f.read(1)
#app_file_bytes = app_file.encode(encoding='UTF-8')
r.upload.save(app.approval_document_signed.name, ContentFile(app_file_bytes), save=False)
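            # Saving the upload from the raw bytes gives the approval its own
            # copy of the file, so later changes to the application's document
            # do not affect the issued approval.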
approval.approval_document = r
if app.app_type == 3:
approval.start_date = app.assessment_start_date
if int(state) == 19:
approval.status = 8
else:
approval.status = 1
print (approval.status)
approval.save()
else:
approval = Approval.objects.create(
app_type=app.app_type,
title=app.title,
applicant=app.applicant,
organisation=app.organisation,
application=app,
start_date=app.assessment_start_date,
expiry_date=app.expire_date,
status=1
)
if app.app_type==4:
approval.start_date = app.proposed_commence
approval.expiry_date = app.proposed_end
approval.save()
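        # Amendment bookkeeping: cancel the superseded application and its
        # approval (status codes 2 and 3 are assumed to mean 'cancelled' for
        # applications and approvals respectively).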
if app.old_application:
app.old_application.status=2
app.old_application.save()
old_approval = Approval.objects.get(id=app.old_approval_id)
old_approval.status = 3
old_approval.save()
action = Action(
content_object=app.old_application, category=Action.ACTION_CATEGORY_CHOICES.cancel, user=self.request.user,
action='Application cancelled due to amendment. New application : WO-{}, New Approval : AP-{}'.format(str(app.id), str(approval.id)))
action.save()
action = Action(
content_object=old_approval, category=Action.ACTION_CATEGORY_CHOICES.cancel, user=self.request.user,
action='Approval cancelled due to amendment. New application : WO-{}, New Approval : AP-{}'.format(str(app.id), str(approval.id)))
action.save()
emailcontext = {}
emailcontext['app'] = app
emailcontext['approval'] = approval
pdftool = PDFtool()
# applications/email/application-permit-proposal.html
approval_pdf = BASE_DIR+'/pdfs/approvals/'+str(approval.id)+'-approval.pdf'
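        # app_type decides which PDF is generated and attached to the issue
        # email below; Part 5 (3) and Section 84 (6) reuse the signed approval
        # document instead of generating a new PDF.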
        # Email sent after the application is completed (issued).
if app.app_type == 1:
# Permit Proposal
pdftool.generate_permit(approval)
emailcontext['person'] = app.submitted_by
emailcontext['conditions_count'] = Condition.objects.filter(application=app).count()
sendHtmlEmail([app.submitted_by.email], 'Permit - '+app.title, emailcontext, 'application-permit-proposal.html', None, None, None, approval_pdf)
elif app.app_type == 2:
# Licence Proposal
pdftool.generate_licence(approval)
emailcontext['person'] = app.submitted_by
emailcontext['vessels'] = app.vessels.all()
emailcontext['approval'] = approval
sendHtmlEmail([app.submitted_by.email], 'Licence Permit - '+app.title, emailcontext, 'application-licence-permit-proposal.html', None, None, None, approval_pdf)
elif app.app_type == 3:
emailcontext['person'] = app.submitted_by
emailcontext['approval'] = approval
approval_pdf = approval.approval_document.upload.path
sendHtmlEmail([app.submitted_by.email], 'Part 5 - '+app.title, emailcontext, 'application-licence-permit-proposal.html', None, None, None, approval_pdf)
elif app.app_type == 4:
pdftool.generate_emergency_works(approval)
emailcontext['person'] = app.submitted_by
emailcontext['conditions_count'] = Condition.objects.filter(application=app).count()
sendHtmlEmail([app.submitted_by.email], 'Emergency Works - '+app.title, emailcontext, 'application-permit-proposal.html', None, None, None, approval_pdf)
elif app.app_type == 6:
emailcontext['person'] = app.submitted_by
emailcontext['approval'] = approval
approval_pdf = approval.approval_document.upload.path
sendHtmlEmail([app.submitted_by.email], 'Section 84 - '+app.title, emailcontext, 'application-licence-permit-proposal.html', None, None, None, approval_pdf)
elif app.app_type == 10 or app.app_type == 11:
# Permit & Licence Renewal
emailcontext['person'] = app.submitted_by
sendHtmlEmail([app.submitted_by.email], 'Draft Report - Part 5 - '+str(app.id)+' - location - description of works - applicant', emailcontext, 'application-licence-permit-proposal.html', None, None, None)
####################
        # Disabling compliance creation after approval (this is now handled by a cron
        # script, as we no longer create all future compliances at once but only the
        # next due compliance).
return
###################
# For compliance ( create clearance of conditions )
# get all conditions
conditions = Condition.objects.filter(application=app)
# print conditions
# create clearance conditions
for c in conditions:
start_date = app.proposed_commence
end_date = c.due_date
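            # recur_pattern: 1=weekly, 2=monthly, 3=yearly (inferred from the
            # timedelta/relativedelta usage below). One 'future' Compliance is
            # created per whole recurrence period between commencement and the
            # condition's due date, plus a final one at the due date when a
            # partial period remains: e.g. a 10-week span with recur_freq=3
            # yields compliances at weeks 3, 6 and 9, then one at the due date.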
if c.recur_pattern == 1:
                num_of_weeks = (end_date - start_date).days / 7.0
                week_freq = num_of_weeks / c.recur_freq
                # Floor to whole recurrence periods (values are always positive here).
                week_freq_whole = int(week_freq)
loopcount = 1
loop_start_date = start_date
while loopcount <= week_freq_whole:
loopcount = loopcount + 1
week_date_plus = timedelta(weeks = c.recur_freq)
new_week_date = loop_start_date + week_date_plus
loop_start_date = new_week_date
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=new_week_date,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
if week_freq > week_freq_whole:
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=c.due_date,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
if c.recur_pattern == 2:
r = relativedelta(end_date, start_date)
num_of_months = float(r.years * 12 + r.months) / c.recur_freq
loopcount = 0
loop_start_date = start_date
while loopcount < int(num_of_months):
months_date_plus = loop_start_date + relativedelta(months=c.recur_freq)
loop_start_date = months_date_plus
loopcount = loopcount + 1
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=months_date_plus,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
if num_of_months > loopcount:
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=end_date,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
if c.recur_pattern == 3:
r = relativedelta(end_date, start_date)
if r.years > 0:
loopcount = 0
loop_start_date = start_date
while loopcount < int(r.years):
years_date_plus = loop_start_date + relativedelta(years=c.recur_freq)
loop_start_date = years_date_plus
loopcount = loopcount + 1
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=years_date_plus,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
if r.months > 0 or r.days > 0:
compliance = Compliance.objects.create(
app_type=app.app_type,
title=app.title,
condition=c,
approval_id=approval.id,
applicant=approval.applicant,
assignee=None,
assessed_by=None,
assessed_date=None,
due_date=end_date,
status=Compliance.COMPLIANCE_STATUS_CHOICES.future
)
def ammendment_approved(self,app):
if app.approval_id:
approval = Approval.objects.get(id=app.approval_id)
approval.ammendment_application = app
approval.save()
return
class ApplicationAssignPerson(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a person
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
if app.state == 14:
messages.error(self.request, 'This application is completed and cannot be assigned.')
return HttpResponseRedirect("/")
if app.group is None:
messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
return HttpResponseRedirect(app.get_absolute_url())
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == app.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
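        # Access rights come back from the workflow JSON as the strings
        # "True"/"False" rather than booleans, hence the string comparisons
        # used throughout these views.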
print (flowcontext["may_assign_to_person"])
if flowcontext["may_assign_to_person"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ApplicationAssignPerson, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignPersonForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssignPerson, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_update', args=(self.object.pk,))
def form_valid(self, form):
self.object = form.save(commit=True)
app = self.object
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
flow.get(workflowtype)
emailcontext = {'person': app.assignee}
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
if self.request.user != app.assignee:
sendHtmlEmail([app.assignee.email], emailcontext['application_name'] + ' application assigned to you ', emailcontext, 'application-assigned-to-person.html', None, None, None)
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_state_display()))
action.save()
if self.request.user != app.assignee:
messages.success(self.request, 'Assign person completed')
return HttpResponseRedirect(reverse('application_list'))
else:
messages.success(self.request, 'Assign person completed')
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ApplicationAssignPerson, self).get_initial()
app = self.get_object()
if app.routeid is None:
app.routeid = 1
initial['assigngroup'] = app.group
return initial
class ApplicationAssignOfficer(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a person
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
if app.state == 14:
messages.error(self.request, 'This application is completed and cannot be assigned.')
return HttpResponseRedirect("/")
if app.group is None:
messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
return HttpResponseRedirect(app.get_absolute_url())
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == app.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_assign_to_officer"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ApplicationAssignOfficer, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignOfficerForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssignOfficer, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_update', args=(self.object.pk,))
def form_valid(self, form):
self.object = form.save(commit=True)
app = self.object
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
flow.get(workflowtype)
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Application assigned officer to {} '.format(self.object.assigned_officer.get_full_name()))
action.save()
if self.request.user != app.assignee:
messages.success(self.request, 'Assign officer completed')
return HttpResponseRedirect(reverse('application_list'))
else:
messages.success(self.request, 'Assign officer completed')
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ApplicationAssignOfficer, self).get_initial()
app = self.get_object()
if app.routeid is None:
app.routeid = 1
initial['assigngroup'] = app.group
return initial
class ApplicationCancel(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a person
"""
model = Application
template_name = 'applications/application_cancel_form.html'
def get(self, request, *args, **kwargs):
app = self.get_object()
if app.state == 14:
messages.error(self.request, 'This application is completed and cannot be cancelled.')
return HttpResponseRedirect("/")
#if app.group is None:
# messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
# return HttpResponseRedirect(app.get_absolute_url())
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == app.app_type:
app_type_short_name = i
#flow = Flow()
#flow.get(app_type_short_name)
#flowcontext = {}
#flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if self.request.user.groups.filter(name__in=['Statdev Processor']).exists():
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ApplicationCancel, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignCancelForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationCancel, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_list', args=(self.object.pk,))
def form_valid(self, form):
self.object = form.save(commit=True)
app = self.object
app.status = 2
app.save()
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.cancel, user=self.request.user,
action='Application cancelled')
action.save()
messages.success(self.request, 'Application cancelled')
return HttpResponseRedirect(reverse('application_list'))
def get_initial(self):
initial = super(ApplicationCancel, self).get_initial()
return initial
class ComplianceAssignPerson(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a person
"""
model = Compliance
def get(self, request, *args, **kwargs):
app = self.get_object()
# if app.state == 14:
# messages.error(self.request, 'This compliance is approved and cannot be assigned.')
# return HttpResponseRedirect("/")
return super(ComplianceAssignPerson, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.ComplianceAssignPersonForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ComplianceAssignPerson, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse('compliance_approval_update_internal', args=(self.object.pk,))
def form_valid(self, form):
self.object = form.save(commit=True)
app = self.object
#flow = Flow()
#workflowtype = flow.getWorkFlowTypeFromApp(app)
#DefaultGroups = flow.groupList()
#flow.get(workflowtype)
#emailcontext = {'person': app.assignee}
#emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
#if self.request.user != app.assignee:
# sendHtmlEmail([app.assignee.email], emailcontext['application_name'] + ' application assigned to you ', emailcontext, 'application-assigned-to-person.html', None, None, None)
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_status_display()))
action.save()
if self.request.user != app.assignee:
messages.success(self.request, 'Assign person completed')
return HttpResponseRedirect(reverse('application_list'))
else:
messages.success(self.request, 'Assign person completed')
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ComplianceAssignPerson, self).get_initial()
app = self.get_object()
initial['assigngroup'] = app.group
return initial
#if app.routeid is None:
# app.routeid = 1
class ApplicationAssignApplicantCompany(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a company holder
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
context_processor = template_context(self.request)
staff_access = context_processor['admin_assessor_staff']
        if staff_access != True:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
#if app.group is None:
# messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationAssignApplicantCompany, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignApplicantFormCompany
def get_success_url(self, application_id):
return reverse('application_update', args=(application_id,))
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
staff_access = context_processor['admin_assessor_staff']
        if staff_access != True:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssignApplicantCompany, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=True)
self.object.applicant = None
self.object.save()
app = self.object
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
flow.get(workflowtype)
emailcontext = {'person': app.assignee}
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
if self.object.assignee:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_state_display()))
action.save()
return HttpResponseRedirect(self.get_success_url(self.kwargs['pk']))
def get_initial(self):
initial = super(ApplicationAssignApplicantCompany, self).get_initial()
app = self.get_object()
initial['organisation'] = self.kwargs['organisation_id']
return initial
class ApplicationAssignApplicant(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant details to be reassigned to a different applicant name and
is only can only be set by and admin officer.
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
context_processor = template_context(self.request)
staff_access = context_processor['admin_assessor_staff']
        if staff_access != True:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
#if app.group is None:
# messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationAssignApplicant, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignApplicantForm
def get_success_url(self, application_id):
return reverse('application_update', args=(application_id,))
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
staff_access = context_processor['admin_assessor_staff']
        if staff_access != True:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssignApplicant, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.organisation = None
self.object.save()
app = self.object
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
flow.get(workflowtype)
emailcontext = {'person': app.assignee}
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
# if self.request.user != app.assignee:
# sendHtmlEmail([app.assignee.email], emailcontext['application_name'] + ' application assigned to you ', emailcontext, 'application-assigned-to-person.html', None, None, None)
# Record an action on the application:
if self.object.assignee:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_state_display()))
action.save()
return HttpResponseRedirect(self.get_success_url(self.kwargs['pk']))
def get_initial(self):
initial = super(ApplicationAssignApplicant, self).get_initial()
app = self.get_object()
initial['applicant'] = self.kwargs['applicantid']
return initial
class ApplicationAssign(LoginRequiredMixin, UpdateView):
"""A view to allow an application to be assigned to an internal user or back to the customer.
The ``action`` kwarg is used to define the new state of the application.
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
if self.kwargs['action'] == 'customer':
# Rule: application can go back to customer when only status is
# 'with admin'.
if app.state != app.APP_STATE_CHOICES.with_admin:
messages.error(
self.request, 'This application cannot be returned to the customer!')
return HttpResponseRedirect(app.get_absolute_url())
if self.kwargs['action'] == 'assess':
# Rule: application can be assessed when status is 'with admin',
# 'with referee' or 'with manager'.
if app.app_type == app.APP_TYPE_CHOICES.part5:
flow = Flow()
flow.get('part5')
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, 'part5')
if flowcontext["may_assign_assessor"] != "True":
messages.error(self.request, 'This application cannot be assigned to an assessor!')
return HttpResponseRedirect(app.get_absolute_url())
else:
if app.state not in [app.APP_STATE_CHOICES.with_admin, app.APP_STATE_CHOICES.with_referee, app.APP_STATE_CHOICES.with_manager]:
messages.error(self.request, 'This application cannot be assigned to an assessor!')
return HttpResponseRedirect(app.get_absolute_url())
# Rule: only the assignee (or a superuser) can assign for approval.
if self.kwargs['action'] == 'approve':
if app.app_type == app.APP_TYPE_CHOICES.part5:
flow = Flow()
flow.get('part5')
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, 'part5')
if flowcontext["may_submit_approval"] != "True":
messages.error(self.request, 'This application cannot be assigned to an assessor!')
return HttpResponseRedirect(app.get_absolute_url())
else:
if app.state != app.APP_STATE_CHOICES.with_assessor:
messages.error(self.request, 'You are unable to assign this application for approval/issue!')
return HttpResponseRedirect(app.get_absolute_url())
if app.assignee != request.user and not request.user.is_superuser:
messages.error(self.request, 'You are unable to assign this application for approval/issue!')
return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationAssign, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
if self.kwargs['action'] == 'customer':
return apps_forms.AssignCustomerForm
elif self.kwargs['action'] == 'process':
return apps_forms.AssignProcessorForm
elif self.kwargs['action'] == 'assess':
return apps_forms.AssignAssessorForm
elif self.kwargs['action'] == 'approve':
return apps_forms.AssignApproverForm
elif self.kwargs['action'] == 'assign_emergency':
return apps_forms.AssignEmergencyForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationAssign, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
app = self.object
if self.kwargs['action'] == 'customer':
messages.success(self.request, 'Application {} has been assigned back to customer'.format(self.object.pk))
else:
messages.success(self.request, 'Application {} has been assigned to {}'.format(self.object.pk, self.object.assignee.get_full_name()))
if self.kwargs['action'] == 'customer':
# Assign the application back to the applicant and make it 'draft'
# status.
self.object.assignee = self.object.applicant
self.object.state = self.object.APP_STATE_CHOICES.draft
# TODO: email the feedback back to the customer.
if self.kwargs['action'] == 'assess':
if app.app_type == app.APP_TYPE_CHOICES.part5:
flow = Flow()
flow.get('part5')
nextroute = flow.getNextRoute('assess', app.routeid, "part5")
self.object.routeid = nextroute
self.object.state = self.object.APP_STATE_CHOICES.with_assessor
if self.kwargs['action'] == 'approve':
if app.app_type == app.APP_TYPE_CHOICES.part5:
flow = Flow()
flow.get('part5')
nextroute = flow.getNextRoute('manager', app.routeid, "part5")
self.object.routeid = nextroute
self.object.state = self.object.APP_STATE_CHOICES.with_manager
if self.kwargs['action'] == 'process':
if app.app_type == app.APP_TYPE_CHOICES.part5:
flow = Flow()
flow.get('part5')
nextroute = flow.getNextRoute('admin', app.routeid, "part5")
self.object.routeid = nextroute
self.object.state = self.object.APP_STATE_CHOICES.with_manager
self.object.save()
if self.kwargs['action'] == 'customer':
# Record the feedback on the application:
d = form.cleaned_data
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.communicate, user=self.request.user,
action='Feedback provided to applicant: {}'.format(d['feedback']))
action.save()
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_state_display()))
action.save()
return HttpResponseRedirect(self.get_success_url())
# Have disabled the URL; this should be covered in the workflow.
class ApplicationDiscard(LoginRequiredMixin, UpdateView):
"""Allows and applicant to discard the application.
"""
model = Application
def get(self, request, *args, **kwargs):
app = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if app.state == 1:
            if request.user.id != app.assignee.id and admin_staff is not True:
                messages.error(self.request, 'Sorry you are not authorised')
                return HttpResponseRedirect(self.get_success_url())
        else:
            messages.error(self.request, 'Sorry you are not authorised')
            return HttpResponseRedirect(self.get_success_url())
#if app.group is None:
# messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ApplicationDiscard, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.ApplicationDiscardForm
def get_success_url(self):
return reverse('home_page')
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ApplicationDiscard, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.state = 17
self.object.route_status = "Deleted"
self.object.save()
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Application Discard')
action.save()
messages.success(self.request, "Your application has been discard")
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ApplicationDiscard, self).get_initial()
app = self.get_object()
return initial
class ComplianceActions(DetailView):
model = Compliance
template_name = 'applications/compliance_actions.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if admin_staff != True:
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceActions, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceActions, self).get_context_data(**kwargs)
app = self.get_object()
# TODO: define a GenericRelation field on the Application model.
context['actions'] = Action.objects.filter(
content_type=ContentType.objects.get_for_model(app), object_id=app.pk).order_by('-timestamp')
return context
class ComplianceSubmit(LoginRequiredMixin, UpdateView):
"""Allows and applicant to discard the application.
"""
model = Compliance
def get(self, request, *args, **kwargs):
self.object = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Delegate.objects.filter(email_user=self.request.user, organisation=self.object.organisation).count()
if admin_staff == True:
pass
elif self.request.user.groups.filter(name__in=['Statdev Assessor']).exists():
pass
elif self.request.user == self.object.applicant:
pass
elif org == 1:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ComplianceSubmit, self).get(request, *args, **kwargs)
def get_form_class(self):
return apps_forms.ComplianceSubmitForm
def get_success_url(self):
return reverse('compliance_condition_complete', args=(self.object.id,))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ComplianceSubmit, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.status = 9
self.object.submit_date = datetime.now()
self.object.submitted_by = self.request.user
assigngroup = Group.objects.get(name='Statdev Assessor')
self.object.group = assigngroup
self.object.save()
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance Submitted')
action.save()
messages.success(self.request, "Your compliance has beeen submitted for approval")
emailcontext = {}
#emailcontext['groupname'] = DefaultGroups['grouplink'][action]
emailcontext['clearance_id'] = self.object.id
emailGroup('New Clearance of Condition Submitted', emailcontext, 'clearance-of-condition-submitted.html', None, None, None, 'Statdev Assessor')
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ComplianceSubmit, self).get_initial()
app = self.get_object()
return initial
class ComplianceStaff(LoginRequiredMixin, UpdateView):
"""Allows and applicant to discard the application.
"""
model = Compliance
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
if admin_staff == True:
pass
elif self.request.user.groups.filter(name__in=['Statdev Assessor']).exists():
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
return super(ComplianceStaff, self).get(request, *args, **kwargs)
def get_form_class(self):
return apps_forms.ComplianceStaffForm
def get_success_url(self):
return reverse('home_page')
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
return super(ComplianceStaff, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
action = self.kwargs['action']
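        # Compliance status codes used below (assumed): 4=approved,
        # 5=with assessor, 6=with manager/approver, 7=returned to holder.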
if action == 'approve':
self.object.status = 4
self.object.assessed_by = self.request.user
self.object.assessed_date = date.today()
self.object.assignee = None
messages.success(self.request, "Compliance has been approved.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance has been approved')
action.save()
emailcontext = {}
emailcontext['app'] = self.object
emailcontext['person'] = self.object.submitted_by
emailcontext['body'] = "Your clearance of condition has been approved"
sendHtmlEmail([self.object.submitted_by.email], 'Clearance of condition has been approved', emailcontext, 'clearance-approved.html', None, None, None)
elif action == 'manager':
self.object.status = 6
#self.object.group
approver = Group.objects.get(name='Statdev Approver')
self.object.assignee = None
self.object.group = approver
messages.success(self.request, "Compliance has been assigned to the manager group.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance assigned to Manager')
action.save()
emailcontext = {}
emailcontext['clearance_id'] = self.object.id
emailGroup('Clearance of Condition Assigned to Manager Group', emailcontext, 'clearance-of-condition-assigned-groups.html', None, None, None, 'Statdev Approver')
elif action == 'holder':
self.object.status = 7
self.object.group = None
self.object.assignee = None
messages.success(self.request, "Compliance has been assigned to the holder.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
                action='Compliance has been returned to holder')
action.save()
emailcontext = {}
emailcontext['app'] = self.object
emailcontext['person'] = self.object.submitted_by
emailcontext['body'] = "Your clearance of condition requires additional information."
            sendHtmlEmail([self.object.submitted_by.email], 'Your clearance of condition requires additional information. Please log in and resubmit.', emailcontext, 'clearance-holder.html', None, None, None)
elif action == 'assessor':
self.object.status = 5
self.object.group = None
self.object.assignee = None
assigngroup = Group.objects.get(name='Statdev Assessor')
self.object.group = assigngroup
messages.success(self.request, "Compliance has been assigned to the assessor.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
                action='Compliance has been assigned to the assessor')
action.save()
emailcontext = {}
emailcontext['clearance_id'] = self.object.id
emailGroup('Clearance of Condition Assigned to Assessor Group', emailcontext, 'clearance-of-condition-assigned-groups.html', None, None, None, 'Statdev Assessor')
self.object.save()
# Record an action on the application:
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
initial = super(ComplianceStaff, self).get_initial()
app = self.get_object()
initial['action'] = self.kwargs['action']
return initial
class ApplicationIssue(LoginRequiredMixin, UpdateView):
"""A view to allow a manager to issue an assessed application.
"""
model = Application
def get(self, request, *args, **kwargs):
# Rule: only the assignee (or a superuser) can perform this action.
app = self.get_object()
if app.assignee == request.user or request.user.is_superuser:
return super(ApplicationIssue, self).get(request, *args, **kwargs)
messages.error(
self.request, 'You are unable to issue this application!')
return HttpResponseRedirect(app.get_absolute_url())
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url()+'update/')
return super(ApplicationIssue, self).post(request, *args, **kwargs)
def get_form_class(self):
app = self.get_object()
if app.app_type == app.APP_TYPE_CHOICES.emergency:
return apps_forms.ApplicationEmergencyIssueForm
else:
return apps_forms.ApplicationIssueForm
def get_initial(self):
initial = super(ApplicationIssue, self).get_initial()
app = self.get_object()
if app.app_type == app.APP_TYPE_CHOICES.emergency:
if app.organisation:
initial['holder'] = app.organisation.name
initial['abn'] = app.organisation.abn
elif app.applicant:
initial['holder'] = app.applicant.get_full_name()
return initial
def form_valid(self, form):
self.object = form.save(commit=False)
d = form.cleaned_data
if self.request.POST.get('issue') == 'Issue':
self.object.state = self.object.APP_STATE_CHOICES.current
self.object.assignee = None
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.issue,
user=self.request.user, action='Application issued')
action.save()
if self.object.app_type == self.object.APP_TYPE_CHOICES.emergency:
self.object.issue_date = date.today()
msg = """<strong>The emergency works has been successfully issued.</strong><br />
<br />
<strong>Emergency Works:</strong> \tEW-{0}<br />
<strong>Date / Time:</strong> \t{1}<br />
<br />
<a href="{2}">{3}</a>
<br />
"""
if self.object.applicant:
msg = msg + """The Emergency Works has been emailed."""
else:
msg = msg + """The Emergency Works needs to be printed and posted."""
messages.success(self.request, msg.format(self.object.pk, self.object.issue_date.strftime('%d/%m/%Y'),
self.get_success_url() + "pdf", 'EmergencyWorks.pdf'))
else:
messages.success(
self.request, 'Application {} has been issued'.format(self.object.pk))
elif self.request.POST.get('decline') == 'Decline':
self.object.state = self.object.APP_STATE_CHOICES.declined
self.object.assignee = None
# Record an action on the application:
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.decline,
user=self.request.user, action='Application declined')
action.save()
messages.warning(
self.request, 'Application {} has been declined'.format(self.object.pk))
self.object.save()
# TODO: logic around emailing/posting the application to the customer.
return HttpResponseRedirect(self.get_success_url())
class OLDComplianceAssignPerson(LoginRequiredMixin, UpdateView):
"""A view to allow an application applicant to be assigned to a person
"""
model = Compliance
def get(self, request, *args, **kwargs):
app = self.get_object()
if app.group is None:
messages.error(self.request, 'Unable to set Person Assignments as No Group Assignments Set!')
return HttpResponseRedirect(app.get_absolute_url())
        return super(OLDComplianceAssignPerson, self).get(request, *args, **kwargs)
def get_form_class(self):
# Return the specified form class
return apps_forms.AssignPersonForm
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().get_absolute_url())
        return super(OLDComplianceAssignPerson, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=True)
app = self.object
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
DefaultGroups = flow.groupList()
flow.get(workflowtype)
emailcontext = {'person': app.assignee}
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
if self.request.user != app.assignee:
sendHtmlEmail([app.assignee.email], emailcontext['application_name'] + ' application assigned to you ', emailcontext, 'application-assigned-to-person.html', None, None, None)
# Record an action on the application:
# action = Action(
# content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
# action='Assigned application to {} (status: {})'.format(self.object.assignee.get_full_name(), self.object.get_state_display()))
# action.save()
if self.request.user != app.assignee:
return HttpResponseRedirect(reverse('application_list'))
else:
return HttpResponseRedirect(self.get_success_url())
def get_initial(self):
        initial = super(OLDComplianceAssignPerson, self).get_initial()
app = self.get_object()
if app.routeid is None:
app.routeid = 1
initial['assigngroup'] = app.group
return initial
class ReferralComplete(LoginRequiredMixin, UpdateView):
"""A view to allow a referral to be marked as 'completed'.
"""
model = Referral
form_class = apps_forms.ReferralCompleteForm
def get(self, request, *args, **kwargs):
        referral = self.get_object()
        refcount = Referral.objects.filter(application=referral.application, referee=self.request.user).count()
if refcount == 1:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
# Rule: can't mark a referral completed more than once.
# if referral.response_date:
if referral.status != Referral.REFERRAL_STATUS_CHOICES.referred:
messages.error(self.request, 'This referral is already completed!')
return HttpResponseRedirect(referral.application.get_absolute_url())
# Rule: only the referee (or a superuser) can mark a referral
# "complete".
if referral.referee == request.user or request.user.is_superuser:
return super(ReferralComplete, self).get(request, *args, **kwargs)
messages.error(
self.request, 'You are unable to mark this referral as complete!')
return HttpResponseRedirect(referral.application.get_absolute_url())
def get_context_data(self, **kwargs):
context = super(ReferralComplete, self).get_context_data(**kwargs)
self.template_name = 'applications/referral_complete_form.html'
context['application'] = self.get_object().application
return context
def post(self, request, *args, **kwargs):
        referral = self.get_object()
        refcount = Referral.objects.filter(application=referral.application, referee=self.request.user).count()
if refcount == 1:
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralComplete, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.response_date = date.today()
self.object.status = Referral.REFERRAL_STATUS_CHOICES.responded
self.object.save()
app = self.object.application
# Record an action on the referral's application:
action = Action(
content_object=app, user=self.request.user,
action='Referral to {} marked as completed'.format(self.object.referee))
action.save()
# If there are no further outstanding referrals, then set the
# application status to "with admin".
# if not Referral.objects.filter(
# application=app, status=Referral.REFERRAL_STATUS_CHOICES.referred).exists():
# app.state = Application.APP_STATE_CHOICES.with_admin
# app.save()
refnextaction = Referrals_Next_Action_Check()
refactionresp = refnextaction.get(app)
if refactionresp == True:
app_updated = refnextaction.go_next_action(app)
# Record an action.
action = Action(
content_object=app,
                action='No outstanding referrals, application routed to next step "{}"'.format(app_updated.get_state_display()), category=3)
action.save()
return HttpResponseRedirect(app.get_absolute_url())
class ReferralRecall(LoginRequiredMixin, UpdateView):
model = Referral
form_class = apps_forms.ReferralRecallForm
template_name = 'applications/referral_recall.html'
def get(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_recall_resend"] == "True":
pass
#admin_staff = context_processor['admin_staff']
#if admin_staff == True:
# pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
# Rule: can't recall a referral that is any other status than
# 'referred'.
if referral.status != Referral.REFERRAL_STATUS_CHOICES.referred:
messages.error(self.request, 'This referral is already completed!')
return HttpResponseRedirect(referral.application.get_absolute_url())
return super(ReferralRecall, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralRecall, self).get_context_data(**kwargs)
context['referral'] = self.get_object()
return context
def post(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_recall_resend"] == "True":
pass
#if admin_staff == True:
# pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralRecall, self).post(request, *args, **kwargs)
def form_valid(self, form):
ref = self.get_object()
ref.status = Referral.REFERRAL_STATUS_CHOICES.recalled
ref.save()
# Record an action on the referral's application:
action = Action(
content_object=ref.application, user=self.request.user,
action='Referral to {} recalled'.format(ref.referee), category=3)
action.save()
# check to see if there is any uncompleted/unrecalled referrals
# If no more pending referrals than more to next step in workflow
refnextaction = Referrals_Next_Action_Check()
refactionresp = refnextaction.get(ref.application)
if refactionresp == True:
refnextaction.go_next_action(ref.application)
action = Action(
content_object=ref.application, user=self.request.user,
                action='All referrals completed, progressed to next workflow action {} '.format(ref.referee), category=3)
action.save()
return HttpResponseRedirect(ref.application.get_absolute_url())
class ReferralResend(LoginRequiredMixin, UpdateView):
model = Referral
form_class = apps_forms.ReferralResendForm
template_name = 'applications/referral_resend.html'
def get(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_resend"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
        if referral.status != Referral.REFERRAL_STATUS_CHOICES.recalled and referral.status != Referral.REFERRAL_STATUS_CHOICES.responded:
            messages.error(self.request, 'This referral cannot be resent (status: {})!'.format(referral.status))
return HttpResponseRedirect(referral.application.get_absolute_url())
return super(ReferralResend, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralResend, self).get_context_data(**kwargs)
context['referral'] = self.get_object()
return context
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
referral = self.get_object()
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_resend"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralResend, self).post(request, *args, **kwargs)
def form_valid(self, form):
ref = self.get_object()
ref.status = Referral.REFERRAL_STATUS_CHOICES.referred
ref.save()
# Record an action on the referral's application:
action = Action(
content_object=ref.application, user=self.request.user,
            action='Referral to {} resent'.format(ref.referee), category=3)
action.save()
return HttpResponseRedirect(ref.application.get_absolute_url())
class ReferralSend(LoginRequiredMixin, UpdateView):
model = Referral
form_class = apps_forms.ReferralResendForm
template_name = 'applications/referral_resend.html'
def get(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_resend"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if referral.status != Referral.REFERRAL_STATUS_CHOICES.with_admin:
            messages.error(self.request, 'This referral has already been sent (status: {})!'.format(referral.status))
return HttpResponseRedirect(referral.application.get_absolute_url())
return super(ReferralSend, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralSend, self).get_context_data(**kwargs)
context['referral'] = self.get_object()
return context
def get_success_url(self, application_id):
return reverse('application_refer', args=(application_id,))
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
referral = self.get_object()
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_resend"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralSend, self).post(request, *args, **kwargs)
def form_valid(self, form):
ref = self.get_object()
ref.status = Referral.REFERRAL_STATUS_CHOICES.referred
if ref.sent_date is None:
ref.sent_date = date.today()
ref.expire_date = ref.sent_date + timedelta(days=ref.period)
ref.save()
emailcontext = {}
emailcontext['person'] = ref.referee
emailcontext['application_id'] = ref.application.id
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[ref.application.app_type]
sendHtmlEmail([ref.referee.email], 'Application for Feedback', emailcontext, 'application-assigned-to-referee.html', None, None, None)
# Record an action on the referral's application:
action = Action(
content_object=ref.application, user=self.request.user,
action='Referral to {} sent '.format(ref.referee), category=3)
action.save()
return HttpResponseRedirect(ref.application.get_absolute_url())
class ReferralRemind(LoginRequiredMixin, UpdateView):
model = Referral
form_class = apps_forms.ReferralRemindForm
template_name = 'applications/referral_remind.html'
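    # Email the referee a reminder for a referral that is still in 'referred'
    # status, and record the reminder as an action.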
def get(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
#admin_staff = context_processor['admin_staff']
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_recall_resend"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if referral.status != Referral.REFERRAL_STATUS_CHOICES.referred:
messages.error(self.request, 'This referral is already completed!')
return HttpResponseRedirect(referral.application.get_absolute_url())
return super(ReferralRemind, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralRemind, self).get_context_data(**kwargs)
context['referral'] = self.get_object()
return context
def post(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
        if flowcontext["may_recall_resend"] != "True":
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralRemind, self).post(request, *args, **kwargs)
def form_valid(self, form):
ref = self.get_object()
emailcontext = {}
emailcontext['person'] = ref.referee
emailcontext['application_id'] = ref.application.id
emailcontext['application_name'] = Application.APP_TYPE_CHOICES[ref.application.app_type]
sendHtmlEmail([ref.referee.email], 'Application for Feedback Reminder', emailcontext, 'application-assigned-to-referee.html', None, None, None)
action = Action(
content_object=ref.application, user=self.request.user,
action='Referral to {} reminded'.format(ref.referee), category=3)
action.save()
        return HttpResponseRedirect(ref.application.get_absolute_url())
class ReferralDelete(LoginRequiredMixin, UpdateView):
model = Referral
form_class = apps_forms.ReferralDeleteForm
template_name = 'applications/referral_delete.html'
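    # Delete a referral (workflow permission or admin staff required) and
    # record the deletion against the parent application.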
def get(self, request, *args, **kwargs):
referral = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_delete"] == "True":
return super(ReferralDelete, self).get(request, *args, **kwargs)
else:
if admin_staff == True:
return super(ReferralDelete, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
# if referral.status != Referral.REFERRAL_STATUS_CHOICES.with_admin:
# messages.error(self.request, 'This referral is already completed!')
# return HttpResponseRedirect(referral.application.get_absolute_url())
# return super(ReferralDelete, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReferralDelete, self).get_context_data(**kwargs)
context['referral'] = self.get_object()
return context
def get_success_url(self, application_id):
return reverse('application_refer', args=(application_id,))
def post(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
referral = self.get_object()
app = referral.application
app_type_short_name = None
for i in Application.APP_TYPE_CHOICES._identifier_map:
if Application.APP_TYPE_CHOICES._identifier_map[i] == referral.application.app_type:
app_type_short_name = i
flow = Flow()
flow.get(app_type_short_name)
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, app_type_short_name)
if flowcontext["may_referral_delete"] == "True":
pass
else:
messages.error(self.request, 'Forbidden from viewing this page.')
return HttpResponseRedirect("/")
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ReferralDelete, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        ref = self.get_object()
        application = ref.application
        ref.delete()
        # Record an action on the referral's application:
        action = Action(
            content_object=application, user=self.request.user,
            action='Referral to {} deleted'.format(ref.referee), category=3)
        action.save()
        return HttpResponseRedirect(self.get_success_url(application.id))
#class ComplianceList(ListView):
# model = Compliance
#
# def get_queryset(self):
# qs = super(ComplianceList, self).get_queryset()
# # Did we pass in a search string? If so, filter the queryset and return
# # it.
# if 'q' in self.request.GET and self.request.GET['q']:
# query_str = self.request.GET['q']
# # Replace single-quotes with double-quotes
# query_str = query_str.replace("'", r'"')
# # Filter by applicant__email, assignee__email, compliance
# query = get_query(
# query_str, ['applicant__email', 'assignee__email', 'compliance'])
# qs = qs.filter(query).distinct()
# return qs
class ComplianceCompleteExternal(LoginRequiredMixin,UpdateView):
model = Compliance
template_name = 'applications/compliance_update_external.html'
form_class = apps_forms.ComplianceCompleteExternal
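    # External view for an applicant/delegate to attach records to a clearance
    # of condition and submit it; submission assigns it to the assessor group
    # (status 5).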
def get(self, request, *args, **kwargs):
self.object = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Delegate.objects.filter(email_user=self.request.user, organisation=self.object.organisation).count()
        if (admin_staff != True and
                not self.request.user.groups.filter(name__in=['Statdev Assessor']).exists() and
                self.request.user != self.object.applicant and
                org != 1):
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
        if self.object.status not in (2, 7, 8):
if self.object.status == 3:
messages.error(self.request, 'The clearance of condition is not due yet.')
else:
messages.error(self.request, 'Unable to complete clearance of condition.')
return HttpResponseRedirect(reverse("home_page_tabs", args=('clearance',)))
return super(ComplianceCompleteExternal, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceCompleteExternal, self).get_context_data(**kwargs)
app = self.get_object()
context['conditions'] = Compliance.objects.filter(id=app.id)
return context
def post(self, request, *args, **kwargs):
        if request.POST.get('cancel'):
            return HttpResponseRedirect(reverse("home_page_tabs", args=('clearance',)))
return super(ComplianceCompleteExternal, self).post(request, *args, **kwargs)
def get_initial(self):
initial = super(ComplianceCompleteExternal, self).get_initial()
records = self.object.records.all()
multifilelist = []
for b1 in records:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['name'] = b1.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['records'] = multifilelist
return initial
def form_valid(self, form):
self.object = form.save(commit=False)
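        # 'records_json' carries the ids of Records uploaded client-side;
        # replace the compliance's records m2m set with exactly those Records.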
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
json_data = json.loads(self.request.POST['records_json'])
                self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
self.object.save()
if self.request.POST.get('save'):
messages.success(self.request, 'Successfully Saved')
return HttpResponseRedirect(reverse("compliance_approval_update_external", args=(self.object.id,)))
group = Group.objects.get(name='Statdev Assessor')
self.object.group = group
self.object.status = 5
self.object.save()
return HttpResponseRedirect(reverse("home_page_tabs", args=('clearance',)))
class ComplianceViewExternal(LoginRequiredMixin,DetailView):
# model = Approval
model = Compliance
template_name = 'applications/compliance_view_external.html'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Delegate.objects.filter(email_user=self.request.user, organisation=self.object.organisation).count()
        if (admin_staff != True and
                not self.request.user.groups.filter(name__in=['Statdev Assessor']).exists() and
                self.request.user != self.object.applicant and
                org != 1):
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceViewExternal, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceViewExternal, self).get_context_data(**kwargs)
app = self.get_object()
# context['conditions'] = Compliance.objects.filter(approval_id=app.id)
context['conditions'] = Compliance.objects.filter(id=app.id)
return context
class ComplianceApprovalInternal(LoginRequiredMixin,UpdateView):
model = Compliance
template_name = 'applications/compliance_update_internal.html'
form_class = apps_forms.ComplianceCompleteInternal
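    # Internal assessment view for a clearance of condition: form_valid()
    # routes the compliance according to the posted 'action' code (see the
    # mapping comment in form_valid).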
def get(self, request, *args, **kwargs):
self.object = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Delegate.objects.filter(email_user=self.request.user, organisation=self.object.organisation).count()
        if (admin_staff != True and
                not self.request.user.groups.filter(name__in=['Statdev Assessor']).exists() and
                self.request.user != self.object.applicant and
                org != 1):
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceApprovalInternal, self).get(request, *args, **kwargs)
def get_initial(self):
initial = super(ComplianceApprovalInternal, self).get_initial()
        initial['status'] = self.object.status
        external_documents = self.object.external_documents.all()
multifilelist = []
for b1 in external_documents:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['name'] = b1.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['external_documents'] = multifilelist
return initial
def get_context_data(self, **kwargs):
context = super(ComplianceApprovalInternal, self).get_context_data(**kwargs)
app = self.get_object()
# context['conditions'] = Compliance.objects.filter(approval_id=app.id)
context['conditions'] = Compliance.objects.filter(id=app.id)
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse("compliance_list",))
return super(ComplianceApprovalInternal, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
action = self.request.POST.get('action')
external_comments = self.request.POST.get('external_comments','')
internal_comments = self.request.POST.get('internal_comments','')
internal_documents = self.request.POST.get('internal_documents')
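        # Action codes posted by the form (per the branches below):
        #   '1' approve (status 4); '2' escalate to the approver group (status 6);
        #   '3' return to the holder (status 7); '4' return to the assessor group (status 5).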
if action == '1':
self.object.status = 4
self.object.assessed_by = self.request.user
self.object.assessed_date = date.today()
self.object.assignee = None
messages.success(self.request, "Compliance has been approved.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance has been approved')
action.save()
if len(internal_comments) > 0:
approval = Approval.objects.get(id=self.object.approval_id)
comms = CommunicationApproval.objects.create(approval=approval,comms_type=4,comms_to=str('Approved'), comms_from='', subject='internal comment', details=internal_comments)
if 'internal_documents_json' in self.request.POST:
if is_json(self.request.POST['internal_documents_json']) is True:
json_data = json.loads(self.request.POST['internal_documents_json'])
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
comms.records.add(doc)
comms.save()
emailcontext = {}
emailcontext['app'] = self.object
emailcontext['person'] = self.object.submitted_by
emailcontext['body'] = "Your clearance of condition has been approved"
sendHtmlEmail([self.object.submitted_by.email], 'Clearance of condition has been approved', emailcontext, 'clearance-approved.html', None, None, None)
elif action == '2':
self.object.status = 6
#self.object.group
approver = Group.objects.get(name='Statdev Approver')
self.object.assignee = None
self.object.group = approver
messages.success(self.request, "Compliance has been assigned to the manager group.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance assigned to Manager')
action.save()
if len(internal_comments) > 0:
approval = Approval.objects.get(id=self.object.approval_id)
comms = CommunicationApproval.objects.create(approval=approval,comms_type=4,comms_to=str('Sent to Manager'), comms_from='', subject='internal comment', details=internal_comments)
if 'internal_documents_json' in self.request.POST:
if is_json(self.request.POST['internal_documents_json']) is True:
json_data = json.loads(self.request.POST['internal_documents_json'])
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
comms.records.add(doc)
comms.save()
emailcontext = {}
emailcontext['clearance_id'] = self.object.id
emailGroup('Clearance of Condition Assigned to Manager Group', emailcontext, 'clearance-of-condition-assigned-groups.html', None, None, None, 'Statdev Approver')
elif action == '3':
self.object.status = 7
self.object.group = None
self.object.assignee = None
messages.success(self.request, "Compliance has been assigned to the holder.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
                action='Compliance has been returned to holder')
action.save()
if len(external_comments) > 0:
approval = Approval.objects.get(id=self.object.approval_id)
comms = CommunicationApproval.objects.create(approval=approval,comms_type=4,comms_to=str('Return to licence holder'), comms_from='', subject='external comment', details=external_comments)
comms.save()
if len(internal_comments) > 0:
approval = Approval.objects.get(id=self.object.approval_id)
comms = CommunicationApproval.objects.create(approval=approval,comms_type=4,comms_to=str('Sent to Manager'), comms_from='', subject='internal comment', details=internal_comments)
if 'internal_documents_json' in self.request.POST:
if is_json(self.request.POST['internal_documents_json']) is True:
json_data = json.loads(self.request.POST['internal_documents_json'])
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
comms.records.add(doc)
comms.save()
emailcontext = {}
emailcontext['app'] = self.object
emailcontext['person'] = self.object.submitted_by
emailcontext['body'] = "Your clearance of condition requires additional information."
            sendHtmlEmail([self.object.submitted_by.email], 'Your clearance of condition requires additional information', emailcontext, 'clearance-holder.html', None, None, None)
elif action == '4':
self.object.status = 5
self.object.group = None
self.object.assignee = None
assigngroup = Group.objects.get(name='Statdev Assessor')
self.object.group = assigngroup
messages.success(self.request, "Compliance has been assigned to the assessor.")
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.assign, user=self.request.user,
action='Compliance has been returned to assessor')
action.save()
if len(internal_comments) > 0:
approval = Approval.objects.get(id=self.object.approval_id)
comms = CommunicationApproval.objects.create(approval=approval,comms_type=4,comms_to=str('Return to assessor'), comms_from='', subject='internal comment', details=internal_comments)
if 'internal_documents_json' in self.request.POST:
if is_json(self.request.POST['internal_documents_json']) is True:
json_data = json.loads(self.request.POST['internal_documents_json'])
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
comms.records.add(doc)
comms.save()
emailcontext = {}
emailcontext['clearance_id'] = self.object.id
emailGroup('Clearance of condition assigned to Assessor Group', emailcontext, 'clearance-of-condition-assigned-groups.html', None, None, None, 'Statdev Assessor')
else:
raise ValidationError("ERROR, no action found: "+str(action))
if 'external_documents_json' in self.request.POST:
if is_json(self.request.POST['external_documents_json']) is True:
json_data = json.loads(self.request.POST['external_documents_json'])
                self.object.external_documents.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
doc.file_group = 2006
doc.save()
self.object.external_documents.add(doc)
self.object.save()
return HttpResponseRedirect(reverse("compliance_list",))
class ComplianceApprovalDetails(LoginRequiredMixin,DetailView):
# model = Approval
model = Compliance
template_name = 'applications/compliance_detail.html'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Delegate.objects.filter(email_user=self.request.user, organisation=self.object.organisation).count()
        if (admin_staff != True and
                not self.request.user.groups.filter(name__in=['Statdev Assessor']).exists() and
                self.request.user != self.object.applicant and
                org != 1):
            messages.error(self.request, 'Forbidden from viewing this page.')
            return HttpResponseRedirect("/")
return super(ComplianceApprovalDetails, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ComplianceApprovalDetails, self).get_context_data(**kwargs)
app = self.get_object()
# context['conditions'] = Compliance.objects.filter(approval_id=app.id)
context['conditions'] = Compliance.objects.filter(id=app.id)
return context
class ComplianceSubmitComplete(LoginRequiredMixin,DetailView):
# model = Approval
model = Compliance
template_name = 'applications/compliance_complete.html'
def get_context_data(self, **kwargs):
context = super(ComplianceSubmitComplete, self).get_context_data(**kwargs)
app = self.get_object()
# context['conditions'] = Compliance.objects.filter(approval_id=app.id)
context['conditions'] = Compliance.objects.filter(id=app.id)
return context
class ComplianceComplete(LoginRequiredMixin,UpdateView):
model = Compliance
template_name = 'applications/compliance_update.html'
form_class = apps_forms.ComplianceComplete
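    # Internal view to update the records attached to a clearance of condition.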
def get_context_data(self, **kwargs):
context = super(ComplianceComplete, self).get_context_data(**kwargs)
app = self.get_object()
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
compliance = Compliance.objects.get(id=kwargs['pk'])
return HttpResponseRedirect(reverse("compliance_approval_detail", args=(compliance.approval_id,)))
return super(ComplianceComplete, self).post(request, *args, **kwargs)
def get_initial(self):
initial = super(ComplianceComplete, self).get_initial()
multifilelist = []
records = self.object.records.all()
for b1 in records:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['records'] = multifilelist
return initial
def form_valid(self, form):
self.object = form.save(commit=False)
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
json_data = json.loads(self.request.POST['records_json'])
                self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
self.object.save()
return HttpResponseRedirect(reverse("compliance_approval_detail", args=(self.object.id,)))
# This in theory should be able to be deleted; need to check first.
class ComplianceCreate(LoginRequiredMixin, ModelFormSetView):
model = Compliance
form_class = apps_forms.ComplianceCreateForm
template_name = 'applications/compliance_formset.html'
fields = ['condition', 'compliance']
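    # Formset view: presents one compliance form per condition on the
    # application and creates a Compliance object for each form with text.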
def get_application(self):
return Application.objects.get(pk=self.kwargs['pk'])
def get_context_data(self, **kwargs):
context = super(ComplianceCreate, self).get_context_data(**kwargs)
app = self.get_application()
context['application'] = app
return context
def get_initial(self):
# Return a list of dicts, each containing a reference to one condition.
app = self.get_application()
conditions = app.condition_set.all()
return [{'condition': c} for c in conditions]
def get_factory_kwargs(self):
kwargs = super(ComplianceCreate, self).get_factory_kwargs()
app = self.get_application()
conditions = app.condition_set.all()
# Set the number of forms in the set to equal the number of conditions.
kwargs['extra'] = len(conditions)
return kwargs
def get_extra_form_kwargs(self):
kwargs = super(ComplianceCreate, self).get_extra_form_kwargs()
kwargs['application'] = self.get_application()
return kwargs
def formset_valid(self, formset):
for form in formset:
data = form.cleaned_data
# If text has been input to the compliance field, create a new
# compliance object.
if 'compliance' in data and data.get('compliance', None):
new_comp = form.save(commit=False)
new_comp.applicant = self.request.user
new_comp.application = self.get_application()
new_comp.submit_date = date.today()
# TODO: handle the uploaded file.
new_comp.save()
# Record an action on the compliance request's application:
action = Action(
content_object=new_comp.application, user=self.request.user,
action='Request for compliance created')
action.save()
messages.success(
self.request, 'New requests for compliance have been submitted.')
return super(ComplianceCreate, self).formset_valid(formset)
def get_success_url(self):
return reverse('application_detail', args=(self.get_application().pk,))
class WebPublish(LoginRequiredMixin, UpdateView):
model = Application
form_class = apps_forms.ApplicationWebPublishForm
template_name = "applications/application_publish_form.html"
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
return super(WebPublish, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_update', args=(self.kwargs['pk'],))
def get_context_data(self, **kwargs):
context = super(WebPublish,
self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['pk'])
#context['file_group'] = '2003'
#context['file_group_ref_id'] = self.kwargs['pk']
return context
def get_initial(self):
initial = super(WebPublish, self).get_initial()
initial['application'] = self.kwargs['pk']
current_date = datetime.now().strftime('%d/%m/%Y')
publish_type = self.kwargs['publish_type']
        if publish_type == 'received':
            initial['publish_documents'] = current_date
        elif publish_type == 'draft':
            initial['publish_draft_report'] = current_date
        elif publish_type == 'final':
            initial['publish_final_report'] = current_date
        elif publish_type == 'determination':
            initial['publish_determination_report'] = current_date
initial['publish_type'] = self.kwargs['publish_type']
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(WebPublish, self).post(request, *args, **kwargs)
def form_valid(self, form):
forms_data = form.cleaned_data
self.object = form.save(commit=True)
publish_type = self.kwargs['publish_type']
current_date = datetime.now().strftime('%Y-%m-%d')
        if publish_type == 'received':
self.object.publish_documents = current_date
action = Action(
content_object=self.object, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.publish,
                action='Application Published (Received) expiring ('+self.object.publish_documents_expiry.strftime('%m/%d/%Y %H:%M')+')')
action.save()
        elif publish_type == 'draft':
action = Action(
content_object=self.object, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.publish,
action='Application Published (Draft) expiring ('+self.object.publish_draft_expiry.strftime('%m/%d/%Y %H:%M')+')')
action.save()
self.object.publish_draft_report = current_date
        elif publish_type == 'final':
action = Action(
content_object=self.object, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.publish,
action='Application Published (Final) expiring ('+self.object.publish_final_expiry.strftime('%m/%d/%Y %H:%M')+')')
action.save()
self.object.publish_final_report = current_date
        elif publish_type == 'determination':
action = Action(
content_object=self.object, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.publish,
action='Application Published (Determination)')
action.save()
self.object.publish_determination_report = current_date
return super(WebPublish, self).form_valid(form)
class NewsPaperPublicationCreate(LoginRequiredMixin, CreateView):
model = PublicationNewspaper
form_class = apps_forms.NewsPaperPublicationCreateForm
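    # Record a newspaper publication against an application, with attached
    # records supplied via 'records_json'.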
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
# if flowcontext.state != app.APP_STATE_CHOICES.draft:
if flowcontext["may_update_publication_newspaper"] != "True":
messages.error(
self.request, "Can't add new newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
return super(NewsPaperPublicationCreate, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.kwargs['pk'],))
def get_context_data(self, **kwargs):
context = super(NewsPaperPublicationCreate,
self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['pk'])
return context
def get_initial(self):
initial = super(NewsPaperPublicationCreate, self).get_initial()
initial['application'] = self.kwargs['pk']
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(NewsPaperPublicationCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
forms_data = form.cleaned_data
self.object = form.save(commit=True)
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
json_data = json.loads(self.request.POST['records_json'])
                self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
action = Action(
content_object=self.object.application, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.create,
action='Newspaper Publication ({} {}) '.format(self.object.newspaper, self.object.date) )
action.save()
return super(NewsPaperPublicationCreate, self).form_valid(form)
class NewsPaperPublicationUpdate(LoginRequiredMixin, UpdateView):
model = PublicationNewspaper
form_class = apps_forms.NewsPaperPublicationCreateForm
def get(self, request, *args, **kwargs):
#app = self.get_object().application_set.first()
PubNew = PublicationNewspaper.objects.get(pk=self.kwargs['pk'])
app = Application.objects.get(pk=PubNew.application.id)
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_newspaper"] != "True":
messages.error(self.request, "Can't update newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
# Rule: can only change a vessel if the parent application is status
# 'draft'.
# if app.state != Application.APP_STATE_CHOICES.draft:
# messages.error(
# self.request, 'You can only change a publication details when the application is "draft" status')
# return HttpResponseRedirect(app.get_absolute_url())
return super(NewsPaperPublicationUpdate, self).get(request, *args, **kwargs)
def get_initial(self):
initial = super(NewsPaperPublicationUpdate, self).get_initial()
# initial['application'] = self.kwargs['pk']
        try:
            pub_news = PublicationNewspaper.objects.get(pk=self.kwargs['pk'])
        except PublicationNewspaper.DoesNotExist:
            pub_news = None
        multifilelist = []
        records = pub_news.records.all() if pub_news else []
        for b1 in records:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['name'] = b1.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['records'] = multifilelist
return initial
def get_context_data(self, **kwargs):
context = super(NewsPaperPublicationUpdate, self).get_context_data(**kwargs)
context['page_heading'] = '' #'Update Newspaper Publication details'
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('application_detail', args=(self.get_object().application.pk,)))
return super(NewsPaperPublicationUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
app = Application.objects.get(pk=self.object.application.id)
pub_news = PublicationNewspaper.objects.get(pk=self.kwargs['pk'])
records = pub_news.records.all()
for filelist in records:
if 'records-clear_multifileid-' + str(filelist.id) in form.data:
pub_news.records.remove(filelist)
        if 'records_json' in self.request.POST and is_json(self.request.POST['records_json']) is True:
            json_data = json.loads(self.request.POST['records_json'])
            self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
action = Action(
content_object=self.object.application, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Newspaper Publication ({} {}) '.format(self.object.newspaper, self.object.date) )
action.save()
return HttpResponseRedirect(app.get_absolute_url())
class NewsPaperPublicationDelete(LoginRequiredMixin, DeleteView):
model = PublicationNewspaper
def get(self, request, *args, **kwargs):
modelobject = self.get_object()
PubNew = PublicationNewspaper.objects.get(pk=self.kwargs['pk'])
app = Application.objects.get(pk=PubNew.application.id)
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_newspaper"] != "True":
messages.error(self.request, "Can't delete newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
        # Rule: can only delete a newspaper publication while the workflow permits it.
return super(NewsPaperPublicationDelete, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.get_object().application.pk,))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_success_url())
# Generate an action.
modelobject = self.get_object()
action = Action(
content_object=modelobject.application, user=self.request.user,
action='Delete Newspaper Publication {} deleted (status: {})'.format(modelobject.pk, 'delete'))
action.save()
messages.success(self.request, 'Newspaper Publication {} has been deleted'.format(modelobject.pk))
return super(NewsPaperPublicationDelete, self).post(request, *args, **kwargs)
class WebsitePublicationChange(LoginRequiredMixin, CreateView):
model = PublicationWebsite
form_class = apps_forms.WebsitePublicationForm
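    # Swap the published (website) copy of an application document: links the
    # original Record (kwargs['docid']) to a replacement uploaded via
    # 'published_document_json', or removes the link if none is supplied.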
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_website"] != "True":
messages.error(self.request, "Can't update ebsite publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
return super(WebsitePublicationChange, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.kwargs['pk'],))
def get_context_data(self, **kwargs):
context = super(WebsitePublicationChange,self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['pk'])
return context
def get_initial(self):
initial = super(WebsitePublicationChange, self).get_initial()
initial['application'] = self.kwargs['pk']
        try:
            pub_web = PublicationWebsite.objects.get(original_document_id=self.kwargs['docid'])
        except PublicationWebsite.DoesNotExist:
            pub_web = None
if pub_web:
initial['published_document'] = pub_web.published_document
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(WebsitePublicationChange, self).post(request, *args, **kwargs)
def form_valid(self, form):
forms_data = form.cleaned_data
self.object = form.save(commit=False)
orig_doc = Record.objects.get(id=self.kwargs['docid'])
self.object.original_document = orig_doc
# print "SSS"
# print self.request.FILES.get('published_document')
# print self.request.POST
if 'published_document_json' in self.request.POST:
if is_json(self.request.POST['published_document_json']) is True:
json_data = json.loads(self.request.POST['published_document_json'])
if 'doc_id' in json_data:
                    try:
                        pub_obj = PublicationWebsite.objects.get(original_document_id=self.kwargs['docid'])
                        pub_obj.delete()
                    except PublicationWebsite.DoesNotExist:
                        pass
new_doc = Record.objects.get(id=json_data['doc_id'])
self.object.published_document = new_doc
                else:
                    try:
                        pub_obj = PublicationWebsite.objects.get(original_document_id=self.kwargs['docid'])
                        pub_obj.delete()
                    except PublicationWebsite.DoesNotExist:
                        pass
app = Application.objects.get(pk=self.kwargs['pk'])
action = Action(
content_object=app, user=self.request.user, category=Action.ACTION_CATEGORY_CHOICES.change,
action='Publish New Web Documents for Doc ID: {}'.format(self.kwargs['docid']))
action.save()
return super(WebsitePublicationChange, self).form_valid(form)
class FeedbackPublicationCreate(LoginRequiredMixin, CreateView):
model = PublicationFeedback
form_class = apps_forms.FeedbackPublicationCreateForm
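    # Record feedback received on a published application; the 'status' kwarg
    # ('review', 'final', 'determination' or default 'draft') tags the
    # feedback with the publication stage.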
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_feedback_review"] == "True":
return super(FeedbackPublicationCreate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_draft"] == "True":
return super(FeedbackPublicationCreate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_final"] == "True":
return super(FeedbackPublicationCreate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_determination"] == "True":
return super(FeedbackPublicationCreate, self).get(request, *args, **kwargs)
else:
messages.error(
self.request, "Can't add new newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
def get_success_url(self):
return reverse('application_detail', args=(self.kwargs['pk'],))
def get_context_data(self, **kwargs):
context = super(FeedbackPublicationCreate,
self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['pk'])
return context
def get_initial(self):
initial = super(FeedbackPublicationCreate, self).get_initial()
initial['application'] = self.kwargs['pk']
        if self.kwargs['status'] in ('review', 'final', 'determination'):
            initial['status'] = self.kwargs['status']
        else:
            initial['status'] = 'draft'
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(FeedbackPublicationCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=True)
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
json_data = json.loads(self.request.POST['records_json'])
                self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
return super(FeedbackPublicationCreate, self).form_valid(form)
class FeedbackPublicationView(LoginRequiredMixin, DetailView):
model = PublicationFeedback
template_name = 'applications/application_feedback_view.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
        if admin_staff != True:
            messages.error(self.request, 'Forbidden Access.')
            return HttpResponseRedirect("/")
return super(FeedbackPublicationView, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.kwargs['application'],))
def get_context_data(self, **kwargs):
context = super(FeedbackPublicationView,
self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['application'])
return context
class FeedbackPublicationUpdate(LoginRequiredMixin, UpdateView):
model = PublicationFeedback
form_class = apps_forms.FeedbackPublicationCreateForm
def get(self, request, *args, **kwargs):
modelobject = self.get_object()
app = modelobject.application
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_feedback_review"] == "True":
return super(FeedbackPublicationUpdate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_draft"] == "True":
return super(FeedbackPublicationUpdate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_final"] == "True":
return super(FeedbackPublicationUpdate, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_determination"] == "True":
return super(FeedbackPublicationUpdate, self).get(request, *args, **kwargs)
else:
messages.error(
self.request, "Can't change feedback publication for this application")
return HttpResponseRedirect(app.get_absolute_url())
def get_success_url(self):
return reverse('application_detail', args=(self.kwargs['application'],))
def get_context_data(self, **kwargs):
context = super(FeedbackPublicationUpdate,
self).get_context_data(**kwargs)
context['application'] = Application.objects.get(pk=self.kwargs['application'])
return context
def get_initial(self):
initial = super(FeedbackPublicationUpdate, self).get_initial()
initial['application'] = self.kwargs['application']
        try:
            pub_feed = PublicationFeedback.objects.get(pk=self.kwargs['pk'])
        except PublicationFeedback.DoesNotExist:
            pub_feed = None
        multifilelist = []
        records = pub_feed.records.all() if pub_feed else []
        for b1 in records:
fileitem = {}
fileitem['fileid'] = b1.id
fileitem['path'] = b1.upload.name
fileitem['name'] = b1.name
fileitem['extension'] = b1.extension
multifilelist.append(fileitem)
initial['records'] = multifilelist
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['application'])
return HttpResponseRedirect(app.get_absolute_url())
return super(FeedbackPublicationUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
app = Application.objects.get(pk=self.object.application.id)
pub_feed = PublicationFeedback.objects.get(pk=self.kwargs['pk'])
if 'records_json' in self.request.POST:
if is_json(self.request.POST['records_json']) is True:
json_data = json.loads(self.request.POST['records_json'])
                self.object.records.clear()
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.records.add(doc)
return super(FeedbackPublicationUpdate, self).form_valid(form)
class FeedbackPublicationDelete(LoginRequiredMixin, DeleteView):
model = PublicationFeedback
def get(self, request, *args, **kwargs):
modelobject = self.get_object()
app = modelobject.application
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_publication_feedback_review"] == "True":
return super(FeedbackPublicationDelete, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_draft"] == "True":
return super(FeedbackPublicationDelete, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_final"] == "True":
return super(FeedbackPublicationDelete, self).get(request, *args, **kwargs)
elif flowcontext["may_update_publication_feedback_determination"] == "True":
return super(FeedbackPublicationDelete, self).get(request, *args, **kwargs)
else:
messages.error(
self.request, "Can't delete feedback publication for this application")
return HttpResponseRedirect(app.get_absolute_url())
return super(FeedbackPublicationDelete, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.get_object().application.pk,))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_success_url())
# Generate an action.
modelobject = self.get_object()
action = Action(
content_object=modelobject.application, user=self.request.user,
action='Delete Feedback Publication {} deleted (status: {})'.format(modelobject.pk, 'delete'))
action.save()
messages.success(self.request, 'Newspaper Feedback {} has been deleted'.format(modelobject.pk))
return super(FeedbackPublicationDelete, self).post(request, *args, **kwargs)
class ConditionCreate(LoginRequiredMixin, CreateView):
"""A view for a referee or an internal user to create a Condition object
on an Application.
"""
model = Condition
form_class = apps_forms.ConditionCreateForm
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_create_condition"] != "True":
messages.error(
self.request, "Can't add new newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
# Rule: conditions can be created when the app is with admin, with
# referee or with assessor.
#if app.app_type == app.APP_TYPE_CHOICES.emergency:
# if app.state != app.APP_STATE_CHOICES.draft or app.assignee != self.request.user:
# messages.error(
# self.request, 'New conditions cannot be created for this application!')
# return HttpResponseRedirect(app.get_absolute_url())
#elif app.state not in [app.APP_STATE_CHOICES.with_admin, app.APP_STATE_CHOICES.with_referee, app.APP_STATE_CHOICES.with_assessor]:
# messages.error(
# self.request, 'New conditions cannot be created for this application!')
# return HttpResponseRedirect(app.get_absolute_url())
return super(ConditionCreate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ConditionCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create a new condition'
return context
def get_initial(self):
initial = super(ConditionCreate, self).get_initial()
app = Application.objects.get(pk=self.kwargs['pk'])
condition_no_max = 1
advise_no_max = 1
condition_no_obj = Condition.objects.filter(application=app).aggregate(Max('condition_no'))
advise_no_obj = Condition.objects.filter(application=app).aggregate(Max('advise_no'))
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(self.request, flowcontext, app.routeid, workflowtype)
if condition_no_obj['condition_no__max'] is not None:
condition_no_max = condition_no_obj['condition_no__max'] + 1
if advise_no_obj['advise_no__max'] is not None:
advise_no_max = advise_no_obj['advise_no__max'] + 1
initial['may_assessor_advise'] = flowcontext["may_assessor_advise"]
initial['assessor_staff'] = False
if self.request.user.groups.filter(name__in=['Statdev Assessor']).exists():
initial['assessor_staff'] = True
initial['condition_no'] = condition_no_max
initial['advise_no'] = advise_no_max
return initial
    def get_success_url(self):
        """Redirect to the home page after the condition is created."""
        return "/"
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(ConditionCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
app = Application.objects.get(pk=self.kwargs['pk'])
self.object = form.save(commit=False)
self.object.application = app
# If a referral exists for the parent application for this user,
# link that to the new condition.
if Referral.objects.filter(application=app, referee=self.request.user).exists():
self.object.referral = Referral.objects.get(
application=app, referee=self.request.user)
# If the request user is not in the "Referee" group, then assume they're an internal user
# and set the new condition to "applied" status (default = "proposed").
referee = Group.objects.get(name='Statdev Referee')
if referee not in self.request.user.groups.all():
self.object.status = Condition.CONDITION_STATUS_CHOICES.applied
self.object.save()
# Record an action on the application:
action = Action(
content_object=app, category=Action.ACTION_CATEGORY_CHOICES.create, user=self.request.user,
action='Created condition {} (status: {})'.format(self.object.pk, self.object.get_status_display()))
action.save()
messages.success(self.request, 'Condition {} Created'.format(self.object.pk))
return super(ConditionCreate, self).form_valid(form)
class ConditionUpdate(LoginRequiredMixin, UpdateView):
"""A view to allow an assessor to update a condition that might have been
proposed by a referee.
The ``action`` kwarg is used to define the new state of the condition.
"""
model = Condition
def get(self, request, *args, **kwargs):
condition = self.get_object()
app = condition.application
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_create_condition"] != "True":
messages.error(
self.request, "Can't add new newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
        # Rule: for emergency works, a condition can only be changed while the
        # application is in draft status and assigned to the request user.
        if condition.application.app_type == Application.APP_TYPE_CHOICES.emergency:
            if condition.application.state != Application.APP_STATE_CHOICES.draft:
                messages.error(
                    self.request, 'You cannot change conditions once the application has been issued')
                return HttpResponseRedirect(condition.application.get_absolute_url())
            elif condition.application.assignee != self.request.user:
                messages.error(
                    self.request, 'You cannot change conditions when the application is not assigned to you')
                return HttpResponseRedirect(condition.application.get_absolute_url())
            else:
                return super(ConditionUpdate, self).get(request, *args, **kwargs)
        # Rule: can only change a condition if the request user is an Assessor,
        # or they are assigned the referral to which the condition is attached
        # and that referral is not yet completed.
        assessor = Group.objects.get(name='Statdev Assessor')
        ref = condition.referral
        if assessor in self.request.user.groups.all() or (ref and ref.referee == request.user and ref.status == Referral.REFERRAL_STATUS_CHOICES.referred):
            return super(ConditionUpdate, self).get(request, *args, **kwargs)
        else:
            messages.warning(self.request, 'You cannot update this condition')
            return HttpResponseRedirect(condition.application.get_absolute_url())
def get_initial(self):
initial = super(ConditionUpdate, self).get_initial()
condition = self.get_object()
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(condition.application)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(self.request, flowcontext, condition.application.routeid, workflowtype)
initial['may_assessor_advise'] = flowcontext["may_assessor_advise"]
initial['assessor_staff'] = False
if self.request.user.groups.filter(name__in=['Statdev Assessor']).exists():
initial['assessor_staff'] = True
return initial
    def get_success_url(self):
        """Override to redirect to the site root after updating a condition.
        """
        return "/"
def get_form_class(self):
# Updating the condition as an 'action' should not allow the user to
# change the condition text.
if 'action' in self.kwargs:
return apps_forms.ConditionActionForm
return apps_forms.ConditionUpdateForm
def get_context_data(self, **kwargs):
context = super(ConditionUpdate, self).get_context_data(**kwargs)
if 'action' in self.kwargs:
if self.kwargs['action'] == 'apply':
context['page_heading'] = 'Apply a proposed condition'
elif self.kwargs['action'] == 'reject':
context['page_heading'] = 'Reject a proposed condition'
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_object().application.get_absolute_url())
return super(ConditionUpdate, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        self.object = form.save(commit=False)
        if 'action' in self.kwargs:
            if self.kwargs['action'] == 'apply':
                self.object.status = Condition.CONDITION_STATUS_CHOICES.applied
            elif self.kwargs['action'] == 'reject':
                self.object.status = Condition.CONDITION_STATUS_CHOICES.rejected
        # Record an action against the parent application:
        action = Action(
            content_object=self.object.application, user=self.request.user,
            action='Condition {} updated (status: {})'.format(self.object.pk, self.object.get_status_display()))
        action.save()
        self.object.save()
        messages.success(self.request, 'Condition {} updated'.format(self.object.pk))
        return super(ConditionUpdate, self).form_valid(form)
class ConditionDelete(LoginRequiredMixin, DeleteView):
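    """A view to delete a condition from an application, subject to the
    workflow's access rights.
    """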
model = Condition
def get(self, request, *args, **kwargs):
condition = self.get_object()
app = condition.application
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext["may_create_condition"] != "True":
messages.error(
self.request, "Can't add new newspaper publication to this application")
return HttpResponseRedirect(app.get_absolute_url())
return super(ConditionDelete, self).get(request, *args, **kwargs)
# Rule: can only delete a condition if the parent application is status
# 'with referral' or 'with assessor'. Can also delete if you are the user assigned
# to an Emergency Works
#if condition.application.app_type != Application.APP_TYPE_CHOICES.emergency:
# if condition.application.state not in [Application.APP_STATE_CHOICES.with_assessor, Application.APP_STATE_CHOICES.with_referee]:
# messages.warning(self.request, 'You cannot delete this condition')
# return HttpResponseRedirect(condition.application.get_absolute_url())
# # Rule: can only delete a condition if the request user is an Assessor
# # or they are assigned the referral to which the condition is attached
# # and that referral is not completed.
# assessor = Group.objects.get(name='Statdev Assessor')
# ref = condition.referral
# if assessor in self.request.user.groups.all() or (ref and ref.referee == request.user and ref.status == Referral.REFERRAL_STATUS_CHOICES.referred):
# return super(ConditionDelete, self).get(request, *args, **kwargs)
# else:
# messages.warning(self.request, 'You cannot delete this condition')
# return HttpResponseRedirect(condition.application.get_absolute_url())
#else:
# Rule: can only delete a condition if the request user is the assignee and the application
# has not been issued.
#if condition.application.assignee == request.user and condition.application.state != Application.APP_STATE_CHOICES.issued:
# return super(ConditionDelete, self).get(request, *args, **kwargs)
#else:
# messages.warning(self.request, 'You cannot delete this condition')
# return HttpResponseRedirect(condition.application.get_absolute_url())
def get_success_url(self):
return reverse('application_detail', args=(self.get_object().application.pk,))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_success_url())
# Generate an action.
condition = self.get_object()
action = Action(
content_object=condition.application, user=self.request.user,
action='Condition {} deleted (status: {})'.format(condition.pk, condition.get_status_display()))
action.save()
#messages.success(self.request, 'Condition {} has been deleted'.format(condition.pk))
return super(ConditionDelete, self).post(request, *args, **kwargs)
class ConditionSuspension(LoginRequiredMixin, UpdateView):
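    """A view to suspend or unsuspend a condition, restricted to admin staff.
    The ``action`` kwarg determines whether the condition is suspended or
    unsuspended.
    """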
model = Condition
form_class = apps_forms.ConditionSuspension
    def get(self, request, *args, **kwargs):
        context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        if admin_staff is not True:
            messages.error(self.request, 'Forbidden Access.')
            return HttpResponseRedirect("/")
        return super(ConditionSuspension, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_detail', args=(self.get_object().application.pk,))
    def post(self, request, *args, **kwargs):
        if request.POST.get('cancel'):
            return HttpResponseRedirect(self.get_success_url())
        return super(ConditionSuspension, self).post(request, *args, **kwargs)
def get_initial(self):
initial = super(ConditionSuspension, self).get_initial()
initial['actionkwargs'] = self.kwargs['action']
return initial
    def form_valid(self, form):
        self.object = form.save(commit=False)
        actionkwargs = self.kwargs['action']
        if actionkwargs == 'suspend':
            self.object.suspend = True
        elif actionkwargs == 'unsuspend':
            self.object.suspend = False
        action = Action(
            content_object=self.object, user=self.request.user,
            action='Condition {} {}ed (status: {})'.format(self.object.pk, actionkwargs, self.object.get_status_display()))
        action.save()
        messages.success(self.request, 'Condition {} has been {}ed'.format(self.object.pk, actionkwargs))
        return super(ConditionSuspension, self).form_valid(form)
class VesselCreate(LoginRequiredMixin, CreateView):
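    """A view to add a new vessel to an application, including its
    registration and supporting document records.
    """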
model = Vessel
form_class = apps_forms.VesselForm
def get(self, request, *args, **kwargs):
app = Application.objects.get(pk=self.kwargs['pk'])
# action = self.kwargs['action']
flow = Flow()
flowcontext = {}
if app.assignee:
flowcontext['application_assignee_id'] = app.assignee.id
else:
flowcontext['application_assignee_id'] = None
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
        # Statdev Processor group members may always add vessels; otherwise
        # the workflow must allow updating the vessels list.
        if (not self.request.user.groups.filter(name__in=['Statdev Processor']).exists()
                and flowcontext["may_update_vessels_list"] != "True"):
            messages.error(
                self.request, "Can't add new vessels to this application")
            return HttpResponseRedirect(app.get_absolute_url())
        return super(VesselCreate, self).get(request, *args, **kwargs)
def get_success_url(self):
return reverse('application_update', args=(self.kwargs['pk'],))
def get_context_data(self, **kwargs):
context = super(VesselCreate, self).get_context_data(**kwargs)
context['page_heading'] = 'Create new vessel details'
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = Application.objects.get(pk=self.kwargs['pk'])
return HttpResponseRedirect(app.get_absolute_url())
return super(VesselCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
app = Application.objects.get(pk=self.kwargs['pk'])
self.object = form.save()
app.vessels.add(self.object.id)
app.save()
if 'registration_json' in self.request.POST:
if is_json(self.request.POST['registration_json']) is True:
json_data = json.loads(self.request.POST['registration_json'])
for d in self.object.registration.all():
self.object.registration.remove(d)
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.registration.add(doc)
if 'documents_json' in self.request.POST:
if is_json(self.request.POST['documents_json']) is True:
json_data = json.loads(self.request.POST['documents_json'])
for d in self.object.documents.all():
self.object.documents.remove(d)
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.documents.add(doc)
# Registration document uploads.
# if self.request.FILES.get('registration'):
# for f in self.request.FILES.getlist('registration'):
# doc = Record()
# doc.upload = f
# doc.save()
# self.object.registration.add(doc)
        return HttpResponseRedirect(reverse('inside_popup_notification'))
class VesselDelete(LoginRequiredMixin, UpdateView):
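    """A view to delete a vessel from an application, subject to the
    workflow's vessel-list access rights.
    """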
model = Vessel
form_class = apps_forms.VesselDeleteForm
template_name = 'applications/vessel_delete.html'
def get(self, request, *args, **kwargs):
vessel = self.get_object()
app = self.get_object().application_set.first()
flow = Flow()
flowcontext = {}
if app.assignee:
flowcontext['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
flowcontext['application_assignee_id'] = self.request.user.id
else:
flowcontext['application_assignee_id'] = None
# flowcontext['application_assignee_id'] = app.assignee.id
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_vessels_list"] != "True":
# if app.state != app.APP_STATE_CHOICES.draft:
messages.error(
self.request, "Can't add new vessels to this application")
return HttpResponseRedirect(reverse('popup-error'))
#if referral.status != Referral.REFERRAL_STATUS_CHOICES.referred:
# messages.error(self.request, 'This delete is already completed!')
# return HttpResponseRedirect(referral.application.get_absolute_url())
return super(VesselDelete, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(VesselDelete, self).get_context_data(**kwargs)
context['vessel'] = self.get_object()
return context
def get_success_url(self, application_id):
return reverse('application_update', args=(application_id,))
    def post(self, request, *args, **kwargs):
        if request.POST.get('cancel'):
            # Vessels link to applications via a M2M, so fetch the first one.
            app = self.get_object().application_set.first()
            return HttpResponseRedirect(app.get_absolute_url())
        return super(VesselDelete, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        vessel = self.get_object()
        app = vessel.application_set.first()
        vessel.delete()
        # Record an action on the vessel's application:
        action = Action(
            content_object=app, user=self.request.user,
            action='Vessel {} deleted'.format(vessel.id))
        action.save()
        return HttpResponseRedirect(reverse('inside_popup_notification'))
class VesselUpdate(LoginRequiredMixin, UpdateView):
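    """A view to update a vessel's details and its registration and
    supporting document records.
    """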
model = Vessel
form_class = apps_forms.VesselForm
def get(self, request, *args, **kwargs):
app = self.get_object().application_set.first()
# Rule: can only change a vessel if the parent application is status
# 'draft'.
#if app.state != Application.APP_STATE_CHOICES.draft:
# messages.error(
# self.request, 'You can only change a vessel details when the application is "draft" status')
# return HttpResponseRedirect(app.get_absolute_url())
flowcontext = {}
if app.assignee:
flowcontext['application_assignee_id'] = app.assignee.id
else:
if float(app.routeid) == 1 and app.assignee is None:
flowcontext['application_assignee_id'] = self.request.user.id
else:
flowcontext['application_assignee_id'] = None
flow = Flow()
#flowcontext = {}
# flowcontext['application_assignee_id'] = app.assignee.id
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
if flowcontext["may_update_vessels_list"] != "True":
# if app.state != app.APP_STATE_CHOICES.draft:
messages.error(
self.request, "Can't add new vessels to this application")
return HttpResponseRedirect(reverse('notification_popup'))
return super(VesselUpdate, self).get(request, *args, **kwargs)
def get_success_url(self,app_id):
return reverse('application_update', args=(app_id,))
def get_context_data(self, **kwargs):
context = super(VesselUpdate, self).get_context_data(**kwargs)
context['page_heading'] = 'Update vessel details'
return context
    def get_initial(self):
        initial = super(VesselUpdate, self).get_initial()
        vessel = self.get_object()
        # Build the initial multi-file lists for the registration and
        # documents fields from the vessel's linked Record objects.
        for field in ('registration', 'documents'):
            multifilelist = []
            for record in getattr(vessel, field).all():
                multifilelist.append({
                    'fileid': record.id,
                    'path': record.upload.name,
                    'name': record.name,
                    'extension': record.extension,
                })
            initial[field] = multifilelist
        return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
app = self.get_object().application_set.first()
return HttpResponseRedirect(app.get_absolute_url())
return super(VesselUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
# Registration document uploads.
# rego = self.object.registration.all()
if 'registration_json' in self.request.POST:
if is_json(self.request.POST['registration_json']) is True:
json_data = json.loads(self.request.POST['registration_json'])
for d in self.object.registration.all():
self.object.registration.remove(d)
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.registration.add(doc)
if 'documents_json' in self.request.POST:
if is_json(self.request.POST['documents_json']) is True:
json_data = json.loads(self.request.POST['documents_json'])
for d in self.object.documents.all():
self.object.documents.remove(d)
for i in json_data:
doc = Record.objects.get(id=i['doc_id'])
self.object.documents.add(doc)
#for filelist in rego:
# if 'registration-clear_multifileid-' + str(filelist.id) in form.data:
# self.object.registration.remove(filelist)
#
# if self.request.FILES.get('registration'):
# for f in self.request.FILES.getlist('registration'):
# doc = Record()
# doc.upload = f
# doc.save()
# self.object.registration.add(doc)
        return HttpResponseRedirect(reverse('inside_popup_notification'))
#class RecordCreate(LoginRequiredMixin, CreateView):
# form_class = apps_forms.RecordCreateForm
# template_name = 'applications/document_form.html'
#
# def get_context_data(self, **kwargs):
# context = super(RecordCreate, self).get_context_data(**kwargs)
# context['page_heading'] = 'Create new Record'
# return context
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('home_page'))
# return super(RecordCreate, self).post(request, *args, **kwargs)
#
# def form_valid(self, form):
# """Override form_valid to set the assignee as the object creator.
# """
# self.object = form.save(commit=False)
# self.object.save()
# success_url = reverse('document_list', args=(self.object.pk,))
# return HttpResponseRedirect(success_url)
#class RecordList(ListView):
# model = Record
#class UserAccount(LoginRequiredMixin, DetailView):
# model = EmailUser
# template_name = 'accounts/user_account.html'
#
# def get_object(self, queryset=None):
# """Override get_object to always return the request user.
# """
# return self.request.user
#
# def get_context_data(self, **kwargs):
# context = super(UserAccount, self).get_context_data(**kwargs)
# context['organisations'] = [i.organisation for i in Delegate.objects.filter(email_user=self.request.user)]
# return context
class UserAccountUpdate(LoginRequiredMixin, UpdateView):
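    """A view to update an EmailUser's personal details, restricted to that
    user or admin staff.
    """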
form_class = apps_forms.EmailUserForm
    def get(self, request, *args, **kwargs):
        context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        # Rule: only admin staff or the account owner may update the account.
        if admin_staff is not True and self.request.user.id != int(self.kwargs['pk']):
            messages.error(self.request, 'Forbidden Access.')
            return HttpResponseRedirect("/")
        return super(UserAccountUpdate, self).get(request, *args, **kwargs)
    def get_object(self, queryset=None):
        if 'pk' in self.kwargs:
            if self.request.user.groups.filter(name__in=['Statdev Processor']).exists():
                return EmailUser.objects.get(pk=self.kwargs['pk'])
            elif self.request.user.id == int(self.kwargs['pk']):
                return EmailUser.objects.get(pk=self.kwargs['pk'])
            else:
                # get_object() must return a model instance (returning a
                # redirect response here would break the view), so refuse
                # access by raising PermissionDenied instead.
                from django.core.exceptions import PermissionDenied
                raise PermissionDenied('Forbidden Access')
        else:
            return self.request.user
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('user_account'))
return HttpResponseRedirect(reverse('person_details_actions', args=(self.kwargs['pk'],'personal')))
return super(UserAccountUpdate, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        """Save the updated EmailUser object and record the change as an action.
        """
        self.obj = form.save(commit=False)
        # If identification has been uploaded, then set the id_verified field to None.
        #if 'identification' in data and data['identification']:
        #    self.obj.id_verified = None
        self.obj.save()
        # Record an action against the user account:
        action = Action(
            content_object=self.obj, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
            action='Updated Personal Details')
        action.save()
        return HttpResponseRedirect(reverse('person_details_actions', args=(self.obj.pk, 'personal')))
class UserAccountIdentificationUpdate(LoginRequiredMixin, UpdateView):
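    """A view to upload or remove a user's identification document, stored
    remotely via the ledger documents API.
    """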
form_class = apps_forms.UserFormIdentificationUpdate
model = EmailUser
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
if admin_staff == True:
return super(UserAccountIdentificationUpdate, self).get(request, *args, **kwargs)
elif self.request.user.id == int(self.kwargs['pk']):
return super(UserAccountIdentificationUpdate, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'Forbidden Access.')
return HttpResponseRedirect("/")
    def get_object(self, queryset=None):
        if 'pk' in self.kwargs:
            pk = self.kwargs['pk']
            if self.request.user.groups.filter(name__in=['Statdev Processor']).exists():
                return EmailUser.objects.get(pk=pk)
            elif self.request.user.id == int(pk):
                return EmailUser.objects.get(pk=pk)
            else:
                # get_object() must return a model instance; refuse access by
                # raising PermissionDenied rather than returning a redirect.
                from django.core.exceptions import PermissionDenied
                raise PermissionDenied('Forbidden Access')
        else:
            return self.request.user
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('user_account'))
return super(UserAccountIdentificationUpdate, self).post(request, *args, **kwargs)
def get_initial(self):
initial = super(UserAccountIdentificationUpdate, self).get_initial()
emailuser = self.get_object()
if emailuser.identification2:
url_data = setUrl()
url_data.url = "/private-ledger/view/"+str(emailuser.identification2.id)+'-'+emailuser.identification2.name+'.'+emailuser.identification2.extension
url_data.value = str(emailuser.identification2.id)+'-'+emailuser.identification2.name+'.'+emailuser.identification2.extension
initial['identification2'] = url_data
return initial
    def form_valid(self, form):
        """Upload or remove the user's identification document and record an action.
        """
self.obj = form.save(commit=False)
forms_data = form.cleaned_data
id_success = "None"
# If identification has been uploaded, then set the id_verified field to None.
# if 'identification' in data and data['identification']:
# self.obj.id_verified = None
if self.request.POST.get('identification2-clear'):
self.obj.identification2 = None
id_success = "Removed"
if self.request.FILES.get('identification2'):
            if Attachment_Extension_Check('single', forms_data['identification2'], None) is False:
                raise ValidationError('Identification contains an unallowed attachment extension.')
identification2_file = self.request.FILES['identification2']
data = base64.b64encode(identification2_file.read())
filename=forms_data['identification2'].name
api_key = settings.LEDGER_API_KEY
url = settings.LEDGER_API_URL+'/ledgergw/remote/documents/update/'+api_key+'/'
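            # Derive the file extension: a '.' four characters from the end
            # indicates a 3-character extension; five from the end, a
            # 4-character one.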
extension =''
if filename[-4:][:-3] == '.':
extension = filename[-3:]
if filename[-5:][:-4] == '.':
extension = filename[-4:]
base64_url = "data:"+mimetypes.types_map['.'+str(extension)]+";base64,"+data.decode()
myobj = {'emailuser_id' :self.object.pk,'filebase64': base64_url, 'extension': extension, 'file_group_id': 1}
            try:
                resp = requests.post(url, data=myobj)
                id_success = "Uploaded new "
            except Exception:
                messages.error(self.request, 'Error saving identification')
                id_success = "Error uploading"
# temporary until all EmailUser Updates go via api.
eu_obj = EmailUser.objects.get(id=self.object.pk)
self.object.identification2=eu_obj.identification2
#new_doc = Document()
#new_doc.file = self.request.FILES['identification']
#new_doc.save()
#self.obj.identification = new_doc
self.obj.save()
action = Action(
content_object=self.object, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action= id_success+' identification')
action.save()
return HttpResponseRedirect(reverse('person_details_actions', args=(self.obj.pk,'identification')))
class OrganisationCertificateUpdate(LoginRequiredMixin, UpdateView):
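    """A view to upload an organisation's certificate of incorporation,
    restricted to the organisation's delegates or admin staff.
    """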
model = OrganisationExtras
form_class = apps_forms.OrganisationCertificateForm
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org_extras = self.get_object()
org =org_extras.organisation
if Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
pass
else:
if admin_staff is True:
return super(OrganisationCertificateUpdate, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'You are not authorised.')
return HttpResponseRedirect(reverse('home_page'))
return super(OrganisationCertificateUpdate, self).get(request, *args, **kwargs)
# def get_object(self, queryset=None):
# if 'pk' in self.kwargs:
# if self.request.user.groups.filter(name__in=['Processor']).exists():
# #user = EmailUser.objects.get(pk=self.kwargs['pk'])
# return self
# else:
# messages.error(
# self.request, "Forbidden Access")
# return HttpResponseRedirect("/")
# else:
# return self.request.user
def post(self, request, *args, **kwargs):
if 'identification' in request.FILES:
if Attachment_Extension_Check('single', request.FILES['identification'], ['.pdf','.png','.jpg']) is False:
            messages.error(self.request, 'You have added an unallowed attachment extension.')
return HttpResponseRedirect(request.path)
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['pk'],'certofincorp')))
return super(OrganisationCertificateUpdate, self).post(request, *args, **kwargs)
    def get_initial(self):
        initial = super(OrganisationCertificateUpdate, self).get_initial()
        if self.object.identification:
            initial['identification'] = self.object.identification.upload
        return initial
    def form_valid(self, form):
        """Save the uploaded certificate of incorporation against the organisation.
        """
self.obj = form.save(commit=False)
forms_data = form.cleaned_data
# If identification has been uploaded, then set the id_verified field to None.
# if 'identification' in data and data['identification']:
# self.obj.id_verified = None
if self.request.POST.get('identification-clear'):
self.obj.identification = None
if self.request.FILES.get('identification'):
            if Attachment_Extension_Check('single', forms_data['identification'], ['.pdf','.png','.jpg']) is False:
                raise ValidationError('Identification contains an unallowed attachment extension.')
new_doc = Record()
new_doc.upload = self.request.FILES['identification']
new_doc.save()
self.obj.identification = new_doc
self.obj.save()
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.obj.organisation.pk,'certofincorp')))
class AddressCreate(LoginRequiredMixin, CreateView):
"""A view to create a new address for an EmailUser.
"""
form_class = apps_forms.AddressForm
template_name = 'accounts/address_form.html'
    def get(self, request, *args, **kwargs):
        # Rule: request user must be the account owner or admin staff.
        context_processor = template_context(self.request)
        admin_staff = context_processor['admin_staff']
        self.object = EmailUser.objects.get(id=self.kwargs['userid'])
        if admin_staff is True:
            return super(AddressCreate, self).get(request, *args, **kwargs)
        elif request.user == self.object:
            return super(AddressCreate, self).get(request, *args, **kwargs)
        else:
            messages.error(self.request, 'You are not authorised to view.')
            return HttpResponseRedirect(reverse('home_page'))
def dispatch(self, request, *args, **kwargs):
# Rule: the ``type`` kwarg must be 'postal' or 'billing'
if self.kwargs['type'] not in ['postal', 'billing']:
messages.error(self.request, 'Invalid address type!')
return HttpResponseRedirect(reverse('user_account'))
return super(AddressCreate, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(AddressCreate, self).get_context_data(**kwargs)
context['address_type'] = self.kwargs['type']
context['action'] = 'Create'
if 'userid' in self.kwargs:
user = EmailUser.objects.get(id=self.kwargs['userid'])
context['principal'] = user.email
else:
context['principal'] = self.request.user.email
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('user_account'))
return super(AddressCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
if 'userid' in self.kwargs:
u = EmailUser.objects.get(id=self.kwargs['userid'])
else:
u = self.request.user
self.obj = form.save(commit=False)
self.obj.user = u
self.obj.save()
# Attach the new address to the user's profile.
if self.kwargs['type'] == 'postal':
u.postal_address = self.obj
elif self.kwargs['type'] == 'billing':
u.billing_address = self.obj
u.save()
action = Action(
content_object=u, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action='New '+self.kwargs['type']+' address created')
action.save()
return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
class OrganisationAddressUpdate(LoginRequiredMixin, UpdateView):
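    """A view to update an organisation address. The address owner, a delegate
    of an organisation that uses the address, or admin staff may change it.
    """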
model = OrganisationAddress
form_class = apps_forms.AddressForm
success_url = reverse_lazy('user_account')
template_name = 'accounts/address_form.html'
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
address = self.get_object()
u = request.user
# User addresses: only the user can change an address.
if u.postal_address == address or u.billing_address == address:
return super(OrganisationAddressUpdate, self).get(request, *args, **kwargs)
# Organisational addresses: find which org uses this address, and if
# the user is a delegate for that org then they can change it.
org_list = list(chain(address.org_postal_address.all(), address.org_billing_address.all()))
if Delegate.objects.filter(email_user=u, organisation__in=org_list).exists():
return super(OrganisationAddressUpdate, self).get(request, *args, **kwargs)
# elif u.is_staff is True:
elif admin_staff is True:
return super(OrganisationAddressUpdate, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'You cannot update this address!')
return HttpResponseRedirect(reverse('home_page'))
def get_context_data(self, **kwargs):
context = super(OrganisationAddressUpdate, self).get_context_data(**kwargs)
context['action'] = 'Update'
address = self.get_object()
u = self.request.user
if u.postal_address == address:
context['action'] = 'Update postal'
context['principal'] = u.email
if u.billing_address == address:
context['action'] = 'Update billing'
context['principal'] = u.email
# TODO: include context for Organisation addresses.
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
#return HttpResponseRedirect(self.success_url)
obj = self.get_object()
u = obj.user
if 'org_id' in self.kwargs:
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['org_id'],'address')))
else:
return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
#if self.request.user.is_staff is True:
# obj = self.get_object()
# u = obj.user
# return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
#else:
return super(OrganisationAddressUpdate, self).post(request, *args, **kwargs)
    def form_valid(self, form):
        self.obj = form.save()
        obj = self.get_object()
        u = obj.user
if 'org_id' in self.kwargs:
org =Organisation.objects.get(id= self.kwargs['org_id'])
action = Action(
content_object=org, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action='Organisation address updated')
action.save()
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['org_id'],'address')))
else:
action = Action(
content_object=u, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action='Person address updated')
action.save()
return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
class AddressUpdate(LoginRequiredMixin, UpdateView):
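    """A view to update an address. The address owner, a delegate of an
    organisation that uses the address, or admin staff may change it.
    """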
model = Address
form_class = apps_forms.AddressForm
success_url = reverse_lazy('user_account')
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
address = self.get_object()
u = request.user
# User addresses: only the user can change an address.
if u.postal_address == address or u.billing_address == address:
return super(AddressUpdate, self).get(request, *args, **kwargs)
# Organisational addresses: find which org uses this address, and if
# the user is a delegate for that org then they can change it.
org_list = list(chain(address.org_postal_address.all(), address.org_billing_address.all()))
if Delegate.objects.filter(email_user=u, organisation__in=org_list).exists():
return super(AddressUpdate, self).get(request, *args, **kwargs)
# elif u.is_staff is True:
elif admin_staff is True:
return super(AddressUpdate, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'You cannot update this address!')
return HttpResponseRedirect(reverse('home_page'))
def get_context_data(self, **kwargs):
context = super(AddressUpdate, self).get_context_data(**kwargs)
context['action'] = 'Update'
address = self.get_object()
u = self.request.user
if u.postal_address == address:
context['action'] = 'Update postal'
context['principal'] = u.email
if u.billing_address == address:
context['action'] = 'Update billing'
context['principal'] = u.email
# TODO: include context for Organisation addresses.
return context
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
#return HttpResponseRedirect(self.success_url)
obj = self.get_object()
u = obj.user
if 'org_id' in self.kwargs:
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['org_id'],'address')))
else:
return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
#if self.request.user.is_staff is True:
# obj = self.get_object()
# u = obj.user
# return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
#else:
return super(AddressUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.obj = form.save()
obj = self.get_object()
u = obj.user
if 'org_id' in self.kwargs:
org =Organisation.objects.get(id= self.kwargs['org_id'])
action = Action(
content_object=org, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action='Organisation address updated')
action.save()
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['org_id'],'address')))
else:
action = Action(
content_object=u, category=Action.ACTION_CATEGORY_CHOICES.change, user=self.request.user,
action='Person address updated')
action.save()
return HttpResponseRedirect(reverse('person_details_actions', args=(u.id,'address')))
#class AddressDelete(LoginRequiredMixin, DeleteView):
# """A view to allow the deletion of an address. Not currently in use,
# because the ledger Address model can cause the linked EmailUser object to
# be deleted along with the Address object :/
# """
# model = Address
# success_url = reverse_lazy('user_account')
#
# def get(self, request, *args, **kwargs):
# address = self.get_object()
# u = self.request.user
# delete_address = False
# # Rule: only the address owner can delete an address.
# if u.postal_address == address or u.billing_address == address:
# delete_address = True
# # Organisational addresses: find which org uses this address, and if
# # the user is a delegate for that org then they can delete it.
# #org_list = list(chain(address.org_postal_address.all(), address.org_billing_address.all()))
# #for org in org_list:
# # if profile in org.delegates.all():
# # delete_address = True
# if delete_address:
# return super(AddressDelete, self).get(request, *args, **kwargs)
# else:
# messages.error(self.request, 'You cannot delete this address!')
# return HttpResponseRedirect(self.success_url)
#
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(self.success_url)
# return super(AddressDelete, self).post(request, *args, **kwargs)
#class OrganisationList(LoginRequiredMixin, ListView):
# model = Organisation
#
# def get_queryset(self):
# qs = super(OrganisationList, self).get_queryset()
# # Did we pass in a search string? If so, filter the queryset and return it.
# if 'q' in self.request.GET and self.request.GET['q']:
# query_str = self.request.GET['q']
# # Replace single-quotes with double-quotes
# query_str = query_str.replace("'", r'"')
# # Filter by name and ABN fields.
# query = get_query(query_str, ['name', 'abn'])
# qs = qs.filter(query).distinct()
# return qs
class PersonDetails(LoginRequiredMixin, DetailView):
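    """A detail view of a person (EmailUser), restricted to that user or
    admin staff.
    """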
model = EmailUser
template_name = 'applications/person_details.html'
def get(self, request, *args, **kwargs):
        # Rule: request user must be the person themselves or admin staff.
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
self.object = self.get_object()
if admin_staff is True:
return super(PersonDetails, self).get(request, *args, **kwargs)
elif request.user == self.object:
return super(PersonDetails, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'You are not authorised to view.')
return HttpResponseRedirect(reverse('home_page'))
def get_queryset(self):
qs = super(PersonDetails, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return it.
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by name and ABN fields.
query = get_query(query_str, ['name', 'abn'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(PersonDetails, self).get_context_data(**kwargs)
org = self.get_object()
# context['user_is_delegate'] = Delegate.objects.filter(email_user=self.request.user, organisation=org).exists()
context['nav_details'] = 'active'
if "action" in self.kwargs:
action=self.kwargs['action']
# Navbar
if action == "personal":
context['nav_details_personal'] = "active"
elif action == "identification":
context['nav_details_identification'] = "active"
#context['person'] = EmailUser.objects.get(id=self.kwargs['pk'])
elif action == "address":
context['nav_details_address'] = "active"
elif action == "contactdetails":
context['nav_details_contactdetails'] = "active"
elif action == "companies":
context['nav_details_companies'] = "active"
user = EmailUser.objects.get(id=self.kwargs['pk'])
context['organisations'] = Delegate.objects.filter(email_user=user)
return context
#class PersonOrgDelete(LoginRequiredMixin, UpdateView):
# model = Organisation
# form_class = apps_forms.PersonOrgDeleteForm
# template_name = 'applications/referral_delete.html'
#
# def get(self, request, *args, **kwargs):
# referral = self.get_object()
# return super(PersonOrgDelete, self).get(request, *args, **kwargs)
#
# def get_success_url(self, org_id):
# return reverse('person_details_actions', args=(org_id,'companies'))
#
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('person_details_actions', args=(self.kwargs['pk'],'companies')))
# return super(PersonOrgDelete, self).post(request, *args, **kwargs)
#
# def form_valid(self, form):
# org = self.get_object()
# org_id = org.id
# org.delete()
# # Record an action on the referral's application:
# action = Action(
# content_object=ref.application, user=self.request.user,
# action='Organisation {} deleted'.format(org_id))
# action.save()
# return HttpResponseRedirect(self.get_success_url(self.pk))
class PersonOther(LoginRequiredMixin, DetailView):
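    """A detail view to list a person's applications, approvals, emergency
    works and clearance requests, restricted to admin staff.
    """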
model = EmailUser
template_name = 'applications/person_details.html'
def get(self, request, *args, **kwargs):
        # Rule: request user must be admin staff.
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
self.object = self.get_object()
if admin_staff is True:
return super(PersonOther, self).get(request, *args, **kwargs)
#elif request.user == self.object:
# return super(PersonOther, self).get(request, *args, **kwargs)
else:
messages.error(self.request, 'You are not authorised')
return HttpResponseRedirect(reverse('home_page'))
# def get_queryset(self):
# qs = super(PersonOther, self).get_queryset()
# # Did we pass in a search string? If so, filter the queryset and return it.
# if 'q' in self.request.GET and self.request.GET['q']:
# query_str = self.request.GET['q']
# # Replace single-quotes with double-quotes
# query_str = query_str.replace("'", r'"')
# # Filter by name and ABN fields.
# query = get_query(query_str, ['name', 'abn'])
# qs = qs.filter(query).distinct()
#print self.template_name
# return qs
def get_context_data(self, **kwargs):
context = super(PersonOther, self).get_context_data(**kwargs)
org = self.get_object()
# context['user_is_delegate'] = Delegate.objects.filter(email_user=self.request.user, organisation=org).exists()
context['nav_other'] = 'active'
if "action" in self.kwargs:
action=self.kwargs['action']
# Navbar
if action == "applications":
user = EmailUser.objects.get(id=self.kwargs['pk'])
delegate = Delegate.objects.filter(email_user=user).values('organisation__id')
context['nav_other_applications'] = "active"
context['app'] = ''
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
                for i in Application.APP_TYPE_CHOICES:
                    # Application types 5-11 are excluded from this listing.
                    if i[0] in [5, 6, 7, 8, 9, 10, 11]:
                        continue
                    APP_TYPE_CHOICES.append(i)
                    APP_TYPE_CHOICES_IDS.append(i[0])
context['app_apptypes'] = APP_TYPE_CHOICES
#context['app_appstatus'] = list(Application.APP_STATE_CHOICES)
context['app_appstatus'] = Application.APP_STATUS
search_filter = Q(applicant=self.kwargs['pk']) | Q(organisation__in=delegate)
if 'searchaction' in self.request.GET and self.request.GET['searchaction']:
query_str = self.request.GET['q']
# query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) | Q(assignee__email__icontains=query_str)
                    if self.request.GET['apptype'] != '':
                        search_filter &= Q(app_type=int(self.request.GET['apptype']))
if self.request.GET['appstatus'] != '':
search_filter &= Q(status=int(self.request.GET['appstatus']))
if self.request.GET['wfstatus'] != '':
search_filter &= Q(route_status=self.request.GET['wfstatus'])
#if self.request.GET['appstatus'] != '':
# search_filter &= Q(state=int(self.request.GET['appstatus']))
# applications = Application.objects.filter(query_obj)
context['query_string'] = self.request.GET['q']
if self.request.GET['apptype'] != '':
context['apptype'] = int(self.request.GET['apptype'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
if 'wfstatus' in self.request.GET:
if self.request.GET['wfstatus'] != '':
context['wfstatus'] = self.request.GET['wfstatus']
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
for se_wo in query_str_split:
search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
if 'from_date' in self.request.GET:
context['from_date'] = self.request.GET['from_date']
context['to_date'] = self.request.GET['to_date']
if self.request.GET['from_date'] != '':
from_date_db = datetime.strptime(self.request.GET['from_date'], '%d/%m/%Y').date()
search_filter &= Q(submit_date__gte=from_date_db)
if self.request.GET['to_date'] != '':
to_date_db = datetime.strptime(self.request.GET['to_date'], '%d/%m/%Y').date()
search_filter &= Q(submit_date__lte=to_date_db)
# print Q(Q(state__in=APP_TYPE_CHOICES_IDS) & Q(search_filter))
applications = Application.objects.filter(Q(app_type__in=APP_TYPE_CHOICES_IDS) & Q(search_filter) )[:200]
context['app_wfstatus'] = list(Application.objects.values_list('route_status',flat = True).distinct())
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
# Create a distinct list of applicants
# if app.applicant:
# if app.applicant.id in context['app_applicants']:
# donothing = ''
# else:
# context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
# context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name })
# end of creation
if app.group is not None:
if app.group in usergroups:
row['may_assign_to_person'] = 'True'
context['app_list'].append(row)
elif action == "approvals":
context['nav_other_approvals'] = "active"
user = EmailUser.objects.get(id=self.kwargs['pk'])
            delegate = Delegate.objects.filter(email_user=user).values('organisation__id')
            search_filter = Q(applicant=self.kwargs['pk'], status=1) | Q(organisation__in=delegate)
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
            for i in Application.APP_TYPE_CHOICES:
                # Application types 5-11 are excluded from this listing.
                if i[0] in [5, 6, 7, 8, 9, 10, 11]:
                    continue
                APP_TYPE_CHOICES.append(i)
                APP_TYPE_CHOICES_IDS.append(i[0])
context['app_apptypes']= APP_TYPE_CHOICES
if 'action' in self.request.GET and self.request.GET['action']:
# query_str = self.request.GET['q']
# search_filter = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str)
if self.request.GET['apptype'] != '':
search_filter &= Q(app_type=int(self.request.GET['apptype']))
else:
search_filter &= Q(app_type__in=APP_TYPE_CHOICES_IDS)
if self.request.GET['appstatus'] != '':
search_filter &= Q(status=int(self.request.GET['appstatus']))
context['query_string'] = self.request.GET['q']
if self.request.GET['apptype'] != '':
context['apptype'] = int(self.request.GET['apptype'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
for se_wo in query_str_split:
                        search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
if 'from_date' in self.request.GET:
context['from_date'] = self.request.GET['from_date']
context['to_date'] = self.request.GET['to_date']
if self.request.GET['from_date'] != '':
from_date_db = datetime.strptime(self.request.GET['from_date'], '%d/%m/%Y').date()
search_filter &= Q(issue_date__gte=from_date_db)
if self.request.GET['to_date'] != '':
to_date_db = datetime.strptime(self.request.GET['to_date'], '%d/%m/%Y').date()
search_filter &= Q(issue_date__lte=to_date_db)
approval = Approval.objects.filter(search_filter)[:200]
context['app_list'] = []
context['app_applicants'] = {}
context['app_applicants_list'] = []
context['app_appstatus'] = list(Approval.APPROVAL_STATE_CHOICES)
for app in approval:
row = {}
row['app'] = app
                    # Build a distinct list of applicants for the filter widget.
                    if app.applicant and app.applicant.id not in context['app_applicants']:
                        context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
                        context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name})
context['app_list'].append(row)
elif action == "emergency":
context['nav_other_emergency'] = "active"
action=self.kwargs['action']
# Navbar
context['app'] = ''
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
# for i in Application.APP_TYPE_CHOICES:
# if i[0] in [4,5,6,7,8,9,10,11]:
# skip = 'yes'
# else:
# APP_TYPE_CHOICES.append(i)
# APP_TYPE_CHOICES_IDS.append(i[0])
APP_TYPE_CHOICES.append('4')
APP_TYPE_CHOICES_IDS.append('4')
context['app_apptypes']= APP_TYPE_CHOICES
context['app_appstatus'] = list(Application.APP_STATE_CHOICES)
user = EmailUser.objects.get(id=self.kwargs['pk'])
                delegate = Delegate.objects.filter(email_user=user).values('organisation__id')
                search_filter = Q(applicant=self.kwargs['pk'], app_type=4) | Q(organisation__in=delegate)
if 'searchaction' in self.request.GET and self.request.GET['searchaction']:
query_str = self.request.GET['q']
# query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) | Q(assignee__email__icontains=query_str)
context['query_string'] = self.request.GET['q']
if self.request.GET['appstatus'] != '':
search_filter &= Q(state=int(self.request.GET['appstatus']))
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
query_str_split = query_str.split()
for se_wo in query_str_split:
                        search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
applications = Application.objects.filter(search_filter)[:200]
# print applications
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
# Create a distinct list of applicants
# if app.applicant:
# if app.applicant.id in context['app_applicants']:
# donothing = ''
# else:
# context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
# context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name })
# end of creation
if app.group is not None:
if app.group in usergroups:
row['may_assign_to_person'] = 'True'
context['app_list'].append(row)
elif action == "clearance":
context['nav_other_clearance'] = "active"
if 'q' in self.request.GET and self.request.GET['q']:
context['query_string'] = self.request.GET['q']
user = EmailUser.objects.get(id=self.kwargs['pk'])
                delegate = Delegate.objects.filter(email_user=user).values('organisation__id')
                search_filter = Q(applicant=self.kwargs['pk']) | Q(organisation__in=delegate)
#items = Compliance.objects.filter(applicant=self.kwargs['pk']).order_by('due_date')
context['app_applicants'] = {}
context['app_applicants_list'] = []
context['app_apptypes'] = list(Application.APP_TYPE_CHOICES)
APP_STATUS_CHOICES = []
for i in Application.APP_STATE_CHOICES:
if i[0] in [1,11,16]:
APP_STATUS_CHOICES.append(i)
context['app_appstatus'] = list(APP_STATUS_CHOICES)
                if 'action' in self.request.GET and self.request.GET['action']:
                    if 'q' in self.request.GET and self.request.GET['q']:
                        query_str = self.request.GET['q']
                        query_str_split = query_str.split()
                        for se_wo in query_str_split:
                            search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
if 'from_date' in self.request.GET:
context['from_date'] = self.request.GET['from_date']
context['to_date'] = self.request.GET['to_date']
if self.request.GET['from_date'] != '':
from_date_db = datetime.strptime(self.request.GET['from_date'], '%d/%m/%Y').date()
search_filter &= Q(due_date__gte=from_date_db)
if self.request.GET['to_date'] != '':
to_date_db = datetime.strptime(self.request.GET['to_date'], '%d/%m/%Y').date()
search_filter &= Q(due_date__lte=to_date_db)
items = Compliance.objects.filter(search_filter).order_by('due_date')[:100]
context['compliance'] = items
# query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(assignee__email__icontains=query_str)
# query_obj &= Q(app_type=4)
# if self.request.GET['applicant'] != '':
# query_obj &= Q(applicant=int(self.request.GET['applicant']))
# if self.request.GET['appstatus'] != '':
# query_obj &= Q(state=int(self.request.GET['appstatus']))
# applications = Compliance.objects.filter(query_obj)
# context['query_string'] = self.request.GET['q']
#if 'applicant' in self.request.GET:
# if self.request.GET['applicant'] != '':
# context['applicant'] = int(self.request.GET['applicant'])
#if 'appstatus' in self.request.GET:
# if self.request.GET['appstatus'] != '':
# context['appstatus'] = int(self.request.GET['appstatus'])
#usergroups = self.request.user.groups.all()
#context['app_list'] = []
#for item in items:
# row = {}
# row['may_assign_to_person'] = 'False'
# row['app'] = item
#context['may_create'] = True
#processor = Group.objects.get(name='Processor')
# Rule: admin officers may self-assign applications.
#if processor in self.request.user.groups.all() or self.request.user.is_superuser:
# context['may_assign_processor'] = True
return context
class OrganisationDetails(LoginRequiredMixin, DetailView):
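    """A detail view of an organisation, restricted to the organisation's
    delegates or admin staff.
    """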
model = Organisation
template_name = 'applications/organisation_details.html'
def get_organisation(self):
return Organisation.objects.get(pk=self.kwargs['pk'])
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = self.get_organisation()
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
            if admin_staff is True:
                return super(OrganisationDetails, self).get(request, *args, **kwargs)
            messages.error(self.request, 'You are not authorised to view this organisation.')
            return HttpResponseRedirect(reverse('home_page'))
        return super(OrganisationDetails, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(OrganisationDetails, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return it.
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by name and ABN fields.
query = get_query(query_str, ['name', 'abn'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(OrganisationDetails, self).get_context_data(**kwargs)
org = self.get_object()
context['user_is_delegate'] = Delegate.objects.filter(email_user=self.request.user, organisation=org).exists()
context['nav_details'] = 'active'
if "action" in self.kwargs:
action=self.kwargs['action']
# Navbar
if action == "company":
context['nav_details_company'] = "active"
elif action == "certofincorp":
context['nav_details_certofincorp'] = "active"
org = Organisation.objects.get(id=self.kwargs['pk'])
if OrganisationExtras.objects.filter(organisation=org.id).exists():
context['org_extras'] = OrganisationExtras.objects.get(organisation=org.id)
context['org'] = org
elif action == "address":
context['nav_details_address'] = "active"
elif action == "contactdetails":
context['nav_details_contactdetails'] = "active"
org = Organisation.objects.get(id=self.kwargs['pk'])
context['organisation_contacts'] = OrganisationContact.objects.filter(organisation=org)
elif action == "linkedperson":
context['nav_details_linkedperson'] = "active"
org = Organisation.objects.get(id=self.kwargs['pk'])
context['linkedpersons'] = Delegate.objects.filter(organisation=org)
if OrganisationExtras.objects.filter(organisation=org.id).exists():
context['org_extras'] = OrganisationExtras.objects.get(organisation=org.id)
return context
class OrganisationOther(LoginRequiredMixin, DetailView):
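    """A detail view to list an organisation's applications and approvals,
    restricted to the organisation's delegates or admin staff.
    """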
model = Organisation
template_name = 'applications/organisation_details.html'
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = self.get_object()
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
            if admin_staff is not True:
                messages.error(self.request, 'You are not authorised to view this organisation.')
                return HttpResponseRedirect(reverse('home_page'))
        return super(OrganisationOther, self).get(request, *args, **kwargs)
def get_queryset(self):
qs = super(OrganisationOther, self).get_queryset()
# Did we pass in a search string? If so, filter the queryset and return it.
if 'q' in self.request.GET and self.request.GET['q']:
query_str = self.request.GET['q']
# Replace single-quotes with double-quotes
query_str = query_str.replace("'", r'"')
# Filter by name and ABN fields.
query = get_query(query_str, ['name', 'abn'])
qs = qs.filter(query).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(OrganisationOther, self).get_context_data(**kwargs)
org = self.get_object()
context['user_is_delegate'] = Delegate.objects.filter(email_user=self.request.user, organisation=org).exists()
context['nav_other'] = 'active'
if "action" in self.kwargs:
action=self.kwargs['action']
# Navbar
if action == "applications":
context['nav_other_applications'] = "active"
context['app'] = ''
                APP_TYPE_CHOICES = []
                APP_TYPE_CHOICES_IDS = []
                for i in Application.APP_TYPE_CHOICES:
                    if i[0] not in [4, 5, 6, 7, 8, 9, 10, 11]:
                        APP_TYPE_CHOICES.append(i)
                        APP_TYPE_CHOICES_IDS.append(i[0])
context['app_apptypes'] = APP_TYPE_CHOICES
context['app_appstatus'] = list(Application.APP_STATE_CHOICES)
search_filter = Q(organisation=self.kwargs['pk'])
if 'searchaction' in self.request.GET and self.request.GET['searchaction']:
query_str = self.request.GET['q']
# query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) | Q(assignee__email__icontains=query_str)
                    if self.request.GET['apptype'] != '':
                        search_filter &= Q(app_type=int(self.request.GET['apptype']))
                    # else:
                    #     search_filter &= Q(app_type__in=APP_TYPE_CHOICES_IDS)
if self.request.GET['appstatus'] != '':
search_filter &= Q(state=int(self.request.GET['appstatus']))
# applications = Application.objects.filter(query_obj)
context['query_string'] = self.request.GET['q']
if self.request.GET['apptype'] != '':
context['apptype'] = int(self.request.GET['apptype'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
                if 'q' in self.request.GET and self.request.GET['q']:
                    query_str = self.request.GET['q']
                    # accumulate a constraint per search word; plain assignment here
                    # would discard the organisation filter built above
                    for se_wo in query_str.split():
                        search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
applications = Application.objects.filter(search_filter)[:200]
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
if app.group is not None:
if app.group in usergroups:
row['may_assign_to_person'] = 'True'
context['app_list'].append(row)
elif action == "approvals":
context['nav_other_approvals'] = "active"
                search_filter = Q(organisation=self.kwargs['pk'], status=1)
                APP_TYPE_CHOICES = []
                APP_TYPE_CHOICES_IDS = []
                for i in Application.APP_TYPE_CHOICES:
                    if i[0] not in [4, 5, 6, 7, 8, 9, 10, 11]:
                        APP_TYPE_CHOICES.append(i)
                        APP_TYPE_CHOICES_IDS.append(i[0])
context['app_apptypes']= APP_TYPE_CHOICES
if 'action' in self.request.GET and self.request.GET['action']:
# query_str = self.request.GET['q']
# search_filter = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str)
if self.request.GET['apptype'] != '':
search_filter &= Q(app_type=int(self.request.GET['apptype']))
else:
search_filter &= Q(app_type__in=APP_TYPE_CHOICES_IDS)
if self.request.GET['appstatus'] != '':
search_filter &= Q(status=int(self.request.GET['appstatus']))
context['query_string'] = self.request.GET['q']
if self.request.GET['apptype'] != '':
context['apptype'] = int(self.request.GET['apptype'])
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
                if 'q' in self.request.GET and self.request.GET['q']:
                    query_str = self.request.GET['q']
                    # accumulate a constraint per search word; plain assignment here
                    # would discard the filters built above
                    for se_wo in query_str.split():
                        search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
approval = Approval.objects.filter(search_filter)[:200]
context['app_list'] = []
context['app_applicants'] = {}
context['app_applicants_list'] = []
context['app_appstatus'] = list(Approval.APPROVAL_STATE_CHOICES)
for app in approval:
row = {}
row['app'] = app
row['approval_url'] = app.approval_url
                    if app.applicant and app.applicant.id not in context['app_applicants']:
                        context['app_applicants'][app.applicant.id] = app.applicant.first_name + ' ' + app.applicant.last_name
                        context['app_applicants_list'].append({"id": app.applicant.id, "name": app.applicant.first_name + ' ' + app.applicant.last_name})
context['app_list'].append(row)
elif action == "emergency":
context['nav_other_emergency'] = "active"
context['app'] = ''
APP_TYPE_CHOICES = []
APP_TYPE_CHOICES_IDS = []
APP_TYPE_CHOICES.append('4')
APP_TYPE_CHOICES_IDS.append('4')
context['app_apptypes']= APP_TYPE_CHOICES
context['app_appstatus'] = list(Application.APP_STATE_CHOICES)
#user = EmailUser.objects.get(id=self.kwargs['pk'])
#delegate = Delegate.objects.filter(email_user=user).values('id')
search_filter = Q(organisation=self.kwargs['pk'], app_type=4)
if 'searchaction' in self.request.GET and self.request.GET['searchaction']:
query_str = self.request.GET['q']
# query_obj = Q(pk__contains=query_str) | Q(title__icontains=query_str) | Q(applicant__email__icontains=query_str) | Q(organisation__name__icontains=query_str) | Q(assignee__email__icontains=query_str)
context['query_string'] = self.request.GET['q']
if self.request.GET['appstatus'] != '':
search_filter &= Q(state=int(self.request.GET['appstatus']))
if 'appstatus' in self.request.GET:
if self.request.GET['appstatus'] != '':
context['appstatus'] = int(self.request.GET['appstatus'])
                if 'q' in self.request.GET and self.request.GET['q']:
                    query_str = self.request.GET['q']
                    # accumulate a constraint per search word; plain assignment here
                    # would discard the organisation/app_type filter built above
                    for se_wo in query_str.split():
                        search_filter &= Q(pk__contains=se_wo) | Q(title__contains=se_wo)
applications = Application.objects.filter(search_filter)[:200]
usergroups = self.request.user.groups.all()
context['app_list'] = []
for app in applications:
row = {}
row['may_assign_to_person'] = 'False'
row['app'] = app
if app.group is not None:
if app.group in usergroups:
row['may_assign_to_person'] = 'True'
context['app_list'].append(row)
elif action == "clearance":
context['nav_other_clearance'] = "active"
context['query_string'] = ''
if 'q' in self.request.GET:
context['query_string'] = self.request.GET['q']
search_filter = Q(organisation=self.kwargs['pk'])
items = Compliance.objects.filter(applicant=self.kwargs['pk']).order_by('due_date')
context['app_applicants'] = {}
context['app_applicants_list'] = []
context['app_apptypes'] = list(Application.APP_TYPE_CHOICES)
APP_STATUS_CHOICES = []
for i in Application.APP_STATE_CHOICES:
if i[0] in [1,11,16]:
APP_STATUS_CHOICES.append(i)
context['app_appstatus'] = list(APP_STATUS_CHOICES)
context['compliance'] = items
return context
#class OrganisationCreate(LoginRequiredMixin, CreateView):
# """A view to create a new Organisation.
# """
# form_class = apps_forms.OrganisationForm
# template_name = 'accounts/organisation_form.html'
#
# def get_context_data(self, **kwargs):
# context = super(OrganisationCreate, self).get_context_data(**kwargs)
# context['action'] = 'Create'
# return context
#
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('organisation_list'))
# return super(OrganisationCreate, self).post(request, *args, **kwargs)
#
# def form_valid(self, form):
# self.obj = form.save()
# # Assign the creating user as a delegate to the new organisation.
# Delegate.objects.create(email_user=self.request.user, organisation=self.obj)
# messages.success(self.request, 'New organisation created successfully!')
# return HttpResponseRedirect(reverse('organisation_detail', args=(self.obj.pk,)))
#class OrganisationUserCreate(LoginRequiredMixin, CreateView):
# """A view to create a new Organisation.
# """
# form_class = apps_forms.OrganisationForm
# template_name = 'accounts/organisation_form.html'
#
# def get_context_data(self, **kwargs):
# context = super(OrganisationUserCreate, self).get_context_data(**kwargs)
# context['action'] = 'Create'
# return context
#
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(reverse('organisation_list'))
# return super(OrganisationUserCreate, self).post(request, *args, **kwargs)
#
# def form_valid(self, form):
# self.obj = form.save()
# # Assign the creating user as a delegate to the new organisation.
# user = EmailUser.objects.get(id=self.kwargs['pk'])
# Delegate.objects.create(email_user=user, organisation=self.obj)
# messages.success(self.request, 'New organisation created successfully!')
# return HttpResponseRedirect(reverse('organisation_detail', args=(self.obj.pk,)))
#class OrganisationDetail(LoginRequiredMixin, DetailView):
# model = Organisation
#
# def get_context_data(self, **kwargs):
# context = super(OrganisationDetail, self).get_context_data(**kwargs)
# org = self.get_object()
# context['user_is_delegate'] = Delegate.objects.filter(email_user=self.request.user, organisation=org).exists()
# return context
class OrganisationUpdate(LoginRequiredMixin, UpdateView):
"""A view to update an Organisation object.
"""
model = Organisation
form_class = apps_forms.OrganisationForm
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = self.get_object()
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
            if admin_staff is not True:
                messages.error(self.request, 'You are not authorised.')
                return HttpResponseRedirect(reverse('home_page'))
        return super(OrganisationUpdate, self).get(request, *args, **kwargs)
# def get(self, request, *args, **kwargs):
# # Rule: only a delegated user can update an organisation.
# if not Delegate.objects.filter(email_user=request.user, organisation=self.get_object()).exists():
# messages.warning(self.request, 'You are not authorised to update this organisation. Please request delegated authority if required.')
# return HttpResponseRedirect(self.get_success_url())
# return super(OrganisationUpdate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationUpdate, self).get_context_data(**kwargs)
context['action'] = 'Update'
return context
def get_success_url(self):
return reverse('organisation_details_actions', args=(self.kwargs['pk'],'company'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['pk'],'company')))
return super(OrganisationUpdate, self).post(request, *args, **kwargs)
class OrganisationContactCreate(LoginRequiredMixin, CreateView):
"""A view to update an Organisation object.
"""
#model = OrganisationContact
form_class = apps_forms.OrganisationContactForm
template_name = 'applications/organisation_contact_form.html'
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = Organisation.objects.get(id=self.kwargs['pk'])
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
            if admin_staff is not True:
                messages.error(self.request, 'You are not authorised.')
                return HttpResponseRedirect(reverse('home_page'))
        return super(OrganisationContactCreate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationContactCreate, self).get_context_data(**kwargs)
context['action'] = 'Create'
return context
def get_initial(self):
initial = super(OrganisationContactCreate, self).get_initial()
initial['organisation'] = self.kwargs['pk']
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['pk'],'contactdetails')))
return super(OrganisationContactCreate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.obj = form.save(commit=False)
org = Organisation.objects.get(id=self.kwargs['pk'])
self.obj.organisation = org
self.obj.save()
# Assign the creating user as a delegate to the new organisation.
messages.success(self.request, 'Organisation contact created successfully!')
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['pk'], 'contactdetails')))
class OrganisationContactUpdate(LoginRequiredMixin, UpdateView):
"""A view to update an Organisation object.
"""
model = OrganisationContact
form_class = apps_forms.OrganisationContactForm
template_name = 'applications/organisation_contact_form.html'
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
self.object = self.get_object()
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=self.object.organisation).exists():
            if admin_staff is not True:
                messages.error(self.request, 'You are not authorised.')
                return HttpResponseRedirect(reverse('home_page'))
        return super(OrganisationContactUpdate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(OrganisationContactUpdate, self).get_context_data(**kwargs)
context['action'] = 'Update'
return context
def get_initial(self):
initial = super(OrganisationContactUpdate, self).get_initial()
return initial
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.get_object().organisation.id,'contactdetails')))
return super(OrganisationContactUpdate, self).post(request, *args, **kwargs)
def form_valid(self, form):
self.obj = form.save()
# Assign the creating user as a delegate to the new organisation.
messages.success(self.request, 'Organisation contact updated successfully!')
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.get_object().organisation.id, 'contactdetails')))
class OrganisationAddressCreate(LoginRequiredMixin, CreateView):
"""A view to create a new address for an Organisation.
"""
model = OrganisationAddress
form_class = apps_forms.OrganisationAddressForm2
template_name = 'accounts/address_form.html'
def get_context_data(self, **kwargs):
context = super(OrganisationAddressCreate, self).get_context_data(**kwargs)
org = Organisation.objects.get(pk=self.kwargs['pk'])
context['principal'] = org.name
return context
def form_valid(self, form):
self.obj = form.save(commit=False)
# Attach the new address to the organisation.
org = Organisation.objects.get(pk=self.kwargs['pk'])
        # ledger has a mandatory user field (the mandatory constraint should probably be removed)
self.obj.user = self.request.user
self.obj.organisation = org
self.obj.save()
if self.kwargs['type'] == 'postal':
org.postal_address = self.obj
elif self.kwargs['type'] == 'billing':
org.billing_address = self.obj
org.save()
return HttpResponseRedirect(reverse('organisation_details_actions', args=(self.kwargs['pk'],'address')))
#return HttpResponseRedirect(reverse('organisation_detail', args=(org.pk,)))
#class RequestDelegateAccess(LoginRequiredMixin, FormView):
# """A view to allow a user to request to be added to an organisation as a delegate.
# This view sends an email to all current delegates, any of whom may confirm the request.
# """
# form_class = apps_forms.DelegateAccessForm
# template_name = 'accounts/request_delegate_access.html'
#
# def get_organisation(self):
# return Organisation.objects.get(pk=self.kwargs['pk'])
#
# def get(self, request, *args, **kwargs):
# # Rule: redirect if the user is already a delegate.
# org = self.get_organisation()
# if Delegate.objects.filter(email_user=request.user, organisation=org).exists():
# messages.warning(self.request, 'You are already a delegate for this organisation!')
# return HttpResponseRedirect(self.get_success_url())
# return super(RequestDelegateAccess, self).get(request, *args, **kwargs)
#
# def get_context_data(self, **kwargs):
# context = super(RequestDelegateAccess, self).get_context_data(**kwargs)
# context['organisation'] = self.get_organisation()
# return context
#
# def get_success_url(self):
# return reverse('organisation_detail', args=(self.get_organisation().pk,))
#
# def post(self, request, *args, **kwargs):
# if request.POST.get('cancel'):
# return HttpResponseRedirect(self.get_success_url())
# # For each existing organisation delegate user, send an email that
# # contains a unique URL to confirm the request. The URL consists of the
# # requesting user PK (base 64-encoded) plus a unique token for that user.
# org = self.get_organisation()
# delegates = Delegate.objects.filter(email_user=request.user, organisation=org)
# if not delegates.exists():
# # In the event that an organisation has no delegates, the request
# # will be sent to all users in the "Processor" group.
# processor = Group.objects.get(name='Processor')
# recipients = [i.email for i in EmailUser.objects.filter(groups__in=[processor])]
# else:
# recipients = [i.emailuser.email for i in delegates]
# user = self.request.user
# uid = urlsafe_base64_encode(force_bytes(user.pk))
# # Note that the token generator uses the requesting user object to generate a hash.
# # This means that if the user object changes (e.g. they log out and in again),
# # the hash will be invalid. Therefore, this request/response needs to occur
# # fairly promptly to work.
# token = default_token_generator.make_token(user)
# url = reverse('confirm_delegate_access', args=(org.pk, uid, token))
# url = request.build_absolute_uri(url)
# subject = 'Delegate access request for {}'.format(org.name)
# message = '''The following user has requested delegate access for {}: {}\n
# Click here to confirm and grant this access request:\n{}'''.format(org.name, user, url)
# html_message = '''<p>The following user has requested delegate access for {}: {}</p>
# <p><a href="{}">Click here</a> to confirm and grant this access request.</p>'''.format(org.name, user, url)
# send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipients, fail_silently=False, html_message=html_message)
# # Send a request email to the recipients asynchronously.
# # NOTE: the lines below should remain commented until (if) async tasking is implemented in prod.
# #from django_q.tasks import async
# #async(
# # 'django.core.mail.send_mail', subject, message,
# # settings.DEFAULT_FROM_EMAIL, recipients, fail_silently=True, html_message=html_message,
# # hook='log_task_result')
# #messages.success(self.request, 'An email requesting delegate access for {} has been sent to existing delegates.'.format(org.name))
# # Generate an action record:
# action = Action(content_object=org, user=user, action='Requested delegate access')
# action.save()
# return super(RequestDelegateAccess, self).post(request, *args, **kwargs)
#class ConfirmDelegateAccess(LoginRequiredMixin, FormView):
# form_class = apps_forms.DelegateAccessForm
# template_name = 'accounts/confirm_delegate_access.html'
#
# def get_organisation(self):
# return Organisation.objects.get(pk=self.kwargs['pk'])
#
# def get(self, request, *args, **kwargs):
# # Rule: request user must be an existing delegate.
# org = self.get_organisation()
# delegates = Delegate.objects.filter(email_user=request.user, organisation=org)
# if delegates.exists():
# uid = urlsafe_base64_decode(self.kwargs['uid'])
# user = EmailUser.objects.get(pk=uid)
# token = default_token_generator.check_token(user, self.kwargs['token'])
# if token:
# return super(ConfirmDelegateAccess, self).get(request, *args, **kwargs)
# else:
# messages.warning(self.request, 'The request delegate token is no longer valid.')
# else:
# messages.warning(self.request, 'You are not authorised to confirm this request!')
# return HttpResponseRedirect(reverse('user_account'))
#
# def get_context_data(self, **kwargs):
# context = super(ConfirmDelegateAccess, self).get_context_data(**kwargs)
# context['organisation'] = self.get_organisation()
# uid = urlsafe_base64_decode(self.kwargs['uid'])
# context['requester'] = EmailUser.objects.get(pk=uid)
# return context
#
# def get_success_url(self):
# return reverse('organisation_detail', args=(self.get_organisation().pk,))
#
# def post(self, request, *args, **kwargs):
# uid = urlsafe_base64_decode(self.kwargs['uid'])
# req_user = EmailUser.objects.get(pk=uid)
# token = default_token_generator.check_token(req_user, self.kwargs['token'])
# # Change the requesting user state to expire the token.
# req_user.last_login = req_user.last_login + timedelta(seconds=1)
# req_user.save()
# if request.POST.get('cancel'):
# return HttpResponseRedirect(self.get_success_url())
# if token:
# org = self.get_organisation()
# Delegate.objects.create(email_user=req_user, organisation=org)
# messages.success(self.request, '{} has been added as a delegate for {}.'.format(req_user, org.name))
# else:
# messages.warning(self.request, 'The request delegate token is no longer valid.')
# return HttpResponseRedirect(self.get_success_url())
class UnlinkDelegate(LoginRequiredMixin, FormView):
form_class = apps_forms.UnlinkDelegateForm
template_name = 'accounts/confirm_unlink_delegate.html'
def get_organisation(self):
return Organisation.objects.get(pk=self.kwargs['pk'])
def get(self, request, *args, **kwargs):
# Rule: request user must be a delegate (or superuser).
context_processor = template_context(self.request)
admin_staff = context_processor['admin_staff']
org = self.get_organisation()
        # Rule: the target user must currently be a delegate for the organisation.
        if not Delegate.objects.filter(email_user_id=self.kwargs['user_id'], organisation=org).exists():
            messages.error(self.request, 'User not found')
            return HttpResponseRedirect(self.get_success_url())
        if not Delegate.objects.filter(email_user_id=request.user.id, organisation=org).exists():
            if admin_staff is not True:
                messages.error(self.request, 'You are not authorised to unlink a delegated user for {}'.format(org.name))
                return HttpResponseRedirect(self.get_success_url())
        return super(UnlinkDelegate, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(UnlinkDelegate, self).get_context_data(**kwargs)
context['delegate'] = EmailUser.objects.get(pk=self.kwargs['user_id'])
return context
def get_success_url(self):
return reverse('organisation_details_actions', args=(self.get_organisation().pk,'linkedperson'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel'):
return HttpResponseRedirect(self.get_success_url())
return super(UnlinkDelegate, self).post(request, *args, **kwargs)
def form_valid(self, form):
# Unlink the specified user from the organisation.
org = self.get_organisation()
user = EmailUser.objects.get(pk=self.kwargs['user_id'])
delegateorguser = Delegate.objects.get(email_user=user, organisation=org)
delegateorguser.delete()
# Delegate.objects.delete(email_user=user, organisation=org)
messages.success(self.request, '{} has been removed as a delegate for {}.'.format(user, org.name))
# Generate an action record:
action = Action(content_object=org, user=self.request.user,
action='Unlinked delegate access for {}'.format(user.get_full_name()))
action.save()
return HttpResponseRedirect(self.get_success_url())
class BookingSuccessView(TemplateView):
template_name = 'applications/success.html'
def get(self, request, *args, **kwargs):
print ("BOOKING SUCCESS")
context_processor = template_context(self.request)
basket = None
context = {}
print ("START TEST")
if 'test' in request.session:
print (request.session['test'])
print ("END TEST")
print (request.session['basket_id'])
print (request.session['application_id'])
checkout_routeid = request.session['routeid']
basket_id = request.session['basket_id']
booking_id = request.session['booking_id']
booking = Booking.objects.get(id=booking_id)
app = Application.objects.get(id=request.session['application_id'])
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
actions = flow.getAllRouteActions(app.routeid, workflowtype)
route = {}
if app.routeid == checkout_routeid:
for a in actions:
if a['payment'] == 'success':
route = a
basket_row = Basket.objects.get(id=basket_id)
order = Order.objects.get(basket_id=basket_row.id)
invoices = Invoice.objects.filter(order_number=order.number)
for i in invoices:
BookingInvoice.objects.get_or_create(booking=booking, invoice_reference=i.reference)
#route = flow.getNextRouteObj('payment', app.routeid, workflowtype)
print ("PAYMENT ROUTE")
print (route)
groupassignment = Group.objects.get(name=DefaultGroups['grouplink'][route['routegroup']])
app.routeid = route["route"]
app.state = route["state"]
app.group = groupassignment
app.assignee = None
app.route_status = flow.json_obj[route["route"]]['title']
app.save()
utils.application_lodgment_info(self.request,app)
## Success message.
#msg = """Your {0} application has been successfully submitted. The application
#number is: <strong>WO-{1}</strong>.<br>
#Please note that routine applications take approximately 4-6 weeks to process.<br>
#If any information is unclear or missing, Parks and Wildlife may return your
#application to you to amend or complete.<br>
#The assessment process includes a 21-day external referral period. During this time
#your application may be referred to external departments, local government
#agencies or other stakeholders. Following this period, an internal report will be
#produced by an officer for approval by the Manager, Rivers and Estuaries Division,
#to determine the outcome of your application.<br>
#You will be notified by email once your {0} application has been determined and/or
#further action is required.""".format(app.get_app_type_display(), app.pk)
#messages.success(self.request, msg)
#emailcontext = {}
#emailcontext['app'] = app
#emailcontext['application_name'] = Application.APP_TYPE_CHOICES[app.app_type]
#emailcontext['person'] = app.submitted_by
#emailcontext['body'] = msg
#sendHtmlEmail([app.submitted_by.email], emailcontext['application_name'] + ' application submitted ', emailcontext, 'application-lodged.html', None, None, None)
return render(request, self.template_name, context)
class InvoicePDFView(InvoiceOwnerMixin,View):
def get(self, request, *args, **kwargs):
invoice = get_object_or_404(Invoice, reference=self.kwargs['reference'])
response = HttpResponse(content_type='application/pdf')
tc = template_context(request)
response.write(create_invoice_pdf_bytes('invoice.pdf',invoice, request, tc))
return response
def get_object(self):
invoice = get_object_or_404(Invoice, reference=self.kwargs['reference'])
return invoice
class ApplicationBooking(LoginRequiredMixin, FormView):
model = Application
form_class = apps_forms.PaymentDetailForm
template_name = 'applications/application_payment_details_form.html'
def render_page(self, request, booking, form, show_errors=False):
booking_mooring = None
booking_total = '0.00'
#application_fee = ApplicationLicenceFee.objects.filter(app_type=booking['app'].app_type)
to_date = datetime.now()
application_fee = None
        fee_qs = ApplicationLicenceFee.objects.filter(app_type=booking['app'].app_type, start_dt__lte=to_date, end_dt__gte=to_date)
        if fee_qs.count() > 0:
            application_fee = fee_qs[0]
        print("APPLICATION FEE")
#lines.append(booking_change_fees)
return render(request, self.template_name, {
'form': form,
'booking': booking,
'application_fee': application_fee
})
def get_context_data(self, **kwargs):
context = super(ApplicationBooking, self).get_context_data(**kwargs)
application_fee = None
to_date = datetime.now()
pk=self.kwargs['pk']
app = Application.objects.get(pk=pk)
booking = {'app': app}
fee_total = '0.00'
        fee_qs = ApplicationLicenceFee.objects.filter(app_type=booking['app'].app_type, start_dt__lte=to_date, end_dt__gte=to_date)
        print(fee_qs)
        if fee_qs.count() > 0:
            print("APPLICATION FEE")
            application_fee = fee_qs[0]
            fee_total = application_fee.licence_fee
context['application_fee'] = fee_total
context['override_reasons'] = DiscountReason.objects.all()
context['page_heading'] = 'Licence Fees'
context['allow_overide_access'] = self.request.user.groups.filter(name__in=['Statdev Processor', 'Statdev Assessor']).exists()
return context
def get(self, request, *args, **kwargs):
context_processor = template_context(self.request)
context_data = self.get_context_data(**kwargs)
print ("CCCC")
print (context_data)
#app = self.get_object()
pk=self.kwargs['pk']
app = Application.objects.get(pk=pk)
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext['application_submitter_id'] = app.submitted_by.id
if app.applicant:
if app.applicant.id == request.user.id:
flowcontext['application_owner'] = True
if Delegate.objects.filter(email_user=request.user).count() > 0:
flowcontext['application_owner'] = True
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
if flowcontext['may_payment'] == "False":
messages.error(self.request, 'You do not have permission to perform this action. AB')
return HttpResponseRedirect('/')
booking = {'app': app}
form = apps_forms.PaymentDetailForm
print ("TEMPLATE")
print (self.template_name,)
        # if the fee amount is zero, automatically push the application to the next step
        print("APPLICATION FEES GET")
if float(context_data['application_fee']) > 0:
pass
# continue with rest of code logic
else:
            # we don't need to ask for any money; proceed to the next step automatically
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
DefaultGroups = flow.groupList()
flowcontext = {}
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
flowcontext = flow.getRequired(flowcontext, app.routeid, workflowtype)
actions = flow.getAllRouteActions(app.routeid, workflowtype)
route = {}
for a in actions:
if 'payment' in a:
if a['payment'] == 'success':
route = a
groupassignment = Group.objects.get(name=DefaultGroups['grouplink'][route['routegroup']])
app.routeid = route["route"]
app.state = route["state"]
app.group = groupassignment
app.assignee = None
app.route_status = flow.json_obj[route["route"]]['title']
app.save()
utils.application_lodgment_info(self.request,app)
return HttpResponseRedirect('/')
        return super(ApplicationBooking, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
pk=self.kwargs['pk']
app = Application.objects.get(pk=pk)
overridePrice = request.POST.get('overridePrice','0.00')
overrideReason = request.POST.get('overrideReason', None)
overrideDetail = request.POST.get('overrideDetail', None)
override_checkbox = request.POST.get('override_checkbox', 'off')
booking = {'app': app}
to_date = datetime.now()
application_fee = None
fee_total = '0.00'
        fee_qs = ApplicationLicenceFee.objects.filter(app_type=booking['app'].app_type, start_dt__lte=to_date, end_dt__gte=to_date)
        if fee_qs.count() > 0:
            application_fee = fee_qs[0]
            fee_total = application_fee.licence_fee
else:
raise ValidationError("Unable to find licence fees")
if override_checkbox == 'on':
fee_total = Decimal(overridePrice)
        booking_qs = Booking.objects.filter(customer=app.applicant, application=app)
        if booking_qs.count() > 0:
            booking_obj = booking_qs[0]
if override_checkbox == 'on':
booking_obj.override_price = Decimal(overridePrice)
booking_obj.override_reason = DiscountReason.objects.get(id=overrideReason)
booking_obj.override_reason_info = overrideDetail
booking_obj.customer=app.applicant
booking_obj.cost_total=fee_total
booking_obj.application=app
booking_obj.save()
else:
if override_checkbox == 'on':
booking_obj = Booking.objects.create(customer=app.applicant,cost_total=fee_total,application=app,override_price=Decimal(overridePrice),override_reason_info=overrideDetail, override_reason=DiscountReason.objects.get(id=overrideReason))
else:
booking_obj = Booking.objects.create(customer=app.applicant,cost_total=fee_total,application=app,override_price=None,override_reason_info=None, override_reason=None)
booking['booking'] = booking_obj
invoice_text = u"Your licence {} ".format('fees')
lines = []
lines.append({'ledger_description':booking['app'].get_app_type_display(),"quantity":1,"price_incl_tax": fee_total,"oracle_code":'00123sda', 'line_status': 1})
result = utils.checkout(request, booking, lines, invoice_text=invoice_text)
return result
def getPDFapplication(request,application_id):
if request.user.is_superuser:
app = Application.objects.get(id=application_id)
filename = 'pdfs/applications/'+str(app.id)+'-application.pdf'
if os.path.isfile(filename) is False:
# if app.id:
pdftool = PDFtool()
if app.app_type == 4:
pdftool.generate_emergency_works(app)
        if os.path.isfile(filename) is True:
            with open(filename, 'rb') as pdf_file:
                pdf_data = pdf_file.read()
            return HttpResponse(pdf_data, content_type='application/pdf')
        return HttpResponse("Error loading document", content_type="text/plain")
    # non-superusers receive an explicit denial instead of an implicit None response
    return HttpResponse("Permission Denied", content_type="text/plain")
def getLedgerAppFile(request,file_id,extension):
allow_access = False
#file_group_ref_id
pd = PrivateDocument.objects.filter(id=file_id)
if pd.count() > 0:
pd_object = pd[0]
context_processor = template_context(request)
admin_staff = context_processor['admin_staff']
if admin_staff is True:
allow_access = True
if request.user.is_authenticated:
if pd_object.file_group_ref_id == request.user.id:
allow_access = True
if request.user.is_superuser:
allow_access = True
if allow_access is True:
# configs
api_key = settings.LEDGER_API_KEY
url = settings.LEDGER_API_URL+'/ledgergw/remote/documents/get/'+api_key+'/'
myobj = {'private_document_id': file_id}
# send request to server to get file
resp = requests.post(url, data = myobj)
image_64_decode = base64.b64decode(resp.json()['data'])
extension = resp.json()['extension']
if extension == 'msg':
return HttpResponse(image_64_decode, content_type="application/vnd.ms-outlook")
if extension == 'eml':
return HttpResponse(image_64_decode, content_type="application/vnd.ms-outlook")
return HttpResponse(image_64_decode, content_type=mimetypes.types_map['.'+str(extension)])
        else:
            return HttpResponse("Permission Denied", content_type="text/plain")
    else:
        return HttpResponse("Error loading document", content_type="text/plain")
def getAppFile(request,file_id,extension):
allow_access = False
#if request.user.is_superuser:
file_record = Record.objects.get(id=file_id)
app_id = file_record.file_group_ref_id
app_group = file_record.file_group
if (file_record.file_group > 0 and file_record.file_group < 12) or (file_record.file_group == 2003):
app = Application.objects.get(id=app_id)
if app.id == file_record.file_group_ref_id:
flow = Flow()
workflowtype = flow.getWorkFlowTypeFromApp(app)
flow.get(workflowtype)
flowcontext = {}
if app.assignee:
flowcontext['application_assignee_id'] = app.assignee.id
if app.submitted_by:
flowcontext['application_submitter_id'] = app.submitted_by.id
#flowcontext['application_owner'] = app.
if app.applicant:
if app.applicant.id == request.user.id:
flowcontext['application_owner'] = True
if request.user.is_authenticated:
if Delegate.objects.filter(email_user=request.user).count() > 0:
flowcontext['application_owner'] = True
flowcontext = flow.getAccessRights(request, flowcontext, app.routeid, workflowtype)
if flowcontext['allow_access_attachments'] == "True":
allow_access = True
if allow_access is False:
if request.user.is_authenticated:
refcount = Referral.objects.filter(application=app,referee=request.user).exclude(status=5).count()
if refcount > 0:
allow_access = True
ref = Referral.objects.filter(application=app,referee=request.user).exclude(status=5)[0]
#for i in ref.records.all():
# if int(file_id) == i.id:
# allow_access = True
if file_record.file_group == 2005:
app = Approval.objects.get(id=app_id)
if app.applicant:
if app.applicant.id == request.user.id or request.user.is_staff is True:
allow_access = True
if file_record.file_group == 2007:
app = Approval.objects.get(id=app_id)
if app.applicant:
if request.user.is_staff is True:
allow_access = True
if file_record.file_group == 2006:
app = Compliance.objects.get(id=app_id)
if app.applicant:
if app.applicant.id == request.user.id or request.user.is_staff is True:
allow_access = True
if allow_access == True:
file_record = Record.objects.get(id=file_id)
file_name_path = file_record.upload.path
if os.path.isfile(file_name_path) is True:
the_file = open(file_name_path, 'rb')
the_data = the_file.read()
the_file.close()
if extension == 'msg':
return HttpResponse(the_data, content_type="application/vnd.ms-outlook")
if extension == 'eml':
return HttpResponse(the_data, content_type="application/vnd.ms-outlook")
return HttpResponse(the_data, content_type=mimetypes.types_map['.'+str(extension)])
else:
return HttpResponse("Error loading attachment", content_type="plain/html")
return
# filename = 'pdfs/applications/'+str(app.id)+'-application.pdf'
# if os.path.isfile(filename) is False:
## if app.id:
# pdftool = PDFtool()
# if app.app_type == 4:
# pdftool.generate_emergency_works(app)
#
# if os.path.isfile(filename) is True:
# pdf_file = open(filename, 'rb')
# pdf_data = pdf_file.read()
# pdf_file.close()
# return HttpResponse(pdf_data, content_type='application/pdf')
|
import pandas as pd
import jieba
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
def process(txt):
    # strip newlines and segment the Chinese text with jieba so that
    # TfidfVectorizer can tokenize on whitespace
    txt = txt.replace("\r", "").replace("\n", "")
    return " ".join(jieba.cut(txt))
vectorizer = TfidfVectorizer()
df = pd.read_csv("mood.csv", encoding="utf-8-sig")
df["review"] = df["review"].apply(process)
x = df["review"]
y = df["label"]
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)
x_train = vectorizer.fit_transform(x_train)
model = MultinomialNB(alpha=0.001)
model.fit(x_train, y_train)
x_test = vectorizer.transform(x_test)
y_pred = model.predict(x_test)
accuracy = accuracy_score(y_test, y_pred)
print(f"accuracy: {accuracy}")
|
#!/usr/local/bin/python
# _*_ coding:utf-8 _*_
txt = input('Please enter a string: ')
# method 1
print(txt == txt[::-1])
# method 2: compare characters pairwise, stopping at the first mismatch
is_pal = True
for i in range(len(txt) // 2):
    if txt[i] != txt[-1 - i]:
        is_pal = False
        break
print(is_pal)
# method 3
txt1 = ""
for i in txt:
txt1 = i + txt1
print(txt1 == txt)
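# method 4 (sketch): equivalent to method 1, written with reversed()
print("".join(reversed(txt)) == txt)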
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/test_macro')
def test_macro():
return render_template('test_macro.html')
@app.route('/macro')
def macro():
return render_template('macro.html')
@app.route('/')
def index():
return 'hello'
if __name__ == "__main__":
app.run()
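# Note: the routes above assume 'test_macro.html' and 'macro.html' exist in a
# local templates/ directory. A minimal macro.html sketch (Jinja2) might contain:
# {% macro input(name, value='', type='text') %}
#     <input type="{{ type }}" name="{{ name }}" value="{{ value }}">
# {% endmacro %}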
|
import pandas as pd
import numpy as np
import processSeq
import sys
import tensorflow as tf
import keras
keras.backend.image_data_format()
from keras import backend as K
from keras import regularizers
from keras.regularizers import l1, l2, l1_l2
from keras.optimizers import Adam
from keras_self_attention import SeqSelfAttention
from keras.engine.topology import Layer
from keras_layer_normalization import LayerNormalization
from keras.layers import Input, Dense, Average, Reshape, Lambda, Conv1D, Flatten, MaxPooling1D, UpSampling1D, GlobalMaxPooling1D
from keras.layers import LSTM, Bidirectional, BatchNormalization, Dropout, Concatenate, Embedding, Activation, Dot, dot
from keras.layers import TimeDistributed, RepeatVector, Permute, merge, Multiply
from keras.activations import relu
from keras.layers.advanced_activations import LeakyReLU, PReLU, ReLU
from keras.models import Sequential, Model, clone_model
from keras.utils import to_categorical
from keras.callbacks import EarlyStopping,ModelCheckpoint
from keras.constraints import unitnorm
import sklearn as sk
from sklearn.preprocessing import StandardScaler, LabelEncoder, OneHotEncoder
from sklearn.manifold import LocallyLinearEmbedding, MDS, Isomap, TSNE
from sklearn.decomposition import PCA, IncrementalPCA, KernelPCA, SparsePCA, TruncatedSVD, FastICA, MiniBatchDictionaryLearning
from sklearn.random_projection import GaussianRandomProjection, SparseRandomProjection
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import KFold, train_test_split
from sklearn.metrics import mean_squared_error, explained_variance_score, mean_absolute_error, median_absolute_error, r2_score
from sklearn.metrics import precision_recall_curve, roc_curve
from sklearn.metrics import average_precision_score, precision_score, recall_score, f1_score, roc_auc_score, accuracy_score, matthews_corrcoef
import xgboost
from processSeq import load_seq_1, kmer_dict, load_seq_2, load_seq_2_kmer
from scipy import stats
from scipy.stats import skew, pearsonr, spearmanr, wilcoxon, mannwhitneyu,kstest,ks_2samp, chisquare
from scipy import signal
from scipy.signal import find_peaks, find_peaks_cwt, peak_prominences
from statsmodels.stats.multitest import multipletests
from timeit import default_timer as timer
import time
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
import matplotlib.ticker as ticker
import matplotlib.gridspec as gridspec
from matplotlib import rcParams
plt.switch_backend('Agg')
import seaborn as sns
import h5py
import os.path
from optparse import OptionParser
clipped_relu = lambda x: relu(x, max_value=1.0)
import multiprocessing as mp
import threading
n_epochs = 100
drop_out_rate = 0.5
learning_rate = 0.001
validation_split_ratio = 0.1
BATCH_SIZE = 128
NUM_DENSE_LAYER = 2
NUM_CONV_LAYER_2 = 2 # number of convolutional layers
MODEL_PATH = './test2_2.2'
READ_THRESHOLD = 100
CLIPNORM1 = 1000.0
def mapping_Idx(serial1,serial2):
if len(np.unique(serial1))<len(serial1):
print("error! ref_serial not unique", len(np.unique(serial1)), len(serial1))
return
unique_flag = 1
t_serial2 = np.unique(serial2,return_inverse=True)
if len(t_serial2[0])<len(serial2):
# print("serial2 not unique!")
serial2_ori = serial2.copy()
serial2 = t_serial2[0]
unique_flag = 0
ref_serial = np.sort(serial1)
ref_sortedIdx = np.argsort(serial1)
ref_serial = np.int64(ref_serial)
map_serial = np.sort(serial2)
map_sortedIdx = np.argsort(serial2)
map_serial = np.int64(map_serial)
num1 = np.max((ref_serial[-1],map_serial[-1]))+1
vec1 = np.zeros((num1,2))
vec1[map_serial,0] = 1
b = np.where(vec1[ref_serial,0]>0)[0]
vec1[ref_serial,1] = 1
b1 = np.where(vec1[map_serial,1]>0)[0]
idx = ref_sortedIdx[b]
idx1 = -np.ones(len(map_serial))
idx1[map_sortedIdx[b1]] = idx
if unique_flag==0:
idx1 = idx1[t_serial2[1]]
return np.int64(idx1)
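# Worked example (sketch): for each entry of serial2, mapping_Idx returns its
# index in serial1, or -1 when absent, e.g.
# mapping_Idx(np.array([10, 20, 30]), np.array([30, 10, 40])) -> array([2, 0, -1])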
def search_Idx(serial1,serial2):
id1 = mapping_Idx(serial1,serial2)
b2 = np.where(id1<0)[0]
if len(b2)>0:
print('error!',len(b2))
return
return id1
def smooth(x,window_len=11,window='hanning'):
"""smooth the data using a window with requested size.
"""
    # if x.ndim != 1:
    #     raise ValueError("smooth only accepts 1 dimension arrays.")
    # if x.size < window_len:
    #     raise ValueError("Input vector needs to be bigger than window size.")
    assert x.ndim == 1
    assert x.size >= window_len
    if window_len < 3:
        return x
    assert window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']
    s = np.r_[x[window_len-1:0:-1], x, x[-2:-window_len-1:-1]]
    if window == 'flat':  # moving average
        w = np.ones(window_len, 'd')
    else:
        w = getattr(np, window)(window_len)
    y = np.convolve(w / w.sum(), s, mode='valid')
    return y
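# Usage sketch: the returned array is longer than the input
# (len(x) + window_len - 1) because of the mirrored edge padding, e.g.
# y = smooth(np.sin(np.linspace(-2, 2, 50)), window_len=11)  # len(y) == 60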
def score_2a(y, y_predicted):
score1 = mean_squared_error(y, y_predicted)
score2 = pearsonr(y, y_predicted)
score3 = explained_variance_score(y, y_predicted)
score4 = mean_absolute_error(y, y_predicted)
score5 = median_absolute_error(y, y_predicted)
score6 = r2_score(y, y_predicted)
score7, pvalue = spearmanr(y,y_predicted)
vec1 = [score1, score2[0], score2[1], score3, score4, score5, score6, score7, pvalue]
return vec1
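# For reference, score_2a returns the metrics in this order:
# [MSE, Pearson r, Pearson p-value, explained variance, MAE,
#  median absolute error, R^2, Spearman rho, Spearman p-value]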
def score_2a_1(data1, data2, alternative='greater'):
try:
mannwhitneyu_statistic, mannwhitneyu_pvalue = mannwhitneyu(data1,data2,alternative=alternative)
except:
mannwhitneyu_statistic, mannwhitneyu_pvalue = -1, 1.1
if alternative=='greater':
alternative1 = 'less'
else:
alternative1 = alternative
try:
ks_statistic, ks_pvalue = ks_2samp(data1,data2,alternative=alternative1)
except:
ks_statistic, ks_pvalue = -1, 1.1
vec1 = [[mannwhitneyu_pvalue,ks_pvalue],
[mannwhitneyu_statistic,ks_statistic]]
return vec1
def score_2a_2(y,y_ori,y_predicted,type_id=0,chrom=[],by_chrom=0):
sample_num = len(y)
middle_point = 0
if np.min(y_ori)>-0.2:
middle_point = 0.5
thresh_1 = len(np.where(y_ori<middle_point)[0])/sample_num
temp1 = np.quantile(y,thresh_1)
temp3 = np.quantile(y_predicted,thresh_1)
thresh_2 = 0.5
temp2 = np.quantile(y_ori,thresh_2)
print(sample_num,thresh_1,temp1,temp2,temp3)
print(np.max(y_ori),np.min(y_ori),np.max(y),np.min(y),np.max(y_predicted),np.min(y_predicted))
thresh = temp1
y1 = np.zeros_like(y,dtype=np.int8)
y2 = y1.copy()
y1[y>thresh] = 1
if type_id>0:
thresh = temp3
y2[y_predicted>thresh] = 1
y_predicted_scale = stats.rankdata(y_predicted,'average')/len(y_predicted)
accuracy, auc, aupr, precision, recall, F1 = score_function(y1,y2,y_predicted_scale)
list1, list2 = [], []
if by_chrom==1:
assert len(chrom)>0
chrom_vec = np.unique(chrom)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
t_vec1 = score_function(y1[b1],y2[b1],y_predicted_scale[b1])
list1.append(chrom_id)
list2.append(t_vec1)
list2 = np.asarray(list2)
return (accuracy, auc, aupr, precision, recall, F1, list1, list2)
def writeToBED(filename1,filename2,color_vec):
data1 = pd.read_csv(filename1,header=None,sep='\t')
colnames = list(data1)
fields = ['chrom','start','stop','name','score','strand','thickStart','thickEnd','itemRgb']
data2 = pd.DataFrame(columns=fields)
for i in range(3):
data2[fields[i]] = data1[colnames[i]]
num1 = data1.shape[0]
data2['name'] = list(range(num1))
data2['score'] = [600]*num1
data2['strand'] = ['.']*num1
data2['thickStart'], data2['thickEnd'] = data2['start'], data2['stop']
color1 = np.asarray([color_vec[0]]*num1)
color1[range(1,num1,2)] = color_vec[1]
data2['itemRgb'] = color1
data2.to_csv(filename2,header=False,index=False,sep='\t')
return True
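# Usage sketch (hypothetical filenames): convert a 3-column region file into a
# 9-column BED track whose items alternate between two itemRgb colours.
# writeToBED('regions.txt', 'regions.bed', ['255,0,0', '0,0,255'])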
# reciprocal mapping
# input: filename_1: original position file on genome 1
#        filename1: positions mapped from genome 1 to genome 2
#        filename2: positions mapped from genome 2 to genome 1
# output: output_filename1: positions with reciprocal mapping on genome 2
#         output_filename_1: positions with reciprocal mapping on genome 1
def remapping_serial(filename_1,filename1,filename2,output_filename1,output_filename_1=''):
data_1 = pd.read_csv(filename_1,header=None,sep='\t')
chrom_1, start_1, stop_1 = np.asarray(data_1[0]), np.asarray(data_1[1]), np.asarray(data_1[2])
serial_1 = np.asarray(data_1[3])
data1 = pd.read_csv(filename1,header=None,sep='\t')
serial1 = np.asarray(data1[3])
data2 = pd.read_csv(filename2,header=None,sep='\t')
chrom2, start2, stop2 = np.asarray(data2[0]), np.asarray(data2[1]), np.asarray(data2[2])
serial2 = np.asarray(data2[3])
id1 = mapping_Idx(serial_1,serial2)
assert np.sum(id1<0)==0
num1 = len(serial2)
id2 = np.zeros(num1,dtype=np.int8)
for i in range(num1):
t_chrom2, t_start2, t_stop2 = chrom2[i], start2[i], stop2[i]
t_chrom_1, t_start_1, t_stop_1 = chrom_1[id1[i]], start_1[id1[i]], stop_1[id1[i]]
if (t_chrom_1==t_chrom2) and (t_start2<t_stop_1) and (t_stop2>t_start_1):
id2[i] = 1
b1 = np.where(id2>0)[0]
serial_2 = serial2[b1]
id_1 = mapping_Idx(serial_1,serial_2)
id_2 = mapping_Idx(serial1,serial_2)
data_1 = data_1.loc[id_1,:]
data1 = data1.loc[id_2,:]
if output_filename_1!='':
data_1.to_csv(output_filename_1,index=False,header=False,sep='\t')
data1.to_csv(output_filename1,index=False,header=False,sep='\t')
return True
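# Usage sketch (hypothetical filenames), following the input/output comment above:
# remapping_serial('hg38_regions.bed', 'hg38_to_mm10.bed', 'mm10_to_hg38.bed',
#                  'reciprocal_mm10.bed', 'reciprocal_hg38.bed')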
# co-binding
def binding_1(filename_list,output_filename_1,distance_thresh=10000):
filename1 = filename_list[0]
num1 = len(filename_list)
data1 = pd.read_csv(filename1,header=None,sep='\t')
region_num = len(data1)
colnames = list(data1)
col1, col2, col3 = colnames[0], colnames[1], colnames[2]
chrom1, start1, stop1 = np.asarray(data1[col1]), np.asarray(data1[col2]), np.asarray(data1[col3])
    for i in range(1, num1):
        filename2 = filename_list[i]
        data2 = pd.read_csv(filename2, header=None, sep='\t')
        region_num2 = len(data2)
        colnames = list(data2)
        col1, col2, col3 = colnames[0], colnames[1], colnames[2]
        chrom2, start2, stop2 = np.asarray(data2[col1]), np.asarray(data2[col2]), np.asarray(data2[col3])
def binding1(filename1,output_filename,output_filename1=''):
data1 = pd.read_csv(filename1,header=None,sep='\t')
chrom1 = np.asarray(data1[0])
b1 = np.where((chrom1!='chrX')&(chrom1!='chrY'))[0]
data1 = data1.loc[b1,:]
data1.reset_index(drop=True,inplace=True)
chrom1, start1, stop1 = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2])
chrom2, start2, stop2 = np.asarray(data1[10]), np.asarray(data1[11]), np.asarray(data1[12])
region_num = len(chrom1)
t1 = np.min([start1,start2],axis=0)
t2 = np.max([stop1,stop2],axis=0)
fields = ['chrom','start','stop','start1','stop1','start2','stop2','region_len']
region_len = t2-t1
print(np.min(region_len),np.max(region_len),np.median(region_len))
data1 = pd.DataFrame(columns=fields)
data1['chrom'] = chrom1
data1['start'], data1['stop'] = t1, t2
data1['start1'], data1['stop1'] = start1, stop1
data1['start2'], data1['stop2'] = start2, stop2
data1['region_len'] = region_len
data1.to_csv(output_filename,header=False,index=False,sep='\t')
if output_filename1=='':
b1 = output_filename.find('.txt')
output_filename1 = output_filename[0:b1]+'.bed'
data2 = data1.loc[:,['chrom','start','stop']]
data2['serial'] = np.arange(region_num)+1
data2.to_csv(output_filename1,header=False,index=False,sep='\t')
return data1
def region_annotation(filename1):
    # stub: the arguments below (chrom, start, stop, serial, value, seq_list)
    # are expected to be prepared elsewhere before this call
    compare_with_regions_peak_search1(chrom, start, stop, serial, value, seq_list, thresh_vec=[0.9])
def peak_extend(chrom_ori,start_ori,serial_ori,chrom,start,serial,flanking=2):
num1 = len(chrom)
sample_num = len(chrom_ori)
vec1 = np.zeros(sample_num,dtype=np.int64)
print(num1,sample_num)
chrom_vec = np.unique(chrom)
size1 = 2*flanking+1
label = np.arange(1,num1+1)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
t_serial1 = serial[b1]
num2 = len(t_serial1)
t1 = np.outer(t_serial1,np.ones(size1))
t2 = t1+np.outer(np.ones(num2),np.arange(-flanking,flanking+1))
b2 = np.where(chrom_ori==chrom_id)[0]
s1 = np.min(serial_ori[b2])
s2 = np.max(serial_ori[b2])
t2[t2<s1] = s1
t2[t2>s2] = s2
t_label1 = label[b1]
label1 = -np.repeat(t_label1,size1)
t2 = np.ravel(t2)
id1 = mapping_Idx(serial_ori,t2)
id2 = (id1>=0)
vec1[id1[id2]] = label1[id2]
b1 = mapping_Idx(serial_ori,serial)
assert np.sum(b1<0)==0
vec1[b1] = label
return vec1
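# Worked example (sketch): with six loci on chr1 (serials 100..105, start
# positions hypothetical) and one peak at serial 102, flanking=1 labels the
# peak centre with +1 and the extended neighbours with -1:
# chrom_o = np.array(['chr1'] * 6); serial_o = np.arange(100, 106); start_o = serial_o * 5000
# peak_extend(chrom_o, start_o, serial_o, chrom_o[[2]], start_o[[2]], serial_o[[2]], flanking=1)
# -> array([ 0, -1,  1, -1,  0,  0])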
# select genomic loci with high estimated importance scores
def find_region_sub1_ori(filename_list1,output_filename1,output_filename2='',load=1,config={}):
pd.set_option("display.precision", 8)
type_id1, type_id2 = 0, 1
filename_centromere = config['filename_centromere']
if (load==0) or (os.path.exists(output_filename1)==False):
print(filename_list1)
print(output_filename1)
data1, chrom_numList = select_region1_merge(filename_list1,output_filename1,
type_id1=type_id1,type_id2=type_id2,
filename_centromere=filename_centromere)
else:
data1 = pd.read_csv(output_filename1,sep='\t')
if filename_centromere!='':
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
id1 = select_idx_centromere(chrom,start,stop,filename_centromere)
print('select_idx_centromere', len(chrom), len(id1), len(id1)/len(chrom))
data1 = data1.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
vec1 = ['Q1','Q2']
type_id = 1
sel_column = vec1[type_id]
print(output_filename1,list(data1),data1.shape)
attention1 = np.asarray(data1[sel_column])
thresh = config['thresh'] # thresh = 0.95
# test 1: predicted attention above threshold
id1 = np.where(attention1>thresh)[0]
print(len(id1),len(attention1),len(id1)/len(attention1))
vec1 = peak_extend(chrom,start,serial,chrom[id1],start[id1],serial[id1],flanking=2)
# test 2: predicted attention local peak above 0.95
signal = np.asarray(data1['signal'])
attention_1 = np.asarray(data1['predicted_attention'])
print(data1.shape)
value = np.column_stack((attention_1,attention1))
seq_list = generate_sequences_chrom(chrom,serial)
num1 = len(seq_list)
cnt1 = 0
for i in range(num1):
cnt1 += seq_list[i][1]-seq_list[i][0]+1
print(cnt1)
if 'thresh_vec_pre' in config:
thresh_vec_pre = config['thresh_vec_pre']
else:
thresh_vec_pre = [0.90,0.50,0.50]
peak_thresh_1, peak_thresh_2, peak_thresh_3 = thresh_vec_pre
if 'distance_thresh_vec' in config:
distance_thresh_vec = config['distance_thresh_vec']
else:
distance_thresh_vec = [[-1,5],[0.25,1]]
distance_thresh_1, distance_thresh_2 = distance_thresh_vec
thresh_vec = [peak_thresh_1]
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 0
config['threshold'] = distance_thresh_1[0]
config['distance_peak_thresh'] = distance_thresh_1[1]
print('compare with regions peak search')
dict2 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
chrom1, start1, stop1, serial1, annot1 = dict2[thresh_vec[0]]
vec2 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
# test 3: predicted attention local peak above distance of 0.25
value = np.column_stack((attention1,attention1))
thresh_vec = [peak_thresh_2]
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 0
config['threshold'] = distance_thresh_2[0] # distance of value from peak to neighbors
config['distance_peak_thresh'] = distance_thresh_2[1] # update for peak distance
print('compare with regions peak search')
dict2_1 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
chrom1, start1, stop1, serial1, annot1 = dict2_1[thresh_vec[0]]
vec2_1 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
# test 4: predicted attention local peak wavelet transformation
thresh_vec = [peak_thresh_3]
value = np.column_stack((attention_1,attention1))
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 1
dict3 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
chrom1, start1, stop1, serial1, annot1 = dict3[thresh_vec[0]]
vec3 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
print(len(vec1),len(vec2),len(vec2_1),len(vec3))
data1['sel1'], data1['sel2'], data1['sel2.1'], data1['sel3'] = vec1, vec2, vec2_1, vec3
if output_filename2=='':
b1 = output_filename1.find('.txt')
output_filename2 = output_filename1[0:b1]+'.2.txt'
print('find_region_sub1',data1.shape)
print(output_filename2)
data1.to_csv(output_filename2,index=False,sep='\t',float_format='%.6f')
return data1
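# Sketch of a config dict for find_region_sub1_ori, using only the keys read
# above; the values mirror the in-function defaults and are illustrative.
# config = {'filename_centromere': '', 'thresh': 0.95,
#           'thresh_vec_pre': [0.90, 0.50, 0.50],
#           'distance_thresh_vec': [[-1, 5], [0.25, 1]]}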
# select genomic loci with high estimated importance scores
def find_region_sub1(filename_list1,output_filename1,output_filename2='',load=1,config={}):
pd.set_option("display.precision", 8)
type_id1, type_id2 = 0, 1
filename_centromere = config['filename_centromere']
if (load==0) or (os.path.exists(output_filename1)==False):
print(filename_list1)
print(output_filename1)
data1, chrom_numList = select_region1_merge(filename_list1,output_filename1,
type_id1=type_id1,type_id2=type_id2,
filename_centromere=filename_centromere)
else:
data1 = pd.read_csv(output_filename1,sep='\t')
if filename_centromere!='':
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
id1 = select_idx_centromere(chrom,start,stop,filename_centromere)
print('select_idx_centromere', len(chrom), len(id1), len(id1)/len(chrom))
data1 = data1.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
vec1 = ['Q1','Q2']
type_id = 1
sel_column = vec1[type_id]
print(output_filename1,list(data1),data1.shape)
# result = pd.merge(left, right, left_index=True, right_index=True, how='outer')
attention1 = np.asarray(data1[sel_column])
thresh = config['thresh'] # thresh = 0.95
# test 1: predicted attention above threshold
id1 = np.where(attention1>thresh)[0]
print(len(id1),len(attention1),len(id1)/len(attention1))
vec1 = peak_extend(chrom,start,serial,chrom[id1],start[id1],serial[id1],flanking=2)
# test 2: predicted attention local peak above 0.95
signal = np.asarray(data1['signal'])
attention_1 = np.asarray(data1['predicted_attention'])
print(data1.shape)
value = np.column_stack((attention_1,attention1))
seq_list = generate_sequences_chrom(chrom,serial)
num1 = len(seq_list)
cnt1 = 0
for i in range(num1):
cnt1 += seq_list[i][1]-seq_list[i][0]+1
print(cnt1)
if 'thresh_vec_pre' in config:
thresh_vec_pre = config['thresh_vec_pre']
else:
thresh_vec_pre = [0.90,0.50,0.50]
peak_thresh_1, peak_thresh_2, peak_thresh_3 = thresh_vec_pre
if 'distance_thresh_vec' in config:
distance_thresh_vec = config['distance_thresh_vec']
else:
distance_thresh_vec = [[[-1,1],[-1,3],[-1,5]],[0.25,1]]
distance_thresh_list1, distance_thresh_2 = distance_thresh_vec
thresh_vec = [peak_thresh_1]
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 0
peak_list1 = []
for distance_thresh_1 in distance_thresh_list1:
config['threshold'] = distance_thresh_1[0]
config['distance_peak_thresh'] = distance_thresh_1[1]
print(distance_thresh_1)
print('compare with regions peak search')
dict2 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
# dict1[0] = dict2[thresh_vec[0]]
chrom1, start1, stop1, serial1, annot1 = dict2[thresh_vec[0]]
vec2 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
peak_list1.append(vec2)
# test 3: predicted attention local peak above distance of 0.25
value = np.column_stack((attention1,attention1))
thresh_vec = [peak_thresh_2]
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 0
config['threshold'] = distance_thresh_2[0] # distance of value from peak to neighbors
config['distance_peak_thresh'] = distance_thresh_2[1] # update for peak distance
print('compare with regions peak search')
dict2_1 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
chrom1, start1, stop1, serial1, annot1 = dict2_1[thresh_vec[0]]
vec2_1 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
# test 4: predicted attention local peak wavelet transformation
thresh_vec = [peak_thresh_3]
value = np.column_stack((attention_1,attention1))
config = {'thresh_vec':thresh_vec}
config['peak_type'] = 1
dict3 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)
chrom1, start1, stop1, serial1, annot1 = dict3[thresh_vec[0]]
vec3 = peak_extend(chrom,start,serial,chrom1,start1,serial1,flanking=2)
data1['sel1'] = vec1
cnt = len(peak_list1)
for i in range(cnt):
if i==0:
t_colname = 'sel2'
else:
t_colname = 'sel2.0.%d'%(i)
data1[t_colname] = peak_list1[i]
data1['sel2.1'], data1['sel3'] = vec2_1, vec3
if output_filename2=='':
b1 = output_filename1.find('.txt')
output_filename2 = output_filename1[0:b1]+'.2.txt'
print('find_region_sub1',data1.shape)
print(output_filename2)
data1.to_csv(output_filename2,index=False,sep='\t',float_format='%.6f')
return data1
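# Example usage (a minimal sketch; the filenames and threshold values below are
# hypothetical and only illustrate the config keys read by find_region_sub1):
def example_find_region_sub1():
    config = {'thresh':0.95,
              'filename_centromere':'centromere.txt',
              'thresh_vec_pre':[0.90,0.50,0.50],
              'distance_thresh_vec':[[[-1,1],[-1,3],[-1,5]],[0.25,1]]}
    filename_list1 = ['estimation_run1.txt','estimation_run2.txt']
    return find_region_sub1(filename_list1,'estimation.merged.txt',config=config)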
# write genomic loci to bed file
# def find_region_sub2(filename1,output_filename1,config={}):
# data1 = pd.read_csv(filename1,sep='\t')
# colnames = list(data1)
# colnames_1 = ['sel1','sel2','sel2.1','sel3']
# label_1 = np.asarray(data1.loc[:,colnames_1])
# sel1, sel2, sel2_1 = label_1[:,0], label_1[:,1], label_1[:,2]
# b1, b2, b3 = np.where(sel1>0)[0], np.where(sel2>0)[0], np.where(sel2_1>0)[0]
# b_1, b_2, b_3 = np.where(sel1!=0)[0], np.where(sel2!=0)[0], np.where(sel2_1!=0)[0]
# chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
# sel_column = ['Q2']
# predicted_attention, predicted_attention1 = np.asarray(data1['predicted_attention']), np.asarray(data1[sel_column])
# thresh = config['thresh_select']
# id1 = np.where(predicted_attention1[b3]>thresh)[0]
# id1 = b3[id1]
# id2 = np.union1d(b2,id1) # local peaks
# id3 = np.union1d(b1,id2) # local peaks or high scores
# id5 = np.intersect1d(b2,id3) # local peaks
# id6 = np.intersect1d(b1,id5) # local peaks and high values
# print('select',len(b1),len(b2),len(b3),len(id1),len(id2),len(id3),len(id5),len(id6))
# sample_num = data1.shape[0]
# t_label = np.zeros(sample_num,dtype=np.int8)
# list1 = [b1,b2,id3,id6]
# for i in range(len(list1)):
# t_label[list1[i]] = i+1
# sel_id = np.where(t_label>0)[0]
# data1 = data1.loc[sel_id,['chrom','start','stop','serial','predicted_attention','Q2']]
# data1.reset_index(drop=True,inplace=True)
# data1['label'] = t_label[t_label>0]
# data1.to_csv(output_filename1,index=False,sep='\t')
# return True
# write genomic loci to bed file
# def find_region_sub2_1(filename1,output_filename1,config={}):
# data1 = pd.read_csv(filename1,sep='\t')
# colnames = list(data1)
# # high value, local peak (distance>=1), local peak with signal difference (distance>=1), wavelet local peak
# colnames_1 = ['sel1','sel2','sel2.1','sel3']
# num1 = len(colnames)
# # local peak with different distances
# for i in range(num1):
# t_colname = colnames[i]
# if t_colname.find('sel2.0')>=0:
# # print(1,t_colname)
# colnames_1.append(t_colname)
# # else:
# # print(0,t_colname)
# colnames_2 = colnames_1[0:3]+colnames_1[4:]
# num2 = len(colnames_2)
# print('colnames_2',colnames_2)
# chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
# sel_column = 'Q2'
# predicted_attention, predicted_attention1 = np.asarray(data1['predicted_attention']), np.asarray(data1[sel_column])
# thresh = config['thresh_select']
# region_num = data1.shape[0]
# mask = np.zeros((region_num,num2),dtype=np.int8)
# thresh = config['thresh_select']
# thresh_2 = config['thresh_2']
# for i in range(num2):
# t_colname = colnames_2[i]
# t_column = np.asarray(data1[t_colname])
# b1 = np.where(t_column>0)[0]
# if t_colname=='sel2.1':
# id1 = np.where(predicted_attention1[b1]>thresh)[0]
# b1 = b1[id1]
# if t_colname=='sel1':
# id1 = np.where(predicted_attention1[b1]>thresh_2)[0]
# b1 = b1[id1]
# mask[b1,i] = 1
# label_value = np.zeros(region_num,dtype=np.int32)
# for i1 in range(num2):
# label_value = label_value + (10**i1)*mask[:,i1]
# t_label = label_value
# sel_id = np.where(t_label>0)[0]
# data1 = data1.loc[sel_id,['chrom','start','stop','serial','predicted_attention','Q2']]
# data1.reset_index(drop=True,inplace=True)
# data1['label'] = t_label[t_label>0]
# data1.to_csv(output_filename1,index=False,sep='\t')
# return True
# merge neighboring important genomic loci into regions
# def find_region_sub3(filename1,output_filename1,config={}):
# data1 = pd.read_csv(filename1,sep='\t')
# chrom = np.asarray(data1['chrom'])
# t_score = np.asarray(data1['Q2'])
# t_label = np.asarray(data1['label'])
# colnames = list(data1)
# thresh1 = config['thresh_select']
# b1 = np.where(t_score>thresh1)[0]
# b2 = np.where(t_label>=3)[0]
# b1 = np.intersect1d(b1,b2)
# data1 = data1.loc[b1,:]
# data1.reset_index(drop=True,inplace=True)
# chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
# t_score = np.asarray(data1['Q2'])
# t_percentile = np.asarray(data1['label_1'])
# id1 = [int(chrom1[3:]) for chrom1 in chrom]
# idx_sel_list = np.column_stack((id1,serial))
# seq_list = generate_sequences(idx_sel_list,gap_tol=5)
# data_1 = output_generate_sequences(chrom,start,stop,serial,idx_sel_list,seq_list,output_filename='temp1.txt',save_mode=0)
# serial1, serial2 = np.asarray(data_1['serial1']), np.asarray(data_1['serial2'])
# num1 = len(serial1)
# list_1, list_2, list_3 = [], [], []
# for i in range(num1):
# b1 = np.where((serial<=serial2[i])&(serial>=serial1[i]))[0]
# list1 = [str(serial[i1]) for i1 in b1]
# list2 = ['%.4f'%(t_score1) for t_score1 in t_score[b1]]
# list3 = ['%.4f'%(t_percent1) for t_percent1 in t_percentile[b1]] # signal percentile
# d1 = ','
# str1 = d1.join(list1)
# str2 = d1.join(list2)
# list_1.append(str1)
# list_2.append(str2)
# list_3.append(d1.join(list3))
# data_1['loci'] = list_1
# data_1['score'] = list_2
# data_1['signal_percentile'] = list_3
# data_1.to_csv(output_filename1,index=False,sep='\t')
# return data_1
# generate serial for bed file
# def find_region_sub3_1(filename1,genome_file='',chrom_num=19):
# data1 = pd.read_csv(filename1,header=None,sep='\t')
# colnames = list(data1)
# if len(colnames)<5:
# chrom, start, stop, signal = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2]), np.asarray(data1[3])
# serial, start_vec = generate_serial_start(genome_file,chrom,start,stop,chrom_num=chrom_num,type_id=0)
# data1['serial'] = serial
# id1 = np.where(serial>=0)[0]
# data1 = data1.loc[id1,colnames[0:3]+['serial']+colnames[3:]]
# b1 = filename1.find('.bedGraph')
# output_filename = filename1[0:b1]+'.bed'
# data1 = data1.sort_values(by=['serial'])
# data1.to_csv(output_filename,header=False,index=False,sep='\t')
# # else:
# # chrom, start, stop, signal = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2]), np.asarray(data1[4])
# # serial = np.asarray(data1[3])
# return data1
# generate domain labels
# input: filename1: original RT data
def find_region_sub3_2(filename1):
fields = ['chrom','start','stop','serial','signal','label',
'q1','q2','q_1','q_2','local_peak1','local_peak2']
num_fields = len(fields)
data1_ori = pd.read_csv(filename1,header=None,sep='\t')
colnames = list(data1_ori)
t_num_fields = len(colnames)
if t_num_fields>=num_fields:
return
if t_num_fields<10:
data1_ori = region_quantile_signal(filename1)
print('region_quantile_signal',data1_ori.shape)
# return
colnames = list(data1_ori)
t_num_fields = len(colnames)
if t_num_fields<num_fields-1:
data1_ori = region_local_signal(filename1)
print('region_local_signal',data1_ori.shape)
# return
colnames = list(data1_ori)
chrom = np.asarray(data1_ori[0])
sample_num = len(chrom)
label_ori = np.zeros(sample_num,dtype=np.int64)
id1 = np.where((chrom!='chrX')&(chrom!='chrY')&(chrom!='chrM'))[0]
data1 = data1_ori.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom, start, stop, signal = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2]), np.asarray(data1[4])
serial = np.asarray(data1[3])
sample_num1 = len(chrom)
label1 = np.zeros(sample_num1,dtype=np.int64)
if np.abs(np.median(signal))>0.35:
thresh = 0.5
else:
thresh = 0
b1 = np.where(signal>thresh)[0]
b2 = np.where(signal<=thresh)[0]
id_1 = [int(chrom1[3:]) for chrom1 in chrom[b1]]
idx_sel_list1 = np.column_stack((id_1,serial[b1]))
seq_list1 = generate_sequences(idx_sel_list1,gap_tol=5)
id_2 = [int(chrom1[3:]) for chrom1 in chrom[b2]]
idx_sel_list2 = np.column_stack((id_2,serial[b2]))
seq_list2 = generate_sequences(idx_sel_list2,gap_tol=5)
num1 = len(seq_list1)
for i in range(num1):
s1, s2 = seq_list1[i][0], seq_list1[i][1]
t_id = b1[s1:(s2+1)]
label1[t_id] = i+1
num2 = len(seq_list2)
for i in range(num2):
s1, s2 = seq_list2[i][0], seq_list2[i][1]
t_id = b2[s1:(s2+1)]
label1[t_id] = -(i+1)
label_ori[id1] = label1
data1_ori['label'] = label_ori
data1_ori = data1_ori.loc[:,colnames[0:5]+['label']+colnames[5:]]
data1_ori.to_csv(filename1,header=False,index=False,sep='\t',float_format='%.6f')
return data1_ori
# quantile of signals
# input: filename1: original RT signal
def region_quantile_signal(filename1):
data1_ori = pd.read_csv(filename1,header=None,sep='\t')
colnames = list(data1_ori)
chrom, start, stop = np.asarray(data1_ori[0]), np.asarray(data1_ori[1]), np.asarray(data1_ori[2])
sample_num = len(chrom)
id1 = np.where((chrom!='chrX')&(chrom!='chrY')&(chrom!='chrM'))[0]
data1 = data1_ori.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom, start, stop, signal = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2]), np.asarray(data1[4])
serial = np.asarray(data1[3])
sample_num1 = len(chrom)
print(sample_num1)
thresh = 0
ranking = stats.rankdata(signal,'average')/len(signal)
rank1 = np.zeros((sample_num1,4))
rank1[:,0] = ranking
b1 = np.where(signal>thresh)[0]
rank1[b1,2]= stats.rankdata(signal[b1],'average')/len(b1)
b2 = np.where(signal<=thresh)[0]
rank1[b2,2]= -stats.rankdata(-signal[b2],'average')/len(b2)
chrom_vec = np.unique(chrom)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
rank1[b1,1]= stats.rankdata(signal[b1],'average')/len(b1)
b2 = np.where(signal[b1]>thresh)[0]
b_2 = b1[b2]
rank1[b_2,3] = stats.rankdata(signal[b_2],'average')/len(b_2)
b2 = np.where(signal[b1]<=thresh)[0]
b_2 = b1[b2]
rank1[b_2,3] = -stats.rankdata(-signal[b_2],'average')/len(b_2)
rank_1 = np.zeros((sample_num,4))
rank_1[id1] = rank1
fields = ['q1','q2','q_1','q_2']
num2 = len(fields)
for i in range(num2):
data1_ori[5+i] = rank_1[:,i]
data1_ori.to_csv(filename1,index=False,header=False,sep='\t',float_format='%.7f')
return data1_ori
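# Illustration of the rank-based quantiles computed above (standalone sketch;
# the toy signal values are made up):
def example_rank_quantile():
    signal = np.asarray([0.8,-0.2,0.3,-0.6,0.1])
    # genome-wide quantile in (0,1]; ties receive their average rank
    q1 = stats.rankdata(signal,'average')/len(signal)
    # signed quantile computed separately within the early (signal>0) and
    # late (signal<=0) fractions, matching rank1[:,2] above
    q_1 = np.zeros(len(signal))
    b1 = np.where(signal>0)[0]
    q_1[b1] = stats.rankdata(signal[b1],'average')/len(b1)
    b2 = np.where(signal<=0)[0]
    q_1[b2] = -stats.rankdata(-signal[b2],'average')/len(b2)
    return q1, q_1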
# quantile of signals
# input: filename1: original RT signal
def region_local_signal(filename1):
data1_ori = pd.read_csv(filename1,header=None,sep='\t')
colnames = list(data1_ori)
col1, col2, col3, col4, col5 = colnames[0], colnames[1], colnames[2], colnames[3], colnames[4]
chrom = np.asarray(data1_ori[col1])
fields = ['chrom','start','stop','serial','signal','q1','q2','q_1','q_2']
data1_ori = data1_ori.loc[:,colnames[0:len(fields)]]
sample_num = len(chrom)
id1 = np.where((chrom!='chrX')&(chrom!='chrY')&(chrom!='chrM'))[0]
data1 = data1_ori.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom, start, stop, signal = np.asarray(data1[col1]), np.asarray(data1[col2]), np.asarray(data1[col3]), np.asarray(data1[col5])
serial = np.asarray(data1[col4])
sample_num1 = len(serial)
seq_list = generate_sequences_chrom(chrom,serial)
value = signal
min_value1 = np.min(value)-0.1
print(sample_num1,np.max(value),np.min(value))
config = {'prominence_thresh':0,'distance_thresh':20,'width_thresh':20}
dict_2 = compare_with_regions_peak_search2(chrom,start,stop,serial,value,seq_list,thresh_vec=[min_value1],config=config)
fields = ['chrom','start','stop','serial','signal','q1','q2','q_1','q_2']
num1 = len(fields)
key_1 = list(dict_2.keys())
for i in range(2):
chrom_local,start_local,stop_local,serial_local,annot_local = dict_2[key_1[0]][i] # using wavelet transformation to find peaks
local_peak = [chrom_local,start_local,stop_local]
id2 = mapping_Idx(serial,serial_local)
assert np.sum(id2<0)==0
label = np.zeros(sample_num,dtype=np.int32)
n_local_peak = len(serial_local)
label[id1[id2]] = np.arange(n_local_peak)+1
data1_ori[num1+i] = label
colnames = list(data1_ori)
data1_ori = data1_ori.loc[:,colnames[0:(num1+2)]]
data1_ori.to_csv(filename1,index=False,header=False,sep='\t',float_format='%.7f')
return data1_ori
# filter regions by signal
# input: filename1: original RT data
# filename_list: region file
# filename_list1: RT estimation file
def region_filter_1(filename1,filename_list,filename_list1):
fields = ['chrom','start','stop','serial','signal','label',
'q1','q2','q_1','q_2','local_peak1','local_peak2']
data1 = pd.read_csv(filename1,header=None,sep='\t')
print('region_filter_1',filename1)
print(list(data1))
if len(list(data1))<len(fields):
print('generate domain labels')
data1 = find_region_sub3_2(filename1)
print(filename1)
print(filename_list,filename_list1)
colnames = list(data1)
print(colnames)
n_column = len(colnames)
if n_column>len(fields):
fields = fields + list(range(n_column-len(fields)))
data1.columns = fields
print(list(data1))
num1 = len(filename_list)
serial = np.asarray(data1['serial'])
signal_percentile = np.asarray(data1['q_1'])
local_peak = np.asarray(data1['local_peak2'])
print(np.max(local_peak))
peak_serial = serial[local_peak>0]
thresh1 = 0.1
thresh2 = 20
for i in range(num1):
filename2 = filename_list[i]
filename2_1 = filename_list1[i]
print(filename2,filename2_1)
data2 = pd.read_csv(filename2,sep='\t')
chrom1, serial1 = np.asarray(data2['chrom']), np.asarray(data2['serial'])
start1 = np.asarray(data2['start'])
sample_num1 = len(chrom1)
data2_1 = pd.read_csv(filename2_1,sep='\t')
chrom2, serial2 = np.asarray(data2_1['chrom']), np.asarray(data2_1['serial'])
signal, predicted_signal = np.asarray(data2_1['signal']), np.asarray(data2_1['predicted_signal'])
id1 = mapping_Idx(serial,serial1)
t_id1 = np.where(id1<0)[0]
print(len(t_id1),serial1[t_id1],chrom1[t_id1],start1[t_id1])
assert np.sum(id1<0)==0
t_id2 = np.where(id1>=0)[0]
id1 = id1[t_id2]
t1 = signal_percentile[id1]
# b1 = np.where((t1>0))[0]
b2 = np.where((t1<thresh1))[0]
num2 = len(b2)
# vec1 = np.zeros(num2)
vec1 = np.zeros(sample_num1)
for i1 in range(sample_num1):
vec1[i1] = np.min(np.abs(peak_serial-serial1[i1]))
id_1 = np.where(vec1>thresh2)[0]
id_2 = np.intersect1d(id_1,b2)
temp1 = np.asarray([num2,len(id_1),len(id_2)])
print(temp1,temp1/sample_num1)
label_1 = -2*np.ones(sample_num1,dtype=np.float32)
label_3 = np.zeros(sample_num1,dtype=np.int32)
label_2 = np.ones(sample_num1,dtype=np.float32)
# label_1[b2] = t1[b2]
label_1[t_id2] = t1
label_3= vec1
id2 = mapping_Idx(serial2,serial1)
assert np.sum(id2<0)==0
rank1 = stats.rankdata(signal[id2],'average')/sample_num1
rank2 = stats.rankdata(predicted_signal[id2],'average')/sample_num1
label_2 = rank2-rank1
data2['label_1'] = label_1
data2['distance'] = np.int32(label_3)
data2['difference'] = label_2
data2.to_csv(filename2,index=False,sep='\t',float_format='%.7f')
return True
# overlapping of regions
# find overlapping regions
# input: data1: position file 1
# data2: position file 2
# mode: 0, for each position in file 1, find all positions in file 2 overlapping with this position
# mode: 1, for each position in file 1, find the position in file 2 that has the longest overlap with this position
def overlapping_with_regions_sub1(data1,data2,tol=0,mode=0):
colnames1 = list(data1)
chrom1, start1, stop1 = np.asarray(data1[colnames1[0]]), np.asarray(data1[colnames1[1]]), np.asarray(data1[colnames1[2]])
num1 = len(chrom1)
colnames2 = list(data2)
chrom2, start2, stop2 = np.asarray(data2[colnames2[0]]), np.asarray(data2[colnames2[1]]), np.asarray(data2[colnames2[2]])
num2 = len(chrom2)
chrom_vec1 = np.unique(chrom1)
chrom_vec2 = np.unique(chrom2)
dict1, dict2 = dict(), dict()
for chrom_id in chrom_vec1:
dict1[chrom_id] = np.where(chrom1==chrom_id)[0]
for chrom_id in chrom_vec2:
dict2[chrom_id] = np.where(chrom2==chrom_id)[0]
id_vec1, id_vec2 = [], []
region_len1 = stop1-start1
region_len2 = stop2-start2
cnt = 0
for chrom_id in chrom_vec1:
if not(chrom_id in chrom_vec2):
continue
id1 = dict1[chrom_id]
id2 = dict2[chrom_id]
print(chrom_id,len(id1),len(id2))
for t_id in id1:
t_chrom1, t_start1, t_stop1 = chrom1[t_id], start1[t_id], stop1[t_id]
t_start1, t_stop1 = t_start1-tol, t_stop1+tol
b2 = np.where((start2[id2]<t_stop1)&(stop2[id2]>t_start1))[0]
if len(b2)>0:
id_vec1.append(t_id)
t1 = id2[b2]
if mode==0:
id_vec2.append(t1)
else:
overlap = []
for t_id2 in t1:
temp1 = np.min([t_stop1-start2[t_id2],stop2[t_id2]-t_start1,region_len1[t_id],region_len2[t_id2]])
overlap.append(temp1)
id_1 = np.argmax(overlap)
id_vec2.append(t1[id_1])
id_vec1 = np.asarray(id_vec1)
id_vec2 = np.asarray(id_vec2)
return id_vec1, id_vec2
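# Example usage (hypothetical coordinates; a minimal sketch of the two modes):
def example_overlapping_with_regions_sub1():
    data1 = pd.DataFrame({'chrom':['chr1','chr1'],'start':[100,900],'stop':[500,1200]})
    data2 = pd.DataFrame({'chrom':['chr1','chr1'],'start':[400,950],'stop':[600,1100]})
    # mode=0: each entry of id_vec2 is an array of all overlapping indices in data2
    id_vec1, id_vec2 = overlapping_with_regions_sub1(data1,data2,tol=0,mode=0)
    # mode=1: each entry of id_vec2 is the single index with the longest overlap
    id_vec1_1, id_vec2_1 = overlapping_with_regions_sub1(data1,data2,tol=0,mode=1)
    return id_vec1, id_vec2, id_vec1_1, id_vec2_1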
# overlapping of different runs
def find_region_sub3_overlapping(filename_list,tol=0, thresh1=2):
filename1 = filename_list[0]
data1 = pd.read_csv(filename1,sep='\t')
label1 = np.asarray(data1['label1'])
id1 = np.where(label1>0)[0]
data1 = data1.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
chrom1, start1, stop1 = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop'])
region_num1 = len(id1)
num1 = len(filename_list)
bin_size = 5000
tol1 = tol*bin_size
list1, list2 = [], []
label = np.zeros((region_num1,num1-1),dtype=np.int8)
for i in range(1,num1):
filename2 = filename_list[i]
data2 = pd.read_csv(filename2,sep='\t')
label2 = np.asarray(data2['label1'])
id2 = np.where(label2>0)[0]
data2 = data2.loc[id2,:]
data2.reset_index(drop=True,inplace=True)
chrom2, start2, stop2 = np.asarray(data2['chrom']), np.asarray(data2['start']), np.asarray(data2['stop'])
region_num2 = len(id2)
print(region_num1,region_num2)
id_vec1, id_vec2 = overlapping_with_regions_sub1(data1,data2,tol=tol1)
list1.append(id_vec1)
list2.append(id_vec2)
label[id_vec1,i-1] = 1
d1 = np.sum(label,axis=1)
if num1==2:
thresh1 = 0
id_1 = np.where(d1>thresh1)[0]
d2 = np.sum(label,axis=0)
print(region_num1,d2,d2/region_num1)
data1['overlap'] = (d1+1)
for i in range(1,num1):
data1[i+1] = label[:,i-1]
data1 = data1.loc[id_1,:]
data1.reset_index(drop=True,inplace=True)
b1 = filename1.find('.txt')
output_filename1 = filename1[0:b1]+'.%d_%d.txt'%(tol,thresh1+2)
data1.to_csv(output_filename1,index=False,sep='\t')
return data1, output_filename1
# compare with RT state
# input: filename_ori: original RT signal with state estimation
# filename_list: list of (RT estimation file on genome 2, annotation) pairs; the output filename is read from config['output_filename']
def compare_RT_sub1(filename_ori,filename_list,thresh=-10,config={}):
# filename_centromere = config['filename_centromere']
# file_path = config['file_path']
# feature_id, interval, sel_id = config['feature_id'], config['interval'], config['sel_id']
data1 = pd.read_csv(filename_ori,header=None,sep='\t')
colnames = list(data1)
# print(colnames)
# data1 = data1.loc[:,colnames[0:6]+colnames[7:]]
# data1.to_csv(filename_ori,index=False,header=False,sep='\t',float_format='%.7f')
# print(data1.shape)
fields = ['chrom','start','stop','serial','signal','domain','q1','q2','q_1','q_2','local_peak1','local_peak2',
'state','group','group1']
if len(colnames)<len(fields):
print('state does not exist')
return -1
data1.columns = fields
chrom = np.asarray(data1['chrom'])
id1 = np.where((chrom!='chrX')&(chrom!='chrY')&(chrom!='chrM'))[0]
data1 = data1.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
signal_ori = np.asarray(data1['signal'])
print(data1.shape)
thresh1 = thresh
if thresh>0:
thresh1 = np.quantile(signal_ori,thresh)
print(np.max(signal_ori),np.min(signal_ori),thresh1)
if thresh1>-10:
id2 = np.where(signal_ori>thresh1)[0]
data1 = data1.loc[id2,:]
data1.reset_index(drop=True,inplace=True)
print(data1.shape)
chrom, start, stop, serial_ori = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
signal_ori = np.asarray(data1['signal'])
state, group, group1 = np.asarray(data1['state']), np.asarray(data1['group']), np.asarray(data1['group1'])
sel_id, filename_centromere = config['sel_id'], config['filename_centromere']
num1 = len(filename_list)
list1 = []
for i in range(num1):
filename2, annot1 = filename_list[i]
ref_data = (chrom,start,stop,serial_ori,signal_ori,[state,group,group1])
data_1 = compare_RT_sub2(ref_data,filename2,sel_id=sel_id,filename_centromere=filename_centromere,annot=annot1)
list1.append(data_1)
data_2 = pd.concat(list1, axis=0, join='outer', ignore_index=True,
keys=None, levels=None, names=None, verify_integrity=False, copy=True)
output_filename = config['output_filename']
data_2.to_csv(output_filename,index=False,sep='\t',float_format='%.7f')
return data_2
# compare with RT state
# input: ref_data: (chrom,start,stop,serial,signal,label) for the mapped RT estimation regions
# filename2: RT estimation score
def compare_RT_sub2(ref_data,filename2,sel_id='Q2',thresh=-10,filename_centromere='',annot=''):
chrom,start,stop,serial,signal,label = ref_data
state, group_id, group_id1 = label
# state = state[state>0]
# group_id = group_id[group_id!='-1']
# group_id1 = group_id[group_id1!='-2']
state_vec, group, group1 = np.unique(state), np.unique(group_id), np.unique(group_id1)
state_num, group_num, group_num1 = len(state_vec), len(group), len(group1)
data2 = pd.read_csv(filename2,sep='\t')
if filename_centromere!='':
data2 = region_filter(data2,filename_centromere)
chrom2 = np.asarray(data2['chrom'])
serial2 = np.asarray(data2['serial'])
chrom_vec, chrom_vec2 = np.unique(chrom), np.unique(chrom2)
id1 = mapping_Idx(serial,serial2)
b1 = np.where(id1>=0)[0]
id1 = id1[b1]
data2 = data2.loc[b1,:]
data2.reset_index(drop=True,inplace=True)
chrom2, start2, stop2, predicted_attention = np.asarray(data2['chrom']), np.asarray(data2['start']), np.asarray(data2['stop']), np.asarray(data2[sel_id])
serial2 = np.asarray(data2['serial'])
state, group_id, group_id1 = state[id1], group_id[id1], group_id1[id1]
sample_num = len(chrom2)
vec1 = np.zeros((state_num+1,3))
for i in range(state_num):
t_state = state_vec[i]
b1 = np.where(state==t_state)[0]
t1 = predicted_attention[b1]
vec1[i+1] = [len(b1),np.mean(t1),np.std(t1)]
t1 = vec1[1:(state_num+1),0]
vec1[1:(state_num+1),0] = t1/sample_num
vec2, vec3 = np.zeros((group_num,3)), np.zeros((group_num1,3))
for i in range(group_num):
b1 = np.where(group_id==group[i])[0]
t1 = predicted_attention[b1]
vec2[i] = [len(b1),np.mean(t1),np.std(t1)]
vec2[:,0] = vec2[:,0]/sample_num
for i in range(group_num1):
b1 = np.where(group_id1==group1[i])[0]
t1 = predicted_attention[b1]
vec3[i] = [len(b1),np.mean(t1),np.std(t1)]
vec3[:,0] = vec3[:,0]/sample_num
# mean_value = np.hstack((state_vec[:,np.newaxis],vec1[1:]))
mean_value1, std1 = np.mean(predicted_attention), np.std(predicted_attention)
print(thresh,mean_value1,std1)
vec1[0] = [sample_num,mean_value1,std1]
t1 = np.vstack((vec1,vec2,vec3))
fields = ['run_id','label','percentage','mean_value','std']
num1 = len(fields)
data_1 = pd.DataFrame(columns=fields)
data_1['run_id'] = [annot]*len(t1)
data_1['label'] = [-2]+list(state_vec)+list(group)+list(group1)
for i in range(2,num1):
data_1[fields[i]] = t1[:,i-2]
return data_1
# compare with rmsk
def compare_rmsk_sub1_pre(filename1,filename2,output_filename):
data1 = pd.read_csv(filename1,sep='\t')
# bin, swScore, genoName, genoStart, genoEnd, strand, repName, repClass, repFamily
colnames = list(data1)
data2 = data1.loc[:,['genoName','genoStart','genoEnd','strand','repName','repClass','repFamily']]
repName = np.unique(data2['repName'])
repClass = np.unique(data2['repClass'])
repFamily = np.unique(data2['repFamily'])
print(data2.shape)
b1 = filename1.find('.txt')
output_filename1 = filename1[0:b1]+'.1.txt'
data2.to_csv(output_filename1,index=False,header=False,sep='\t')
output_filename1 = filename1[0:b1]+'.repName.txt'
np.savetxt(output_filename1,repName,fmt='%s',delimiter='\t')
output_filename1 = filename1[0:b1]+'.repClass.txt'
np.savetxt(output_filename1,repClass,fmt='%s',delimiter='\t')
output_filename1 = filename1[0:b1]+'.repFamily.txt'
np.savetxt(output_filename1,repFamily,fmt='%s',delimiter='\t')
return data2
# compare with rmsk
# find overlapping of each rmsk family with each genomic locus
# input: filename1: rmsk file
# filename2: genomic loci file
def compare_rmsk_sub1(filename1,filename2,output_file_path,chrom_num=22):
fields = ['genoName','genoStart','genoEnd','strand','repName','repClass','repFamily']
data1 = pd.read_csv(filename1,header=None,sep='\t',names=fields)
# bin, swScore, genoName, genoStart, genoEnd, strand, repName, repClass, repFamily
colnames = list(data1)
# data_1 = data1.loc[:,['genoName','genoStart','genoEnd','strand','repName','repClass','repFamily']]
repName, repClass, repFamily = np.asarray(data1['repName']), np.asarray(data1['repClass']), np.asarray(data1['repFamily'])
repName_vec, repClass_vec, repFamily_vec = np.unique(repName), np.unique(repClass), np.unique(repFamily)
print(data1.shape)
chrom_vec1 = np.arange(1,chrom_num+1)
chrom_vec = ['chr%d'%(i) for i in chrom_vec1]
chrom_vec = np.asarray(chrom_vec)
chrom, start, stop = np.asarray(data1[colnames[0]]), np.asarray(data1[colnames[1]]), np.asarray(data1[colnames[2]])
data2 = pd.read_csv(filename2,header=None,sep='\t')
ref_chrom, ref_start, ref_stop = np.asarray(data2[0]), np.asarray(data2[1]), np.asarray(data2[2])
serial = np.asarray(data2[3])
colnames2 = list(data2)
region_len1 = stop-start
region_len2 = ref_stop-ref_start
chrom_dict1 = dict()
chrom_dict2 = dict()
region_num1, region_num2 = 0, 0
for i in range(chrom_num):
t_chrom = 'chr%d'%(i+1)
b1 = np.where(chrom==t_chrom)[0]
chrom_dict1[t_chrom] = b1
region_num1 += len(b1)
b2 = np.where(ref_chrom==t_chrom)[0]
chrom_dict2[t_chrom] = b2
region_num2 += len(b2)
print(t_chrom,len(b1),len(b2))
print(region_num1, region_num2)
print('repFamily',len(repFamily),repFamily)
repFamily_dict = dict()
list1 = []
for t_repFamily in repFamily_vec:
b1 = np.where(repFamily==t_repFamily)[0]
print(t_repFamily,len(b1))
t_chrom, t_start, t_stop = chrom[b1], start[b1], stop[b1]
t_repClass = repClass[b1]
list1.append([t_repFamily,t_repClass[0]])
list2 = []
for t_chrom1 in chrom_vec:
id1 = np.where(t_chrom==t_chrom1)[0]
if len(id1)==0:
continue
id2 = chrom_dict2[t_chrom1]
print(t_repFamily,t_chrom1,len(id1))
for t_id1 in id1:
t_start1, t_stop1 = t_start[t_id1], t_stop[t_id1]
b2 = np.where((ref_start[id2]<t_stop1)&(ref_stop[id2]>t_start1))[0]
if len(b2)>0:
b2 = id2[b2]
for t_id2 in b2:
overlap = np.min([t_stop1-ref_start[t_id2],ref_stop[t_id2]-t_start1,t_stop1-t_start1,region_len2[t_id2]])
list2.append([serial[t_id2],overlap,t_start1,t_stop1])
if len(list2)==0:
continue
list2 = np.asarray(list2)
b_1 = t_repFamily.find('?')
if b_1>=0:
t_repFamily1 = t_repFamily[0:b_1]+'_sub1'
else:
t_repFamily1 = t_repFamily
t_repClass1 = t_repClass[0]
b_2 = t_repClass1.find('?')
if b_2>=0:
t_repClass1 = t_repClass1[0:b_2]+'_sub1'
id_1 = mapping_Idx(serial,list2[:,0])
assert np.sum(id_1<0)==0
t_data2 = data2.loc[id_1,:]
t_overlap = list2[:,1]
t_data2['overlap'] = t_overlap
t_data2['start1'], t_data2['stop1'] = list2[:,2], list2[:,3]
t_data2 = t_data2.sort_values(by=[colnames2[3]])
print(np.max(t_overlap),np.min(t_overlap),np.median(t_overlap))
output_filename1 = '%s/%s.%s.overlap.txt'%(output_file_path,t_repFamily1,t_repClass1)
t_data2.to_csv(output_filename1,index=False,header=False,sep='\t')
output_filename2 = '%s/repName_repFamily.txt'%(output_file_path)
data_1 = pd.DataFrame(columns=['repFamily','repClass'],data=list1)
data_1.to_csv(output_filename2,index=False,sep='\t')
return data_1
def region_filter(data1,filename_centromere):
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
id1 = select_idx_centromere(chrom,start,stop,filename_centromere)
print('select_idx_centromere', len(chrom), len(id1), len(id1)/len(chrom))
data1 = data1.loc[id1,:]
data1.reset_index(drop=True,inplace=True)
return data1
# input is probability
class Sample_Concrete1(Layer):
"""
Layer for sample Concrete / Gumbel-Softmax variables.
"""
def __init__(self, tau0, k, n_steps, type_id, **kwargs):
# def __init__(self, tau0, k, n_steps):
self.tau0 = tau0
self.k = k
self.n_steps = n_steps
self.type_id = type_id
super(Sample_Concrete1, self).__init__(**kwargs)
def call(self, logits):
logits_ = K.permute_dimensions(logits, (0,2,1))
#[batchsize, 1, MAX_SENTS]
unif_shape = tf.shape(logits_)[0]
uniform = tf.random.uniform(shape =(unif_shape, self.k, self.n_steps),
minval = np.finfo(tf.float32.as_numpy_dtype).tiny,
maxval = 1.0)
gumbel = - K.log(-K.log(uniform))
eps = tf.compat.v1.keras.backend.constant(1e-12)
# print('eps:', eps)
# noisy_logits = (gumbel + logits_)/self.tau0
# logits_ = K.log(logits_) # the input is probability
if self.type_id==2:
logits_ = -K.log(-K.log(logits_ + eps)) # the input is probability
elif self.type_id==3:
logits_ = K.log(logits_ + eps) # the input is probability
# elif self.type_id==5:
# logits_ = -logits_
elif self.type_id==5:
eps1 = tf.compat.v1.keras.backend.constant(1+1e-12)
# x = Lambda(lambda x: x * 2)(layer)
logits_ = K.log(logits_ + eps1)
else:
pass
noisy_logits = (gumbel + logits_)/self.tau0
samples = K.softmax(noisy_logits)
samples = K.max(samples, axis = 1)
samples = K.expand_dims(samples, -1)
discrete_logits = K.one_hot(K.argmax(logits_,axis=-1), num_classes = self.n_steps)
discrete_logits = K.permute_dimensions(discrete_logits,(0,2,1))
# return K.in_train_phase(samples, discrete_logits)
return samples
def compute_output_shape(self, input_shape):
return input_shape
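# Minimal usage sketch for Sample_Concrete1 (the shapes and hyperparameters
# below are illustrative; Input and Model are imported locally in case this
# module does not import them at the top level):
def example_sample_concrete1(n_steps=50):
    from tensorflow.keras.layers import Input
    from tensorflow.keras.models import Model
    # per-position probabilities with shape (batch, n_steps, 1)
    logits = Input(shape=(n_steps,1))
    # tau0: Gumbel-Softmax temperature; k: number of samples per example;
    # type_id=2 maps the input probabilities through -log(-log(p))
    samples = Sample_Concrete1(tau0=0.5,k=1,n_steps=n_steps,type_id=2)(logits)
    return Model(inputs=logits,outputs=samples)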
def filter_region_signal_sub2_align(region_data1,region_data2,thresh_vec=[0.5]):
# species 1
# chrom1,start1,stop1,serial1 = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
# signal1 = np.asarray(data1['signal'])
# sample_num1 = len(chrom1)
colnames = list(region_data1)
region_chrom1, region_start1, region_stop1 = np.asarray(region_data1[colnames[0]]), np.asarray(region_data1[colnames[1]]), np.asarray(region_data1[colnames[2]])
region_serial1 = np.asarray(region_data1[colnames[3]])
region_label1 = np.asarray(region_data1['label'])
region_num1 = len(region_chrom1)
print(region_num1)
# species 2
# chrom2,start2,stop2,serial2 = np.asarray(data2['chrom']), np.asarray(data2['start']), np.asarray(data2['stop']), np.asarray(data2['serial'])
# signal2 = np.asarray(data2['signal'])
# sample_num2 = len(chrom2)
colnames = list(region_data2)
region_chrom2, region_start2, region_stop2 = np.asarray(region_data2[colnames[0]]), np.asarray(region_data2[colnames[1]]), np.asarray(region_data2[colnames[2]])
region_serial2 = np.asarray(region_data2[colnames[3]])
region_label2 = np.asarray(region_data2['label'])
region_num2 = len(region_chrom2)
print(region_num2)
print(region_serial1,region_serial2)
id_vec1 = np.zeros(region_num2,dtype=bool)
region_chrom_1 = np.asarray(['chr22']*region_num2)
region_pos_1 = np.zeros((region_num2,2),dtype=np.int64)
region_label_1 = np.zeros(region_num2,dtype=np.float32)
thresh1 = 200
for i in range(region_num1):
t_serial = region_serial1[i]
t_label1 = region_label1[i]
b1 = np.where(region_serial2==t_serial)[0]
t_chrom2 = region_chrom2[b1]
t_label2 = region_label2[b1]
num1 = len(b1)
if num1>0:
# print(t_serial,num1)
region_chrom_1[b1] = region_chrom1[i]
region_pos_1[b1] = [region_start1[i],region_stop1[i]]
region_label_1[b1] = t_label1
for j in range(num1):
try:
t_chrom_id = t_chrom2[j]
t_chrom_id = int(t_chrom_id[3:])
except:
continue
id1 = b1[j]
if region_stop2[id1]-region_start2[id1]<thresh1:
continue
temp1 = (t_label1>0.5)&(t_label2[j]>0.5)
temp2 = (t_label1<0.5)&(t_label2[j]<0.5)
id_vec1[id1] = (temp1|temp2)
id1 = np.where(id_vec1>0)[0]
region_data2['chrom1'], region_data2['start1'], region_data2['stop1'] = region_chrom_1,region_pos_1[:,0],region_pos_1[:,1]
region_data2['label1'] = region_label_1
region_data2 = region_data2.loc[id1,:]
region_data2.reset_index(drop=True,inplace=True)
# region_data2.sort_values(by=[colnames[0]])
return region_data2
def filter_region_signal_sub1(data1,region_data,thresh_vec):
chrom,start,stop,serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
signal = np.asarray(data1['signal'])
sample_num = len(chrom)
colnames = list(region_data)
region_chrom, region_start, region_stop = np.asarray(region_data[colnames[0]]), np.asarray(region_data[colnames[1]]), np.asarray(region_data[colnames[2]])
region_num = len(region_chrom)
print(sample_num,region_num)
dict1 = dict()
dict2 = dict()
thresh_num = len(thresh_vec)
flag_vec = np.zeros((region_num,thresh_num),dtype=np.int8)
thresh_vec1 = np.quantile(signal,thresh_vec)
thresh_vec1 = list(thresh_vec1)+[np.max(signal)+1e-05]
t_vec1 = -np.ones((region_num,5),dtype=np.float32)
for i in range(region_num):
b2 = np.where((chrom==region_chrom[i])&(start<region_stop[i])&(stop>region_start[i]))[0]
if len(b2)>0:
t_value = [np.max(signal[b2]),np.min(signal[b2]),np.median(signal[b2]),np.mean(signal[b2])]
ranking = np.sum(signal<t_value[2])/sample_num
t_vec1[i] = t_value+[ranking]
for l in range(thresh_num):
thresh_1, thresh_2 = thresh_vec1[l], thresh_vec1[l+1]
if (t_value[2]>=thresh_1) and (t_value[2]<thresh_2):
flag_vec[i,l] = 1
for l in range(thresh_num):
thresh_1, thresh_2 = thresh_vec1[l], thresh_vec1[l+1]
id1 = np.where(flag_vec[:,l]==1)[0]
dict1[thresh_vec[l]] = id1
id2 = np.where((signal>=thresh_1)&(signal<thresh_2))[0]
print(id2)
dict2[thresh_vec[l]] = id2
print(thresh_1,thresh_2,len(id1),len(id1)/region_num)
print(thresh_1,thresh_2,len(id2),len(id2)/sample_num)
fields = ['max','min','median','mean','label']
num1 = len(fields)
region_data['serial'] = np.arange(1,region_num+1)
for i in range(num1):
t_id = fields[i]
region_data[t_id] = t_vec1[:,i]
# label1 = -np.ones(region_num,dtype=np.float32)
# for i in range(thresh_num):
# id1 = dict1[thresh_vec[i]]
# label1[id1] = thresh_vec[i]
# region_data['label'] = label1
return dict1, dict2, region_data
def filter_region_signal(filename1,region_filename,output_filename,thresh_vec):
data1 = pd.read_csv(filename1,sep='\t')
region_data = pd.read_csv(region_filename,header=None,sep='\t')
# thresh_vec = [0,0.25,0.5,0.75]
dict1, dict2, region_data = filter_region_signal_sub1(data1,region_data,thresh_vec)
region_data.to_csv(output_filename,index=False,sep='\t')
return True
def filter_region_signal_align(region_filename1,region_filename2,output_filename):
region_data1 = pd.read_csv(region_filename1,sep='\t')
region_data2_ori = pd.read_csv(region_filename2,sep='\t')
region_data2 = filter_region_signal_sub2_align(region_data1,region_data2_ori)
print(len(region_data2_ori),len(region_data2))
region_data2.to_csv(output_filename,index=False,header=False,sep='\t')
def find_serial(chrom,chrom_num,chrom_vec=[],type_id=0):
if len(chrom_vec)==0:
# build a new list instead of mutating the default argument across calls
chrom_vec = ['chr%d'%(i) for i in range(1,chrom_num+1)]
if type_id==1:
chrom_vec = chrom_vec+['chrX','chrY']
serial_vec = []
for t_chrom in chrom_vec:
b1 = np.where(chrom==t_chrom)[0]
if len(b1)>0:
serial_vec.extend(b1)
print(len(chrom),chrom_vec,len(serial_vec))
return np.asarray(serial_vec)
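# Example (toy chromosome labels; returns the row positions ordered by chromosome):
def example_find_serial():
    chrom = np.asarray(['chr1','chr2','chr1','chrX'])
    # positions of chr1 entries followed by chr2 entries: [0, 2, 1]
    return find_serial(chrom,chrom_num=2,chrom_vec=[],type_id=0)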
# training and test chromosome list
def find_list(train_vec,chrom_vec,test_vec=[]):
vec3_pre = train_vec
vec3, vec3_1 = [], []
if len(test_vec)==0:
for t_value1 in vec3_pre:
t_list1_1, t_list2_1 = [],[]
for t_list1 in t_value1:
str1 = [str(t1) for t1 in t_list1]
t_list1_1.append(','.join(str1))
t_list2 = np.sort(list(set(chrom_vec)-set(t_list1)))
str2 = [str(t2) for t2 in t_list2]
t_list2_1.append(','.join(str2))
vec3.append(t_list1_1)
vec3_1.append(t_list2_1)
else:
vec3_pre_1 = test_vec
vec3, vec3_1 = [], []
for (t_value1,t_value2) in zip(vec3_pre,vec3_pre_1):
t_list1_1 = []
for t_list1 in t_value1:
str1 = [str(t1) for t1 in t_list1]
t_list1_1.append(','.join(str1))
t_list2_1 = []
for t_list2 in t_value2:
str2 = [str(t2) for t2 in t_list2]
t_list2_1.append(','.join(str2))
vec3.append(t_list1_1)
vec3_1.append(t_list2_1)
return vec3, vec3_1
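# Example (hypothetical chromosome partitions): with no test_vec, the test list
# is the complement of each training list within chrom_vec:
def example_find_list():
    chrom_vec = list(range(1,6))
    train_vec = [[[1,2,3],[2,3,4]]]
    # returns vec3 = [['1,2,3','2,3,4']] and vec3_1 = [['4,5','1,5']]
    return find_list(train_vec,chrom_vec)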
def load_data_sub1(file_path,label_ID,sel_id1,id1,queue1=None):
label_id,label_serial,t_filename,local_id = label_ID[sel_id1]
t_filename1 = '%s/%s'%(file_path,t_filename)
start1 = time.time()
with h5py.File(t_filename1,'r') as fid:
# serial2 = fid["serial"][:]
x_train = fid["vec"][:]
stop1 = time.time()
print(id1,sel_id1,stop1-start1)
# put the result on the queue so the parent process in load_data_1 can collect it
if queue1 is not None:
queue1.put((x_train,sel_id1))
return (x_train,sel_id1)
# load training data
def load_data_1(file_path,label_ID,id_vec):
queue1 = mp.Queue()
num1 = len(id_vec)
print("processes")
start = time.time()
processes = [mp.Process(target=load_data_sub1,
args=(file_path,label_ID,id_vec[id1],id1,queue1)) for id1 in range(num1)]
# Run processes
for p in processes:
p.start()
results = [queue1.get() for p in processes]
print(len(results))
# Exit the completed processes
print("join")
for p in processes:
p.join()
end = time.time()
print("use time load vectors: %s %s %s"%(start, end, end-start))
list1 = results
return list1
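# Example usage (hypothetical file layout; each label_ID entry is
# (label_id, label_serial, filename, local_id), as unpacked in load_data_sub1):
def example_load_data_1():
    label_ID = [(0,0,'feature_chr1.h5',0),(1,1,'feature_chr2.h5',1)]
    return load_data_1('./data',label_ID,id_vec=[0,1])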
# load kmer frequency feature
def prep_data_sequence_kmer(filename1,kmer_size,output_filename=''):
kmer_dict1 = kmer_dict(kmer_size)
f_list, f_mtx, serial = load_seq_2(filename1, kmer_size, kmer_dict1, sel_idx=[])
return f_list, serial
# load kmer frequency feature
def prep_data_sequence_kmer_chrom(filename1,filename2,kmer_size,chrom_vec=[],filename_prefix='',save_mode=1,region_size=1):
kmer_dict1 = kmer_dict(kmer_size)
file1 = pd.read_csv(filename1,sep='\t')
seq1 = np.asarray(file1['seq'])
serial1 = np.asarray(file1['serial'])
file2 = pd.read_csv(filename2,header=None,sep='\t')
chrom, start, stop, ref_serial = np.asarray(file2[0]), np.asarray(file2[1]), np.asarray(file2[2]), np.asarray(file2[3])
n1, n2 = len(serial1), len(ref_serial)
if n1!=n2:
print('error!',n1,n2)
return
b1 = (serial1!=ref_serial)
count1 = np.sum(b1)
if count1 > 0:
print('error!',count1,n1,n2)
return
list1 = []
chrom_num = len(chrom_vec)
for chrom_id in chrom_vec:
sel_idx = np.where(chrom=='chr%d'%(chrom_id))[0]
if len(sel_idx)==0:
continue
t_serial = ref_serial[sel_idx]
id1 = mapping_Idx(serial1,t_serial)
b1 = (id1>=0)
n1,n2 = np.sum(b1), len(t_serial)
if n1!=n2:
print('error!',chrom_id,n1,n2)
sel_idx = id1[b1]
# f_list, chrom_id = load_seq_2_kmer1(seq1,serial1,kmer_size,kmer_dict1,chrom_id=chrom_id,sel_idx=sel_idx)
list1.append(sel_idx)
feature_dim = len(kmer_dict1)
num_region = len(serial1)
if region_size<=1:
f_list = np.zeros((num_region,feature_dim))
else:
num_subregion = int(np.ceil(len(seq1[0])/region_size))
f_list = np.zeros((num_region,num_subregion,feature_dim))
queue1 = mp.Queue()
# chrom_vec = range(20,22)
# chrom_vec = [1,9,10]
# chrom_vec = range(1,6)
print("processes")
start = time.time()
# processes = [mp.Process(target=self._compute_posteriors_graph_test, args=(len_vec, X, region_id,self.posteriors_test,self.posteriors_test1,self.queue)) for region_id in range(0,num_region)]
if region_size<2:
processes = [mp.Process(target=load_seq_2_kmer1,
args=(seq1, serial1, kmer_size, kmer_dict1, chrom_vec[i], list1[i], queue1,
filename_prefix, save_mode)) for i in range(chrom_num)]
else:
processes = [mp.Process(target=load_seq_2_kmer1_subregion,
args=(seq1, serial1, kmer_size, kmer_dict1, chrom_vec[i], list1[i], region_size,
queue1, filename_prefix, save_mode)) for i in range(chrom_num)]
# Run processes
for p in processes:
p.start()
results = [queue1.get() for p in processes]
print(len(results))
# Exit the completed processes
print("join")
for p in processes:
p.join()
end = time.time()
print("use time load chromosomes: %s %s %s"%(start, end, end-start))
chrom_num = len(chrom_vec)
chrom_vec1 = np.zeros(chrom_num)
output_filename_list = []
if save_mode==1:
for i in range(0,chrom_num):
vec1 = results[i]
chrom_id, sel_idx = vec1[0], vec1[1]
chrom_vec1[i] = chrom_id
if save_mode==1:
output_filename1 = vec1[-1]
output_filename_list.append(output_filename1)
# sorted_idx = np.argsort(chrom_vec1)
# output_filename_list = np.asarray(output_filename_list)
# output_filename_list = output_filename_list[sorted_idx]
with h5py.File(output_filename1,'r') as fid:
t_serial = fid["serial"][:]
fmtx = fid["vec"][:]
else:
t_serial, fmtx = vec1[2], vec1[3]
id1 = mapping_Idx(serial1,t_serial)
b1 = (id1!=sel_idx)
count1 = np.sum(b1)
if count1>0:
print('error!',chrom_id)
print(chrom_id,count1,len(sel_idx))
f_list[sel_idx] = fmtx
f_list = np.asarray(f_list)
print('kmer feature',f_list.shape)
# output_filename = '%s_kmer%d.h5'(filename_prefix,kmer_size)
# with h5py.File(output_filename,'w') as fid:
# fid.create_dataset("serial", data=ref_serial, compression="gzip")
# fid.create_dataset("vec", data=f_list, compression="gzip")
return f_list, serial1
# merge files
def test_1(filename_list,output_filename_list):
num1 = len(filename_list)
chrom_numList = []
for i in range(num1):
filename_list1 = filename_list[i]
output_filename1 = output_filename_list[i]
data2, t_chrom_numList = select_region1_merge(filename_list1,output_filename1,type_id1=0,type_id2=1)
chrom_numList.append(t_chrom_numList)
return chrom_numList
def train_test_index_chromosome(x,y,group_label,chrom_idvec,train_chromvec,test_chromvec,ratio=0.1):
id_train_1, id_test = [], []
for chrom_id in train_chromvec:
id1 = np.where(chrom_idvec==chrom_id)[0]
id_train_1.extend(id1)
for chrom_id in test_chromvec:
id1 = np.where(chrom_idvec==chrom_id)[0]
id_test.extend(id1)
id_train_1, id_test = np.asarray(id_train_1), np.asarray(id_test)
id_train, id_valid, y_train, y_valid, id_train1, id_valid1 = train_test_split_group(id_train_1,y[id_train_1],group_label[id_train_1],ratio=ratio)
return id_train, id_valid, id_test
def train_test_split_group(x,y,group_label,ratio=0.2):
group_label_vec = np.unique(group_label)
num1 = len(group_label_vec)
id1 = np.arange(num1)
id_1, id_2, y_train_group, y_valid_group = train_test_split(id1, group_label_vec, test_size=ratio, shuffle=True, random_state=42)
# num2 = x.shape[0]
# id_train_1 = np.zeros(num2,dtype=bool)
# for t_id in id_1:
# id_train_1 = id_train_1|(group_label==group_label_vec[t_id])
# id_train = np.where(id_train_1>0)[0]
id_train, id_valid = [], []
for t_id in id_1:
id_train.extend(np.where(group_label==group_label_vec[t_id])[0])
for t_id in id_2:
id_valid.extend(np.where(group_label==group_label_vec[t_id])[0])
id_train, id_valid = np.asarray(id_train), np.asarray(id_valid)
x_train, x_valid = x[id_train], x[id_valid]
y_train, y_valid = y[id_train], y[id_valid]
return x_train, x_valid, y_train, y_valid, id_train, id_valid
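# Example (toy data; samples sharing a group label always land on the same
# side of the split):
def example_train_test_split_group():
    x = np.arange(10).reshape(-1,1)
    y = np.arange(10)
    group_label = np.asarray([0,0,1,1,2,2,3,3,4,4])
    return train_test_split_group(x,y,group_label,ratio=0.2)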
# merge files
def run_1_merge(run_idlist,config):
if 'file_path' in config:
file_path = config['file_path']
else:
file_path = './'
type_id, type_id1 = config['type_id_1'], config['type_id1_1']
feature_id1 = config['feature_id1']
filename_list, output_filename_list = [], []
num1 = len(run_idlist)
vec1 = np.zeros(num1,dtype=np.int32)
chrom_numList = []
for pair1 in run_idlist:
run_id1, run_id2, method1 = pair1
if 'filename_list1' in config:
print('filename_list1',config['filename_list1'][run_id1])
filename_list_1 = config['filename_list1'][run_id1]
filename_list1, output_filename = filename_list_1[0], filename_list_1[1]
else:
filename1 = '%s/feature_transform_%d_%d.1.txt'%(file_path,run_id1,method1)
filename2 = '%s/feature_transform_%d_%d.1.txt'%(file_path,run_id2,method1)
output_filename = '%s/test_vec2_%d_%d_[%d].%d_%d.1.txt'%(file_path,run_id1,method1,feature_id1,type_id,type_id1)
filename_list1 = [filename1,filename2]
filename_list.append(filename_list1)
output_filename_list.append(output_filename)
if os.path.exists(output_filename)==False:
data2, t_chrom_numList = select_region1_merge(filename_list1,output_filename,type_id1=0,type_id2=1)
else:
num1 = len(filename_list1)
t_chrom_numList = []
if type_id1==1:
vec1 = list(range(num1-1,-1,-1))
else:
vec1 = list(range(num1))
for i in vec1:
data_2 = pd.read_csv(filename_list1[i],sep='\t')
t_chrom_numList.append(np.unique(data_2['chrom']))
print(pair1,output_filename,t_chrom_numList)
chrom_numList.append(t_chrom_numList)
return filename_list, output_filename_list, chrom_numList
# merge estimation files
def test_merge_1(run_idlist,output_filename,config,mode=1):
if 'file_path' in config:
file_path = config['file_path']
else:
file_path = './'
# config.update({'type_id_1':1, 'type_id1_1':0, 'feature_id1':feature_id1})
# run_idlist = list(np.vstack((t_list1,t_list2,t_list3)).T)
cell_idtype, method1 = config['cell_type1'], config['method1']
filename_list, output_filename_list, chrom_numList = run_1_merge(run_idlist,config)
run_idlist = np.asarray(run_idlist)
run_idlist1 = run_idlist[:,0]
list1 = []
print(run_idlist1,output_filename_list,chrom_numList)
for (run_id,filename1,t_chrom_numList) in zip(run_idlist1,output_filename_list,chrom_numList):
config.update({'chrom_vec1_pre':t_chrom_numList[0]})
config.update({'cell_type1':cell_idtype,'method1':method1})
data1 = compute_mean_std(run_id, filename1, config)
print(run_id,t_chrom_numList,data1.shape)
list1.append(data1)
data_1 = pd.concat(list1, axis=0, join='outer', ignore_index=True,
keys=None, levels=None, names=None, verify_integrity=False, copy=True)
# if mode==0:
# data_1.to_csv(output_filename,index=False,sep='\t')
# else:
# data_pre = pd.read_csv(output_filename,sep='\t')
# data_2 = pd.concat([data_pre,data_1], axis=0, join='outer', ignore_index=True,
# keys=None, levels=None, names=None, verify_integrity=False, copy=True)
# data_2.to_csv(output_filename,index=False,sep='\t')
if (os.path.exists(output_filename)==True) and (mode==1):
data_pre = pd.read_csv(output_filename,sep='\t')
data_2 = pd.concat([data_pre,data_1], axis=0, join='outer', ignore_index=True,
keys=None, levels=None, names=None, verify_integrity=False, copy=True)
data_2.to_csv(output_filename,index=False,sep='\t',float_format='%.6f')
else:
data_1.to_csv(output_filename,index=False,sep='\t',float_format='%.6f')
return True
def table_format(filename1,pre_colnames=[],type_id1=0):
data1 = pd.read_csv(filename1,sep='\t')
colnames = list(data1)
if type_id1==0:
if 'method' in colnames:
local_colnames = colnames[4:]
else:
local_colnames = colnames[2:]
if 'train_chrNum' in colnames:
local_colnames = local_colnames[0:-1]
if len(pre_colnames)==0:
pre_colnames = list(np.setdiff1d(colnames,local_colnames,assume_unique=True))
else:
num1 = len(pre_colnames)
local_colnames = colnames[num1:]
data_1 = data1.loc[:,local_colnames]
data_1 = np.asarray(data_1)
data_2 = data1.loc[:,pre_colnames]
data_2 = np.asarray(data_2)
num_sample, sel_num = data_1.shape
vec1 = np.ravel(data_1)
vec2 = np.tile(local_colnames,num_sample)
print(data_1.shape,data_2.shape)
data_2 = np.repeat(data_2,sel_num,axis=0)
print(data_2.shape)
data3 = pd.DataFrame(columns=pre_colnames,data=data_2)
data3['value'] = vec1
data3['metrics'] = vec2
id1 = filename1.find('txt')
filename2 = filename1[0:id1]+'copy1.txt'
data3.to_csv(filename2,index=False,sep='\t',float_format='%.7f')
return True
def select_idx_centromere(chrom,start,stop,filename_centromere=''):
centromere = pd.read_csv(filename_centromere,header=None,sep='\t')
chrom1, start1, stop1 = np.asarray(centromere[0]), np.asarray(centromere[1]), np.asarray(centromere[2])
num1 = len(chrom1)
list1 = []
for i in range(num1):
t_chrom1, t_start1, t_stop1 = chrom1[i], start1[i], stop1[i]
b1 = np.where((chrom==t_chrom1)&(start<t_stop1)&(stop>t_start1))[0]
list1.extend(b1)
list1 = np.asarray(list1)
id1 = np.arange(len(chrom))
id1 = np.setdiff1d(id1,list1)
print(len(id1),len(list1),len(chrom1))
return id1
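# Example usage (hypothetical filenames; the centromere file is a headerless
# chrom/start/stop table as read above):
def example_select_idx_centromere():
    data1 = pd.read_csv('estimation.txt',sep='\t')
    chrom, start, stop = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop'])
    # keep the loci that do not overlap any centromere region
    id1 = select_idx_centromere(chrom,start,stop,'centromere.txt')
    return data1.loc[id1,:].reset_index(drop=True)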
# input: estimated attention, type_id: training, validation, or test data
# output: ranking of attention
def select_region1_sub(filename,type_id,data1=[],filename_centromere=''):
if len(data1)==0:
data1 = pd.read_csv(filename,sep='\t')
colnames = list(data1)
if filename_centromere!='':
chrom, start, stop, serial = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['serial'])
id1 = select_idx_centromere(chrom,start,stop,filename_centromere)
print('select_idx_centromere', len(chrom), len(id1), len(id1)/len(chrom))
data1 = data1.loc[id1,:]
# data1.reset_index(drop=True,inplace=True)
data1.reset_index(drop=True,inplace=True)
# chrom start stop serial signal predicted_signal predicted_attention
chrom, start, serial = data1['chrom'], data1['start'], data1['serial']
chrom, start, serial = np.asarray(chrom), np.asarray(start), np.asarray(serial)
if 'predicted_attention' in data1:
predicted_attention = data1['predicted_attention']
else:
predicted_attention = np.zeros(len(chrom),dtype=np.float32)
predicted_attention = np.asarray(predicted_attention)
ranking = stats.rankdata(predicted_attention,'average')/len(predicted_attention)
rank1 = np.zeros((len(predicted_attention),2))
rank1[:,0] = ranking
flag1 = 0
if 'predicted_attention1' in colnames:
flag1 = 1
predicted_attention1 = np.asarray(data1['predicted_attention1'])
ranking1 = stats.rankdata(predicted_attention1,'average')/len(predicted_attention1)
rank_1 = np.zeros((len(predicted_attention1),2))
rank_1[:,0] = ranking1
chrom_vec = np.unique(chrom)
for t_chrom in chrom_vec:
b1 = np.where(chrom==t_chrom)[0]
t_attention = predicted_attention[b1]
t_ranking = stats.rankdata(t_attention,'average')/len(t_attention)
rank1[b1,1] = t_ranking
if flag1==1:
t_attention1 = predicted_attention1[b1]
t_ranking1 = stats.rankdata(t_attention1,'average')/len(t_attention1)
rank_1[b1,1] = t_ranking1
data1['Q1'] = rank1[:,0] # rank across all the included chromosomes
data1['Q2'] = rank1[:,1] # rank by each chromosome
data1['typeId'] = np.int8(type_id*np.ones(len(rank1)))
if flag1==1:
data1['Q1_1'] = rank_1[:,0] # rank across all the included chromosomes
data1['Q2_1'] = rank_1[:,1] # rank by each chromosome
t1 = np.hstack((rank1,rank_1))
data1['Q1(2)'] = np.max(t1[:,[0,2]],axis=1)
data1['Q2(2)'] = np.max(t1[:,[1,3]],axis=1)
return data1,chrom_vec
# merge estimated attention from different training/test splits
# type_id1: chromosome order; type_id2: training: 0, test: 1, valid: 2
# filename_centromere: centromere file
def select_region1_merge(filename_list,output_filename,type_id1=0,type_id2=1,filename_centromere=''):
list1 = []
chrom_numList = []
# b1 = np.where((self.chrom!='chrX')&(self.chrom!='chrY'))[0]
# ref_chrom, ref_start, ref_serial = self.chrom[b1], self.start[b1], self.serial[b1]
# num_sample = len(ref_chrom)
i = 0
serial1 = []
num1 = len(filename_list)
vec1 = list(range(num1))
if type_id1==1:
vec1 = list(range(num1-1,-1,-1))
for i in vec1:
filename1 = filename_list[i]
# data1: chrom, start, stop, serial, signal, predicted_signal, predicted_attention, Q1, Q2, typeId
# typeId: training: 0, test: 1, valid: 2
data1, chrom_vec = select_region1_sub(filename1,type_id2,filename_centromere=filename_centromere)
print(filename1,len(data1))
# list1.append(data1)
# if i==0:
# serial1 = np.asarray(data1['serial'])
print(serial1)
t_serial = np.asarray(data1['serial'],dtype=np.int64)
t_serial2 = np.setdiff1d(t_serial,serial1)
serial1 = np.union1d(serial1,t_serial)
print(len(t_serial),len(t_serial2),len(serial1))
id1 = mapping_Idx(t_serial,t_serial2)
colnames = list(data1)
data1 = data1.loc[id1,colnames]
list1.append(data1)
chrom_numList.append(chrom_vec)
data2 = pd.concat(list1, axis=0, join='outer', ignore_index=True,
keys=None, levels=None, names=None, verify_integrity=False, copy=True)
print('sort')
data2 = data2.sort_values(by=['serial'])
data2.to_csv(output_filename,index=False,sep='\t',float_format='%.6f')
return data2, chrom_numList
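# Example usage (hypothetical per-split estimation files; type_id1=0 keeps the
# file order and type_id2=1 marks the merged regions as test data):
def example_select_region1_merge():
    filename_list = ['estimation_split1.txt','estimation_split2.txt']
    return select_region1_merge(filename_list,'estimation.merged.txt',
                                type_id1=0,type_id2=1,filename_centromere='centromere.txt')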
# sample region without replacement
def sample_region_sub1(sample_num,sel_num,sample_weight,sel_ratio=1):
if sel_ratio!=1:
sel_num = int(sample_num*sel_ratio)
select_serial = -np.ones(sel_num,dtype=np.int32)
# prob1 = np.random.rand(sample_num)
# select_serial = np.where(prob1<=sample_weight)[0]
vec1 = np.asarray(range(0,sample_num))
limit1 = sel_num*100
i, cnt1 = 0, 0
while i < sel_num:
i1 = vec1[np.random.randint(0,len(vec1))] # sample from all remaining candidates
prob1 = np.random.rand()
cnt1 = cnt1+1
if prob1<=sample_weight[i1]:
select_serial[i] = i1
i = i+1
vec1 = np.setdiff1d(vec1,select_serial[0:i],assume_unique=True)
if cnt1 > limit1:
# fall back to the remaining highest-weight regions and stop
sorted_idx = np.argsort(-sample_weight[vec1])
select_serial[i:sel_num] = vec1[sorted_idx[0:sel_num-i]]
break
# while i < sel_num:
# i1 = vec1[np.random.randint(0,sel_num-i)]
# prob1 = np.random.rand()
# cnt1 = cnt1+1
# if prob1>=sample_weight[i1]:
# select_serial[i] = i1
# i = i+1
# vec1 = np.setdiff1d(vec1,select_serial[0:i],assume_unique=True)
# if cnt1 > limit1:
# sorted_idx = np.argsort(sample_weight[vec1])
# select_serial[i:sel_num] = vec1[sorted_idx[i:sel_num]]
return select_serial
# sample regions
# type_id: 0: sample using sel_num; 1: sample by each region
def sample_region(sample_weight,sel_num,thresh=0.6,sel_ratio=1,type_id=1,epsilon=0.15,thresh_1=0.9):
sample_num = len(sample_weight)
if sel_ratio!=1:
sel_num = int(sample_num*sel_ratio)
# random.seed(seed1)
# tf.compat.v1.set_random_seed(seed1)
# seed1 = 0
# np.random.seed(seed1)
if type_id==0:
select_serial = sample_region_sub1(sample_num,sel_num,sample_weight)
elif type_id==1:
prob1 = np.random.rand(sample_num)
print(np.max(sample_weight),np.min(sample_weight),np.mean(sample_weight),np.median(sample_weight))
select_serial = np.where(prob1<=sample_weight)[0]
# select_serial = np.where((prob1<=sample_weight)&(sample_weight>thresh))[0]
print(sample_num,len(select_serial))
b1 = np.where(sample_weight[select_serial]<thresh)[0]
num1 = len(b1)
prob2 = np.random.rand(num1)
b2 = np.where(prob2>epsilon)[0]
id2 = b1[b2]
serial2 = select_serial[id2]
select_serial = np.setdiff1d(select_serial,serial2)
thresh1 = thresh_1
b1 = np.where(sample_weight>thresh1)[0]
select_serial = np.union1d(select_serial,b1)
print(num1,len(serial2),len(b1),len(select_serial))
else:
prob1 = np.random.rand(sample_num)
select_serial = np.where(prob1<=sample_weight)[0]
t_sel_num = len(select_serial)
if t_sel_num<sel_num:
sample_num1 = sample_num-t_sel_num
sel_num1 = sel_num-t_sel_num
            vec1 = np.setdiff1d(range(0,sample_num),select_serial,assume_unique=True)
            sample_weight1 = sample_weight[vec1]
            select_serial1 = sample_region_sub1(sample_num1,sel_num1,sample_weight1)
            # map the sub-sample indices back to the original index space
            select_serial = np.union1d(select_serial,vec1[select_serial1])
else:
sample_weight1 = sample_weight[select_serial]
sorted_idx = np.argsort(-sample_weight1)
select_serial = select_serial[sorted_idx[0:sel_num]]
print('select_serial',len(select_serial),len(select_serial)/sample_num)
return select_serial
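# A minimal sketch: with type_id=1, each region is kept with probability equal
# to its weight; kept regions with weight below thresh survive only with
# probability epsilon, and regions with weight above thresh_1 are always kept.
# weights = np.random.rand(1000)
# sel_idx = sample_region(weights,sel_num=0,thresh=0.6,type_id=1,epsilon=0.15,thresh_1=0.9)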
## evaluation
# compare estimated score with existing elements
# filename1: estimated attention
# filename2: ERCE
def compare_with_regions(filename1, filename2, output_filename, output_filename1, tol=2, filename1a=''):
data1 = pd.read_csv(filename1,sep='\t')
# data1a = pd.read_csv(filename1a,sep='\t')
colnames1 = list(data1)
chrom1, serial1 = np.asarray(data1['chrom']), np.asarray(data1['serial'])
# b1 = np.where(chrom1!='chr16')[0]
# data_1 = data1.loc[b1,colnames1]
# data3 = pd.concat([data_1,data1a], axis=0, join='outer', ignore_index=True,
# keys=None, levels=None, names=None, verify_integrity=False, copy=True)
data3 = data1
print(list(data3))
# data3.sort_values(by=['serial'])
num1 = data3.shape[0]
print(num1)
label1 = np.zeros(num1)
chrom1, start1, stop1 = data3['chrom'], data3['start'], data3['stop']
attention1 = data3[colnames1[-1]]
# load ERCE files
data2 = pd.read_csv(filename2,header=None,sep='\t')
colnames2 = list(data2)
col1, col2, col3 = colnames2[0], colnames2[1], colnames2[2]
chrom2, start2, stop2 = data2[col1], data2[col2], data2[col3]
num2 = len(chrom2)
score1 = -np.ones(num2)
for i in range(0,num2):
t_chrom, t_start, t_stop = chrom2[i], start2[i], stop2[i]
b1_ori = np.where((chrom1==t_chrom)&(start1<t_stop)&(stop1>t_start))[0]
if len(b1_ori)==0:
continue
# tolerance of the region
        s1 = max(0,b1_ori[0]-tol)
        s2 = min(len(chrom1),b1_ori[-1]+tol+1)  # extend from the last overlapping locus
b1 = list(range(s1,s2))
# b1 = np.where((chrom1==t_chrom)&(start1>=t_start)&(stop1<=t_stop))[0]
label1[b1_ori] = 1+i
# select the maximum score in a region
t_score = np.max(attention1[b1])
score1[i] = t_score
if i%100==0:
print(i,t_score)
data3['label'] = label1
data3.to_csv(output_filename,index=False,sep='\t')
data2['score'] = score1
# b1 = np.where(score1>0)[0]
# data2 = data2.loc[b1,list(data2)]
data2.to_csv(output_filename1,index=False,sep='\t')
return data2, data3
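# A minimal usage sketch (hypothetical filenames): label each genomic locus
# with the ERCE it overlaps and record the maximum attention score per ERCE.
# data2, data3 = compare_with_regions('estimate.txt','erce.bed','loci_labeled.txt','erce_scored.txt',tol=2)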
# sample regions randomly to compare with elements
def compare_with_regions_sub1(chrom1,start1,stop1,attention1,
sample_num,region_len,chrom_size,bin_size,tol):
start_pos = np.random.permutation(chrom_size-int(region_len/bin_size)-1)
start_pos = start1[start_pos+tol]
sel_num = attention1.shape[1]
vec2 = -np.ones((sample_num,2+2*sel_num))
# start_pos1 = start_pos[0:sample_num]
start_pos1 = start_pos
pos1 = np.vstack((start_pos1,start_pos1+region_len)).T
# print(chrom_size,region_len,region_len/bin_size)
# print(len(start_pos),len(pos1),pos1[0:2])
num1 = len(pos1)
cnt1 = 0
# attention_1 = attention1[:,0]
# attention_2 = attention1[:,1]
for i in range(0,num1):
t_pos = pos1[i]
len1 = (t_pos[1]-t_pos[0])/bin_size
# t_start2 = max(0,t_pos[0]-tol*bin_size)
# t_stop2 = min(stop1[-1],t_pos[1]+tol*bin_size)
t_start2, t_stop2 = t_pos[0], t_pos[1]
b1_ori = np.where((start1<t_stop2)&(stop1>t_start2))[0]
# print(t_pos,t_start2,t_stop2,(t_stop2-t_start2)/bin_size,len(b1_ori))
if len(b1_ori)<len1*0.5:
continue
# s1 = max(0,b1_ori[0]-tol)
# s2 = min(t_chrom_size,b1_ori[0]+tol+1)
# b1 = b2[s1:s2]
# vec2[cnt1] = np.max(attention_1[b1_ori])
t_vec2 = []
for l in range(sel_num):
t_vec2.extend([np.max(attention1[b1_ori,l]),np.mean(attention1[b1_ori,l])])
vec2[cnt1] = [t_start2,t_stop2]+t_vec2
cnt1 += 1
if cnt1>=sample_num:
break
vec2 = vec2[vec2[:,0]>=0]
return vec2
# input: data1: data to query regions from
# position: the positions to query, as (chrom, start, stop) tuples
def query_region(data1,position,sel_column=[]):
colnames = list(data1)
chrom1, start1, stop1 = data1[colnames[0]], data1[colnames[1]], data1[colnames[2]]
chrom1, start1, stop1 = np.asarray(chrom1), np.asarray(start1), np.asarray(stop1)
vec1, vec2 = [], []
if len(sel_column)>0:
for t_sel_column in sel_column:
if not(t_sel_column in colnames):
print('column not found', t_sel_column)
                return [], []
else:
sel_column = colnames[0:2]
value1 = np.asarray(data1[sel_column])
for t_position in position:
t_chrom, t_start, t_stop = t_position
b1 = np.where((chrom1==t_chrom)&(t_start<stop1)&(t_stop>start1))[0]
vec1.append(value1[b1])
vec2.append(b1)
return vec1, vec2
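# A minimal sketch, assuming data1 is a DataFrame whose first three columns are
# chrom, start, stop: query the signal column at two positions; vec1 holds the
# selected values and vec2 the matching row indices.
# positions = [('chr1',100000,200000),('chr2',500000,600000)]
# vec1, vec2 = query_region(data1,positions,sel_column=['signal'])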
# sample regions randomly to compare with elements
def compare_with_regions_random1(filename1,filename2,output_filename,output_filename1,
output_filename2, tol=1, sample_num=200, type_id=1):
data1 = pd.read_csv(filename1,sep='\t')
# data1a = pd.read_csv(filename1a,sep='\t')
# colnames1 = list(data1)
# chrom1, serial1 = np.asarray(data1['chrom']), np.asarray(data1['serial'])
# b1 = np.where(chrom1!='chr16')[0]
# data_1 = data1.loc[b1,colnames1]
# data3 = pd.concat([data_1,data1a], axis=0, join='outer', ignore_index=True,
# keys=None, levels=None, names=None, verify_integrity=False, copy=True)
# load ERCE files
data2 = pd.read_csv(filename2,header=None,sep='\t')
colnames2 = list(data2)
col1, col2, col3 = colnames2[0], colnames2[1], colnames2[2]
chrom2, start2, stop2 = data2[col1], data2[col2], data2[col3]
data3 = data1
colnames1 = list(data3)
# print(colnames1)
# data3 = data3.sort_values(by=['serial'])
num1 = data3.shape[0]
print(num1)
label1 = np.zeros(num1,dtype=np.int32)
chrom1, start1, stop1 = np.asarray(data3['chrom']), np.asarray(data3['start']), np.asarray(data3['stop'])
quantile_vec1 = ['Q1','Q2']
# attention1 = np.asarray(data3['predicted_attention'])
# attention2 = np.asarray(data3[quantile_vec1[type_id]]) # ranking
sel_column = ['predicted_attention',quantile_vec1[type_id]]
attention1 = np.asarray(data3.loc[:,sel_column])
chrom_vec = np.unique(chrom2)
chrom_num = len(chrom_vec)
chrom_size1 = len(chrom1)
bin_size = stop1[1]-start1[1]
num2 = len(chrom2)
sel_num = 2
sel_num1 = 2*sel_num
score1 = -np.ones((num2,sel_num*6),dtype=np.float32)
vec1 = []
for t_chrom in chrom_vec:
# t_chrom = chrom_vec[i]
b1 = np.where(chrom2==t_chrom)[0]
num2 = len(b1)
b2 = np.where(chrom1==t_chrom)[0]
print(num2,len(b1),len(b2))
if len(b1)==0 or len(b2)==0:
print('chromosome not found', t_chrom)
continue
t_chrom_size = len(b2)
print('sample regions %d'%(sample_num),t_chrom_size)
for l in range(0,num2):
i1 = b1[l]
t_chrom, t_start, t_stop = chrom2[i1], start2[i1], stop2[i1]
t_chrom1, t_start1, t_stop1 = chrom1[b2], start1[b2], stop1[b2]
t_attention1 = attention1[b2]
print(t_stop,t_start)
region_len_ori = t_stop-t_start
t_start = max(0,t_start-tol*bin_size)
t_stop = min(t_stop1[-1],t_stop+tol*bin_size)
b1_ori = np.where((t_start1<t_stop)&(t_stop1>t_start))[0]
if len(b1_ori)==0:
continue
b1_ori = b2[b1_ori]
# s1 = max(0,b1_ori[0]-tol)
# s2 = min(chrom_size1,b1_ori[0]+tol+1)
# b1 = list(range(s1,s2))
# b1 = np.where((chrom1==t_chrom)&(start1>=t_start)&(stop1<=t_stop))[0]
label1[b1_ori] = 1+i1
# select the maximum score in a region
# t_score1 = np.max(attention1[b1_ori])
# t_score2 = np.mean(attention2[b1_ori])
# t_score1_1 = np.max(attention2[b1_ori])
# t_score2_2 = np.mean(attention2[b1_ori])
for l1 in range(sel_num):
id2 = 2*l1
score1[i1,id2:(id2+2)] = [np.max(attention1[b1_ori,l1]),np.mean(attention1[b1_ori,l1])]
# randomly sample regions
region_len = t_stop-t_start
t_chrom_size = len(b2)
# sample_num = 200
vec2 = compare_with_regions_sub1(t_chrom1,t_start1,t_stop1,t_attention1,
sample_num,region_len,t_chrom_size,bin_size,tol)
# vec2 = compare_with_regions_sub2(t_chrom1,t_start1,t_stop1,t_attention1,
# sample_num,region_len,t_chrom_size,bin_size,tol)
vec3 = []
print(vec2.shape)
num3 = vec2.shape[1]
# if num3!=sel_num1:
# print('error!',num3,sel_num1)
# return
assert num3==(sel_num1+2)
vec2_1 = vec2[:,2:]
for l1 in range(sel_num1):
t_score_mean1, t_score_std1 = np.mean(vec2_1[:,l1]), np.std(vec2_1[:,l1])
vec3.extend([t_score_mean1,t_score_std1])
sample_num1 = len(vec2)
score1[i1,sel_num1:] = vec3
t1 = np.asarray([1+i1]*sample_num1)
vec1.extend(np.hstack((t1[:,np.newaxis],vec2)))
# if i%100==0:
# print(i,score1[i],len(vec2))
print(i1,score1[i1],len(vec2),vec2.shape)
# if l>10:
# break
data3['label'] = label1
data3.to_csv(output_filename,index=False,sep='\t')
for l in range(sel_num1):
data2['score%d'%(l+1)] = score1[:,l]
id1 = sel_num1+2*l
data2['score_comp_mean%d'%(l+1)], data2['score_comp_std%d'%(l+1)] = score1[:,id1],score1[:,id1+1]
# data2['score'] = score1[:,0]
# data2['score_quantile'] = score1[:,1]
# data2['score_comp_mean'], data2['score_comp_std'] = score1[:,2],score1[:,3]
# data2['score_comp_mean1'], data2['score_comp_std1'] = score1[:,4],score1[:,5]
# b1 = np.where(score1>0)[0]
# data2 = data2.loc[b1,list(data2)]
data2.to_csv(output_filename1,index=False,sep='\t')
vec1 = np.asarray(vec1)
num1 = vec1.shape[1]
print(vec1.shape)
fields = ['region_id','start','stop']
data_1 = pd.DataFrame(columns=fields)
for i in range(3):
data_1[fields[i]] = np.int64(vec1[:,i])
for i in range(3,num1):
data_1['sel%d'%(i-2)] = vec1[:,i]
data_1.to_csv(output_filename2,index=False,sep='\t')
return True
# find overlapping regions
# input: data1: position file 1
# data2: position file 2
# mode: 0, for each position in file 1, find all positions in file 2 overlapping with this position
# mode: 1, for each position in file 1, find the position in file 2 with the longest overlap (not implemented below; all overlaps are returned)
def overlapping_with_regions(data1,data2,mode=0):
colnames1 = list(data1)
chrom1, start1, stop1 = np.asarray(data1[colnames1[0]]), np.asarray(data1[colnames1[1]]), np.asarray(data1[colnames1[2]])
num1 = len(chrom1)
colnames2 = list(data2)
chrom2, start2, stop2 = np.asarray(data2[colnames2[0]]), np.asarray(data2[colnames2[1]]), np.asarray(data2[colnames2[2]])
id_vec1, id_vec2 = [], []
for i in range(num1):
t_chrom1, t_start1, t_stop1 = chrom1[i], start1[i], stop1[i]
b1 = np.where((chrom2==t_chrom1)&(start2<t_stop1)&(stop2>t_start1))[0]
if len(b1)>0:
id_vec1.append(i)
id_vec2.append(b1)
if i%1000==0:
print(t_chrom1,t_start1,t_stop1,t_stop1-t_start1)
id_vec1 = np.asarray(id_vec1)
id_vec2 = np.asarray(id_vec2)
return id_vec1, id_vec2
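# A minimal sketch, assuming both inputs are DataFrames whose first three
# columns are chrom, start, stop: id_vec1 holds the rows of the first frame
# with at least one overlap, and id_vec2[i] the overlapping rows of the second.
# id_vec1, id_vec2 = overlapping_with_regions(region_df1,region_df2)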
# sample regions randomly to compare with elements
# input: filename1: estimation file
# filename2: ERCE file
# output_filename: file to save (or reload) the genomic loci labels
# tol: tolerance for extension of the ERCE region
# label_name: label of genomic loci overlapping with ERCE
# return: data3: estimation file, data2: ERCE
def compare_with_regions_pre(filename1,filename2,output_filename='',tol=2,
label_name='label',save_mode=0,region_data=[],
select_id=0,config={}):
# load genomic loci file
data1 = pd.read_csv(filename1,sep='\t')
# label_name = 'label1'
# load ERCE files
if len(region_data)==0:
# data2 = pd.read_csv(filename2,header=None,sep='\t')
data2 = pd.read_csv(filename2,sep='\t')
colnames = list(data2)
if colnames[0]!='chrom':
data2 = pd.read_csv(filename2,header=None,sep='\t')
else:
data2 = region_data
colnames2 = list(data2)
col1, col2, col3 = colnames2[0], colnames2[1], colnames2[2]
chrom2, start2, stop2 = np.asarray(data2[col1]), np.asarray(data2[col2]), np.asarray(data2[col3])
data3 = data1
colnames1 = list(data3)
num1 = data3.shape[0]
print(num1,colnames1)
label1 = np.zeros(num1,dtype=np.int64)
label2 = np.zeros(num1,dtype=np.int64)
chrom1, start1, stop1 = np.asarray(data3['chrom']), np.asarray(data3['start']), np.asarray(data3['stop'])
if os.path.exists(output_filename):
data3_1 = pd.read_csv(output_filename,sep='\t')
t_chrom, t_start = data3_1['chrom'], data3_1['start']
print(output_filename)
print(len(chrom1),len(t_chrom))
b1 = (chrom1!=t_chrom)
b2 = (start1!=t_start)
if np.sum(b1)>0 or np.sum(b2)>0:
print('error!')
return
data3[label_name] = data3_1[label_name].copy()
label_name1 = label_name+'_tol%d'%(tol)
data3[label_name1] = data3_1[label_name1].copy()
return data3, data2
chrom_vec = np.unique(chrom2)
chrom_num = len(chrom_vec)
chrom_size1 = len(chrom1)
bin_size = stop1[1]-start1[1]
num2 = len(chrom2)
sel_num = 2
sel_num1 = 2*sel_num
score1 = -np.ones((num2,sel_num*6),dtype=np.float32)
vec1 = []
for t_chrom in chrom_vec:
# t_chrom = chrom_vec[i]
b1 = np.where(chrom2==t_chrom)[0]
num2 = len(b1)
b2 = np.where(chrom1==t_chrom)[0]
# print(num2,len(b1),len(b2))
if len(b1)==0 or len(b2)==0:
print('chromosome not found', t_chrom)
continue
t_chrom_size = len(b2)
print(t_chrom, t_chrom_size)
for l in range(0,num2):
i1 = b1[l]
t_chrom, t_start, t_stop = chrom2[i1], start2[i1], stop2[i1]
t_chrom1, t_start1, t_stop1 = chrom1[b2], start1[b2], stop1[b2]
# print(t_stop,t_start)
region_len_ori = t_stop-t_start
start_1 = max(0,t_start-tol*bin_size)
stop_1 = min(t_stop1[-1],t_stop+tol*bin_size)
b1_ori = np.where((t_start1<stop_1)&(t_stop1>start_1))[0]
if len(b1_ori)==0:
continue
b1_ori = b2[b1_ori]
label1[b1_ori] = 1+i1
# print(i1)
start_1 = max(0,t_start)
stop_1 = min(t_stop1[-1],t_stop)
b1_ori = np.where((t_start1<stop_1)&(t_stop1>start_1))[0]
if len(b1_ori)==0:
continue
b1_ori = b2[b1_ori]
label2[b1_ori] = 1+i1
# if l>10:
# break
# data3['label'] = label1
data3[label_name] = label2
label_name1 = '%s_tol%d'%(label_name,tol)
data3[label_name1] = label1
print('region',data2.shape)
print('estimation', data3.shape)
if select_id==1:
signal = np.asarray(data3['signal'])
median1 = np.median(signal)
thresh1 = np.quantile(signal,config['thresh1'])-1e-12
print(median1,thresh1)
# return -1
id1 = np.where(signal>thresh1)[0]
data3 = data3.loc[id1,:]
data3.reset_index(drop=True,inplace=True)
print(data3.shape,len(id1),np.median(data3['signal']))
id2, id2_1 = overlapping_with_regions(data2,data3)
data2 = data2.loc[id2,:]
data2.reset_index(drop=True,inplace=True)
print(data2.shape,len(id2))
elif select_id==2:
region_data2 = config['region_data2']
id1, id1_1 = overlapping_with_regions(data3,region_data2)
data3 = data3.loc[id1,:]
data3.reset_index(drop=True,inplace=True)
print(data3.shape,len(id1))
id2, id2_1 = overlapping_with_regions(data2,region_data2)
data2 = data2.loc[id2,:]
data2.reset_index(drop=True,inplace=True)
print(data2.shape,len(id2))
    elif select_id==3:
        pass  # placeholder; no additional filtering
    else:
        pass
data3_1 = data3.loc[:,['chrom','start','stop','serial','signal',label_name,label_name1]]
if save_mode==1:
if output_filename=='':
output_filename = filename1+'.label.txt'
data3_1.to_csv(output_filename,index=False,sep='\t')
return data3, data2
# location of elements
# input: filename1: annotation file
# filename2: genomic loci file
# output_filename: file to save the serials of the matched elements
# return: True; serial, overlap count and signal per element are written to output_filename
def compare_with_regions_pre1(filename1,filename2,output_filename):
data1 = pd.read_csv(filename1,header=None,sep='\t')
data2 = pd.read_csv(filename2,sep='\t')
sample_num1 = data1.shape[0]
sample_num2 = data2.shape[0]
print(sample_num1,sample_num2)
colnames = list(data2)
chrom1, start1, stop1 = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2])
chrom2, start2, stop2, serial = np.asarray(data2[colnames[0]]), np.asarray(data2[colnames[1]]), np.asarray(data2[colnames[2]]), np.asarray(data2[colnames[3]])
signal = np.asarray(data2[colnames[4]])
chrom_num = 22
chrom_vec = ['chr%d'%(i) for i in range(1,chrom_num+1)]
id1, id2 = dict(), dict()
serial1, num_vec = -np.ones(sample_num1,dtype=np.int64), np.zeros(sample_num1,dtype=np.int64)
signal1 = np.zeros(sample_num1,dtype=np.float32)
for i in range(1,chrom_num+1):
t_chrom = 'chr%d'%(i)
b1 = np.where(chrom1==t_chrom)[0]
id1[t_chrom] = b1
b2 = np.where(chrom2==t_chrom)[0]
id2[t_chrom] = b2
print(t_chrom,len(b1),len(b2))
for id_1 in b1:
t_chrom1, t_start1, t_stop1 = chrom1[id_1], start1[id_1], stop1[id_1]
idx = np.where((start2[b2]<t_stop1)&(stop2[b2]>t_start1))[0]
if len(idx)>0:
serial1[id_1] = serial[b2[idx[0]]]
num_vec[id_1] = len(idx)
signal1[id_1] = signal[b2[idx[0]]]
colnames = list(data1)
data1['serial'] = serial1
data1['num'] = num_vec
data1['signal'] = signal1
data1 = data1.loc[:,colnames[0:3]+[(colnames[-2])]+['serial','num','signal']]
data1.to_csv(output_filename,index=False,header=False,sep='\t')
return True
# location of elements
# input: filename1: annotation file 1
# filename2: annotation file 2
# output_filename: save serial of elements
# return: True; overlap lengths are written to output_filename as the label column
def compare_with_regions_pre2(filename1,filename2,output_filename,type_id=0,chrom_num=22):
if type_id==0:
data1 = pd.read_csv(filename1,header=None,sep='\t')
else:
data1 = pd.read_csv(filename1,sep='\t')
data2 = pd.read_csv(filename2,header=None,sep='\t')
sample_num1 = data1.shape[0]
sample_num2 = data2.shape[0]
print(sample_num1,sample_num2)
colnames = list(data1)
chrom1, start1, stop1 = np.asarray(data1[colnames[0]]), np.asarray(data1[colnames[1]]), np.asarray(data1[colnames[2]])
colnames = list(data2)
chrom2, start2, stop2 = np.asarray(data2[colnames[0]]), np.asarray(data2[colnames[1]]), np.asarray(data2[colnames[2]])
chrom_vec = ['chr%d'%(i) for i in range(1,chrom_num+1)]
id1, id2 = dict(), dict()
label = np.zeros(sample_num1,dtype=np.int64)
for i in range(1,chrom_num+1):
t_chrom = 'chr%d'%(i)
b1 = np.where(chrom1==t_chrom)[0]
id1[t_chrom] = b1
b2 = np.where(chrom2==t_chrom)[0]
id2[t_chrom] = b2
print(t_chrom,len(b1),len(b2))
for id_1 in b1:
t_chrom1, t_start1, t_stop1 = chrom1[id_1], start1[id_1], stop1[id_1]
idx = np.where((start2[b2]<t_stop1)&(stop2[b2]>t_start1))[0]
if len(idx)>0:
id_2 = b2[idx]
overlapping = 0
for t_id in id_2:
t_start2, t_stop2 = start2[t_id], stop2[t_id]
overlapping += np.min([t_stop1-t_start2,t_stop2-t_start2,t_stop2-t_start1,t_stop1-t_start1])
label[id_1] = overlapping
colnames = list(data1)
data1['label'] = label
data1.to_csv(output_filename,index=False,header=False,sep='\t')
return True
# local peak search: attention score
def compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config={}):
thresh_vec = config['thresh_vec']
peak_type = config['peak_type']
if 'distance_peak_thresh' in config:
distance_thresh = config['distance_peak_thresh']
else:
distance_thresh = 5
sample_num = len(chrom)
thresh_num = len(thresh_vec)
dict1 = dict()
for thresh in thresh_vec:
dict1[thresh] = []
width_thresh = 5
for t_seq in seq_list:
pos1, pos2 = t_seq[0], t_seq[1]+1
b1 = np.asarray(range(pos1,pos2))
x = value[b1]
chrom_id = chrom[pos1]
t_serial = serial[b1]
# s1, s2 = np.max(x), np.min(x)
# print(chrom_id,len(x),np.max(x,axis=0),np.min(x,axis=0),np.median(x,axis=0))
x1, x2 = x[:,0], x[:,1] # x1: find peak, x2: prominence
if peak_type==0:
threshold = config['threshold']
if threshold>0:
peaks, c1 = find_peaks(x1,distance=distance_thresh,threshold=threshold,width=(1,10),plateau_size=(1,10))
else:
peaks, c1 = find_peaks(x1,distance=distance_thresh,width=(1,10),plateau_size=(1,10))
else:
if 'width' in config:
width = config['width']
else:
width = 10
width1 = np.arange(1,width+1)
peaks = find_peaks_cwt(x1, width1)
if len(peaks)>0:
# print(peak_type,x1[peaks],t_serial[peaks])
dict1 = peak_region_search_1(x1,x2,peaks,b1,width_thresh,thresh_vec,dict1)
# label = np.zeros((sample_num,thresh_num,2),dtype=np.int64)
print(dict1.keys())
dict2 = dict()
for l in range(thresh_num):
thresh = thresh_vec[l]
list1 = dict1[thresh]
list1 = np.asarray(list1)
# print(list1.shape)
id1 = list1[:,0]
serial1 = serial[id1]
id1 = mapping_Idx(serial,serial1)
id_1 = np.argsort(serial1)
id1 = id1[id_1]
annot1 = [thresh,0]
dict2[thresh] = [chrom[id1],start[id1],stop[id1],serial[id1],annot1]
return dict2
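# A minimal config sketch for the peak search: peaks are located on value[:,0]
# and thresholded on value[:,1]; peak_type 0 uses find_peaks (with the optional
# 'threshold'), otherwise find_peaks_cwt with widths 1..config['width'].
# config = {'thresh_vec':[0.9,0.95],'peak_type':0,'threshold':0,'distance_peak_thresh':5}
# dict2 = compare_with_regions_peak_search(chrom,start,stop,serial,value,seq_list,config)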
# local peak search: attention score
def compare_with_regions_peak_search1(chrom,start,stop,serial,value,seq_list,thresh_vec=[0.9]):
sample_num = len(chrom)
thresh_num = len(thresh_vec)
dict1 = dict()
for thresh in thresh_vec:
list1, list2 = [], []
dict1[thresh] = [list1,list2]
width_thresh = 5
for t_seq in seq_list:
pos1, pos2 = t_seq[0], t_seq[1]+1
b1 = np.asarray(range(pos1,pos2))
x = value[b1]
chrom_id = chrom[pos1]
t_serial = serial[b1]
# s1, s2 = np.max(x), np.min(x)
print(chrom_id,len(x),np.max(x,axis=0),np.min(x,axis=0),np.median(x,axis=0))
x1, x2 = x[:,0], x[:,1]
peaks, c1 = find_peaks(x1,distance=10,width=(1,10),plateau_size=(1,10))
width1 = np.arange(1,11)
peaks_cwt = find_peaks_cwt(x1, width1)
if len(peaks)>0:
dict1 = peak_region_search(x1,x2,peaks,b1,width_thresh,thresh_vec,dict1,type_id2=0)
if len(peaks_cwt)>0:
dict1 = peak_region_search(x1,x2,peaks_cwt,b1,width_thresh,thresh_vec,dict1,type_id2=1)
label = np.zeros((sample_num,thresh_num,2),dtype=np.int64)
print(dict1.keys())
dict2 = dict()
for l in range(thresh_num):
thresh = thresh_vec[l]
list1, list2 = dict1[thresh]
list1 = np.asarray(list1)
list2 = np.asarray(list2)
print(len(list1),len(list2))
id1, id2 = list1[:,0], list2[:,0]
serial1, serial2 = serial[id1], serial[id2]
id_1, id_2 = np.argsort(serial1), np.argsort(serial2)
id1, id2 = id1[id_1], id2[id_2]
annot1, annot2 = [thresh,0], [thresh,1]
dict2[thresh] = [[chrom[id1],start[id1],stop[id1],serial[id1],annot1],
[chrom[id2],start[id2],stop[id2],serial[id2],annot2]]
return dict2
# local peak search: signal
def compare_with_regions_peak_search2(chrom,start,stop,serial,value,seq_list,thresh_vec=[0],config={}):
sample_num = len(chrom)
thresh_num = len(thresh_vec)
dict1 = dict()
for thresh in thresh_vec:
list1, list2 = [], []
dict1[thresh] = [list1,list2]
# width_thresh = 5
prominence_thresh = 0
distance_thresh, width_thresh = 20, 20
if len(config)>0:
prominence_thresh, distance_thresh, width_thresh = config['prominence_thresh'], config['distance_thresh'], config['width_thresh']
for t_seq in seq_list:
pos1, pos2 = t_seq[0], t_seq[1]+1
b1 = np.asarray(range(pos1,pos2))
x = value[b1]
chrom_id = chrom[pos1]
t_serial = serial[b1]
# s1, s2 = np.max(x), np.min(x)
print(chrom_id,len(x),np.max(x,axis=0),np.min(x,axis=0),np.median(x,axis=0))
peaks, c1 = find_peaks(x,distance=distance_thresh,width=(1,width_thresh),plateau_size=(1,10))
# if prominence_thresh>0:
# peaks, c1 = find_peaks(x,distance=distance_thresh,width=(1,width_thresh),prominence=prominence_thresh,plateau_size=(1,10))
# else:
# peaks, c1 = find_peaks(x,distance=distance_thresh,width=(1,width_thresh),plateau_size=(1,10))
width1 = np.arange(1,width_thresh+1)
peaks_cwt = find_peaks_cwt(x, width1)
if len(peaks)>0:
dict1 = peak_region_search(x,x,peaks,b1,width_thresh,thresh_vec,dict1,type_id2=0)
if len(peaks_cwt)>0:
dict1 = peak_region_search(x,x,peaks_cwt,b1,width_thresh,thresh_vec,dict1,type_id2=1)
label = np.zeros((sample_num,thresh_num,2),dtype=np.int64)
print(dict1.keys())
dict2 = dict()
for l in range(thresh_num):
thresh = thresh_vec[l]
list1, list2 = dict1[thresh]
list1 = np.asarray(list1)
list2 = np.asarray(list2)
print(len(list1),len(list2))
id1, id2 = list1[:,0], list2[:,0]
serial1, serial2 = serial[id1], serial[id2]
id_1, id_2 = np.argsort(serial1), np.argsort(serial2)
id1, id2 = id1[id_1], id2[id_2]
annot1, annot2 = [thresh,0], [thresh,1]
dict2[thresh] = [[chrom[id1],start[id1],stop[id1],serial[id1],annot1],
[chrom[id2],start[id2],stop[id2],serial[id2],annot2]]
return dict2
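# A minimal config sketch for the signal peak search; an empty config falls
# back to prominence 0, peak distance 20 and width 20.
# config = {'prominence_thresh':0,'distance_thresh':20,'width_thresh':20}
# dict2 = compare_with_regions_peak_search2(chrom,start,stop,serial,signal,seq_list,thresh_vec=[0],config=config)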
def compare_with_regions_init_search(chrom,start,stop,serial,init_zone,attention1,thresh,flanking=30,bin_size=5000):
chrom_1, start_1, stop_1 = init_zone # init zone
chrom_vec = np.unique(chrom)
id1 = np.where(attention1>thresh)[0]
num1 = len(attention1)
num1_thresh = len(id1)
tol = flanking*bin_size
list1 = []
print(chrom_1[0:10],start_1[0:10],stop_1[0:10])
print(flanking,tol)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
b2 = np.where(chrom_1==chrom_id)[0]
num2 = len(b2)
for i in range(num2):
            i1 = b2[i]  # index into the init-zone arrays for this chromosome
            t_chrom1, t_start1, t_stop1 = chrom_1[i1], start_1[i1], stop_1[i1]
t_start_1, t_stop_1 = t_start1-tol, t_stop1+tol
t_id2 = np.where((start[b1]<t_stop_1)&(stop[b1]>t_start_1))[0]
t_id2 = b1[t_id2]
list1.extend(t_id2)
list1 = np.asarray(list1)
id2 = np.intersect1d(list1,id1)
chrom1, start1, stop1, serial1 = chrom[id2], start[id2], stop[id2], serial[id2]
annot1 = [thresh,flanking]
print('compare with regions init search',len(id2),num1_thresh,num1,len(id2)/num1_thresh,num1_thresh/num1)
return (chrom1, start1, stop1, serial1, annot1)
def compare_with_regions_signal_search(chrom,start,stop,serial,signal_vec,attention1,thresh=0.95,flanking=30):
    signal, predicted_signal = signal_vec[:,0], signal_vec[:,1]
mse = np.abs(signal-predicted_signal)
thresh1, thresh2 = np.quantile(mse,[0.5,0.25])
thresh_1, thresh_2 = np.quantile(signal,[0.55,0.45])
id1 = np.where(attention1>thresh)[0]
num1 = len(attention1)
num1_thresh = len(id1)
t_vec1 = np.zeros(num1,dtype=bool)
vec1 = score_2a(signal,predicted_signal)
vec2 = score_2a(signal[id1],predicted_signal[id1])
# for t_seq in seq_list:
# pos1, pos2 = t_seq[0], t_seq[1]+1
# b1 = np.where((id1<pos2)&(id1>=pos1))[0]
chrom_vec = np.unique(chrom)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
b2 = np.intersect1d(id1,b1)
num2 = len(b2)
t_serial = serial[b2]
for i in range(num2):
t_id1 = t_serial[i]
id2 = np.where((serial[b1]>=t_id1-flanking)&(serial[b1]<=t_id1+flanking))[0]
id2 = b1[id2]
t_signal, t_pred = signal[id2], predicted_signal[id2]
error = np.abs(t_signal-t_pred)
flag1 = np.asarray([np.mean(t_signal)>thresh_1,np.mean(t_signal)<thresh_2])
flag2 = np.asarray([np.mean(t_pred)>thresh_1,np.mean(t_pred)<thresh_2])
temp1 = flag1^flag2
t_vec1[b2[i]] = (np.median(error)<thresh2)&(np.sum(temp1)==0)
    id2 = np.where(t_vec1>0)[0]
chrom1, start1, stop1, serial1 = chrom[id2], start[id2], stop[id2], serial[id2]
annot1 = [thresh,flanking,thresh2,thresh_1,thresh_2]
print('compare with regions signal search',len(id2),num1_thresh,num1,len(id2)/num1_thresh,num1_thresh/num1)
print(vec1)
print(vec2)
return (chrom1, start1, stop1, serial1, annot1)
def compare_with_regions_single(chrom,start,stop,serial,label,value,attention1,thresh_vec=[0.05,0.1],value1=[],config={}):
thresh, thresh_fdr = thresh_vec
b1 = np.where(label>0)[0]
b2 = np.where(label==0)[0]
num1, num2 = len(b1), len(b2)
region_num = len(chrom)
print(num1, num2, num1/region_num)
n_dim = value.shape[1]
value2 = value[b2]
mean_value2 = np.mean(value2,axis=0)+1e-12
thresh_1 = np.zeros(n_dim)
thresh_2 = np.zeros(n_dim)
for i in range(n_dim):
thresh_1[i] = np.quantile(value2[:,i],0.95)
thresh_2[i] = np.quantile(value2[:,i],0.05)
value1 = value[b1]
fold_change = value1/(np.outer(np.ones(num1),mean_value2))
fold_change = np.asarray(fold_change,dtype=np.float32)
print(value1.shape,value2.shape)
mtx1 = np.zeros((num1,n_dim),dtype=np.float32)
for i in range(num1):
if i%100==0:
print(i)
id_1 = (value2>=value1[i])
cnt1 = np.sum(id_1,axis=0)
mtx1[i] = cnt1/num2
# if i>100:
# break
# mtx1 = np.int8(value1>thresh_1)
# mtx1[value1<thresh_2] = -1
mean_fold_change = np.mean(fold_change,axis=0)
id1 = np.argsort(-mean_fold_change)
print(np.max(fold_change),np.min(fold_change))
print(np.max(mtx1),np.min(mtx1))
print('compare with regions single')
if 'feature_name_list' in config:
feature_name_list = config['feature_name_list']
feature_name_list = feature_name_list[id1]
else:
feature_name_list = id1
fold_change = fold_change[:,id1]
mtx1 = mtx1[:,id1]
fields = ['chrom','start','stop','serial','label','predicted_attention']+list(feature_name_list)
data2 = pd.DataFrame(columns=fields)
data2['chrom'], data2['start'], data2['stop'] = chrom[b1], start[b1], stop[b1]
data2['serial'], data2['label'] = serial[b1], label[b1]
data2['predicted_attention'] = attention1[b1]
data_2 = data2.copy()
data2.loc[:,feature_name_list] = fold_change
data_2.loc[:,feature_name_list] = mtx1
print(data2.shape, data_2.shape)
return data2, data_2, feature_name_list
def compare_with_regions_distribute(chrom,start,stop,serial,label,value,thresh_vec=[0.05,0.1],value1=[],config={}):
thresh, thresh_fdr = thresh_vec
b1 = np.where(label>0)[0]
b2 = np.where(label==0)[0]
num1, num2 = len(b1), len(b2)
region_num = len(chrom)
# print(num1, num2, num1/region_num)
alternative, feature_name, plot_id = 'two-sided', 'Element', 0
if 'alternative' in config:
alternative = config['alternative']
if 'feature_name' in config:
feature_name = config['feature_name']
if 'plot_id' in config:
plot_id = config['plot_id']
data1, data2 = value[b1], value[b2]
value1, value2 = score_2a_1(data1, data2, alternative=alternative) # value1: p-value, value2: statistics
mannwhitneyu_pvalue,ks_pvalue = value1[0], value1[1]
mean_fold_change = np.mean(data1)/(np.mean(data2)+1e-12)
t1, t2 = np.median(data1), np.median(data2)
thresh1 = 1e-05
thresh2 = 0.5
flag1 = 0
median_ratio = (t1-t2)/(t2+1e-12)
if (mannwhitneyu_pvalue<thresh) and (ks_pvalue<thresh):
flag1 = 1
# print(feature_name, mannwhitneyu_pvalue, ks_pvalue,t1,t2,median_ratio)
if median_ratio>thresh2:
flag1 = 2
if median_ratio<-thresh2:
flag1 = 3
if (mannwhitneyu_pvalue<thresh1) and (ks_pvalue<thresh1):
flag1 = 4
if median_ratio>thresh2:
flag1 = 5
if median_ratio<-thresh2:
flag1 = 6
if flag1>=5:
print(feature_name, mannwhitneyu_pvalue, ks_pvalue,t1,t2,median_ratio)
output_filename = config['output_filename']
if flag1>1 and plot_id==1:
celltype_id = config['celltype_id']
label_1 = label[label>=0]
value_1 = value[label>=0]
output_filename1 = '%s_%s'%(output_filename,feature_name)
annotation_vec = ['Estimated region','Background',feature_name,celltype_id,flag1]
plot_sub1(label_1,value_1,output_filename1,annotation_vec)
return value1, value2, (t1,t2,median_ratio,mean_fold_change), flag1
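# A minimal config sketch (hypothetical feature name): compare a feature's
# distribution between labeled and background loci with Mann-Whitney U and KS
# tests; output_filename is required, and plots are written only when
# plot_id==1 and the effect passes the thresholds.
# config = {'alternative':'two-sided','feature_name':'CTCF','plot_id':0,
#           'output_filename':'motif_compare','celltype_id':1}
# pvalue_vec, stat_vec, summary, flag = compare_with_regions_distribute(chrom,start,stop,serial,label,value,thresh_vec=[0.05,0.1],config=config)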
def plot_sub1(label,value,output_filename1,annotation_vec):
params = {
'axes.labelsize': 12,
'axes.titlesize': 16,
'xtick.labelsize':12,
'ytick.labelsize':12}
pylab.rcParams.update(params)
id1 = np.where(label>0)[0]
id2 = np.where(label==0)[0]
num1, num2 = len(id1), len(id2)
label1 = [annotation_vec[0]]*num1 + [annotation_vec[1]]*num2
label1 = np.asarray(label1)
fields = ['label','mean']
data1 = pd.DataFrame(columns=fields)
data1['label'] = label1
data1['mean'] = value
# output_filename1 = '%s.h5'%(output_filename)
# with h5py.File(output_filename1,'a') as fid:
# fid.create_dataset("vec", data=data_3, compression="gzip")
    # vec1 = ['ERCE','Background']
    vec1 = annotation_vec[0:2]  # the two group labels present in label1
fig = plt.figure(figsize=(12,11))
cnt1, cnt2, vec2 = 1, 0, ['mean']
sel_idList = ['mean']
num2 = len(sel_idList)
feature_name = annotation_vec[2]
for sel_id in sel_idList:
# print(sel_id)
plt.subplot(num2,2,cnt1)
plt.title('%s'%(feature_name))
# sns.violinplot(x='label', y=sel_id, data=data_3)
sns.boxplot(x='label', y=sel_id, data=data1, showfliers=False)
# ax.get_xaxis().set_ticks([])
ax = plt.gca()
# ax.get_xaxis().set_visible(False)
ax.xaxis.label.set_visible(False)
cnt1 += 1
# output_filename1 = '%s_%s_1.png'%(output_filename,sel_id)
# plt.savefig(output_filename1,dpi=300)
plt.subplot(num2,2,cnt1)
plt.title('%s'%(feature_name))
ax = plt.gca()
ax.xaxis.label.set_visible(False)
cnt1 += 1
cnt2 += 1
for t_label in vec1:
b1 = np.where(label1==t_label)[0]
# t_mtx = data_3[data_3['label']==t_label]
t_mtx = data1.loc[b1,fields]
sns.distplot(t_mtx[sel_id], hist = True, kde = True,
kde_kws = {'shade':True, 'linewidth': 3},
label = t_label)
# output_filename2 = '%s_%s_2.png'%(output_filename,sel_id)
# plt.savefig(output_filename2,dpi=300)
file_path = './'
cell_id = annotation_vec[3]
flag = annotation_vec[-1]
output_filename1 = '%s/%s_%s_%d.png'%(file_path,output_filename1,cell_id,flag)
plt.savefig(output_filename1,dpi=300)
return True
def compare_with_regions_distribute_test(motif_data,motif_name,est_data,label,thresh_vec=[0.05,0.1],config={}):
chrom, start, stop, serial = np.asarray(est_data['chrom']), np.asarray(est_data['start']), np.asarray(est_data['stop']), np.asarray(est_data['serial'])
# attention_1 = np.asarray(est_data['predicted_attention'])
# attention1 = np.asarray(est_data[sel_column])
motif_num = len(motif_name)
motif_data1 = np.asarray(motif_data.loc[:,motif_name])
vec1 = np.zeros((motif_num,8),dtype=np.float32)
vec2 = np.zeros(motif_num,dtype=np.int8)
plot_id = 1
config.update({'plot_id':plot_id})
b1 = np.where(label>0)[0]
b2 = np.where(label==0)[0]
num1, num2 = len(b1), len(b2)
region_num = len(chrom)
print(num1, num2, num1/region_num)
for i in range(motif_num):
value = motif_data1[:,i]
config.update({'feature_name':motif_name[i]})
value1, value2, t_vec1, flag1 = compare_with_regions_distribute(chrom,start,stop,serial,label,value,thresh_vec=[0.05,0.1],config=config)
t1, t2, median_ratio, mean_fold_change = t_vec1
vec1[i] = [i+1,flag1]+list(value1)+[t1, t2, median_ratio, mean_fold_change]
vec2[i] = flag1
print(vec2,np.max(vec2),np.min(vec2))
# p-value correction with Benjamini-Hochberg correction procedure
thresh, thresh_fdr = thresh_vec
list1, list2 = [], []
list1_1, list2_2 = [], []
vec1_fdr = np.zeros((motif_num,2))
for i in range(2):
vec2 = multipletests(vec1[:,i+2],alpha=thresh_fdr,method='fdr_bh')
vec1_fdr[:,i] = vec2[1]
b1 = np.where(vec1[:,i+2]<thresh)[0]
b2 = np.where(vec1_fdr[:,i]<thresh_fdr)[0]
if i==0:
id1, id2 = b1, b2
else:
id1, id2 = np.intersect1d(id1,b1), np.intersect1d(id2,b2)
print(len(b1),len(b2),len(id1),len(id2))
# print(motif_name[id2])
list1.append(b1)
list2.append(b2)
print('compare_with_regions_distribute_test')
print('mannwhitneyu pvalue')
print(motif_name[list2[0]])
print('ks pvalue')
print(motif_name[list2[1]])
print(motif_name[id2])
vec1 = np.asarray(vec1)
celltype_id = config['celltype_id']
output_filename1 = config['output_filename']
output_filename = '%s_%d.txt'%(output_filename1,celltype_id)
fields = ['motif_id','number','flag','mannwhitneyu_pvalue','ks_pvalue','median_1','median_2','median_ratio','mean_fold_change']
data1 = pd.DataFrame(columns=fields)
data1['motif_id'] = motif_name
for i in range(1,3):
data1[fields[i]] = np.int64(vec1[:,i-1])
data1.loc[:,fields[3:]] = vec1[:,2:]
# np.savetxt(output_filename,vec1,fmt='%.4f',delimiter='\t')
data1.to_csv(output_filename,index=False,sep='\t')
return True
def generate_sequences_chrom(chrom,serial,gap_tol=5,region_list=[]):
num1 = len(chrom)
idx_sel_list = np.zeros((num1,2),dtype=np.int64)
chrom_vec = np.unique(chrom)
for chrom_id in chrom_vec:
try:
chrom_id1 = int(chrom_id[3:])
except:
print(chrom_id)
continue
b1 = np.where(chrom==chrom_id)[0]
idx_sel_list[b1,0] = chrom_id1
idx_sel_list[b1,1] = serial[b1]
b1 = (idx_sel_list[:,0]>0)
idx_sel_list = idx_sel_list[b1]
print('idx_sel_list',idx_sel_list.shape)
seq_list = generate_sequences(idx_sel_list, gap_tol=gap_tol, region_list=region_list)
return seq_list
def output_generate_sequences(chrom,start,stop,serial,idx_sel_list,seq_list,output_filename,save_mode=1):
num1 = len(seq_list)
t_serial1 = idx_sel_list[:,1]
seq_list = np.asarray(seq_list)
t_serial = t_serial1[seq_list]
id1 = mapping_Idx(serial,t_serial[:,0])
chrom1, start1, stop1 = chrom[id1], start[id1], stop[id1]
id2 = mapping_Idx(serial,t_serial[:,1])
chrom2, start2, stop2 = chrom[id2], start[id2], stop[id2]
fields = ['chrom','start','stop','serial1','serial2']
data1 = pd.DataFrame(columns=fields)
data1['chrom'], data1['start'], data1['stop'] = chrom1, start1, stop2
data1['serial1'], data1['serial2'] = t_serial[:,0], t_serial[:,1]
data1['region_len'] = t_serial[:,1]-t_serial[:,0]+1
if save_mode==1:
data1.to_csv(output_filename,index=False,sep='\t')
return data1
# query predicted signal and importance score for specific regions
def query_importance_score(region_list,filename1,filename2,thresh=0.75):
data_1 = pd.read_csv(filename1,sep='\t')
data_2 = pd.read_csv(filename2,sep='\t')
serial = np.asarray(data_2['serial'])
signal, predicted_signal = np.asarray(data_2['signal']), np.asarray(data_2['predicted_signal'])
serial1 = np.asarray(data_1['serial'])
assert list(serial)==list(serial1)
score1 = np.asarray(data_1['Q2'])
thresh1, thresh2 = np.quantile(signal,0.525), np.quantile(signal,0.475)
thresh1_1, thresh2_1 = np.quantile(predicted_signal,0.525), np.quantile(predicted_signal,0.475)
region_num = len(region_list)
flag_vec = -10*np.ones((region_num,2),dtype=np.int8)
# thresh = 0.75
list1 = -np.ones((region_num,7))
for i in range(region_num):
region = region_list[i]
serial_start, serial_stop = region[0], region[1]
b1 = np.where((serial<=serial_stop)&(serial>=serial_start))[0]
if len(b1)==0:
list1[i,0:2] = [serial_start,serial_stop]
continue
t_score = score1[b1]
t_signal = signal[b1]
t_predicted_signal = predicted_signal[b1]
b2 = np.where(t_score>thresh)[0]
temp1 = [np.max(t_score), np.mean(t_score),len(b2)]
temp2_1 = [np.max(t_signal), np.min(t_signal), np.median(t_signal)]
temp2_2 = [np.max(t_predicted_signal), np.min(t_predicted_signal), np.median(t_predicted_signal)]
b_1 = (temp2_1[-1]>thresh1)
if temp2_1[-1]<thresh2:
b_1 = -1
b_2 = (temp2_2[-1]>thresh1_1)
if temp2_2[-1]<thresh2_1:
b_2 = -1
flag_vec[i] = [b_1,b_2]
list1[i] = [serial_start,serial_stop]+temp2_1[2:]+temp2_2[2:]+temp1
return flag_vec, list1
def compare_with_regions_motif1_1(est_data,sel_idvec=[1,1,1,1],sel_column='Q2.adj',thresh1=0.95,config={}):
chrom, start, stop, serial = np.asarray(est_data['chrom']), np.asarray(est_data['start']), np.asarray(est_data['stop']), np.asarray(est_data['serial'])
signal = np.asarray(est_data['signal'])
attention_1 = np.asarray(est_data['predicted_attention'])
attention1 = np.asarray(est_data[sel_column])
print(est_data.shape)
value = np.column_stack((attention_1,attention1))
seq_list = generate_sequences_chrom(chrom,serial)
dict1 = dict()
if sel_idvec[0]>0:
thresh_vec = [0.9]
dict2 = compare_with_regions_peak_search1(chrom,start,stop,serial,value,seq_list,thresh_vec)
dict1[0] = dict2[thresh_vec[0]]
if sel_idvec[1]>0:
init_zone = config['init_zone']
flanking = config['flanking1']
chrom2,start2,stop2,serial2,annot2 = compare_with_regions_init_search(chrom,start,stop,serial,init_zone,attention1,thresh1,flanking=flanking)
dict1[1] = [chrom2,start2,stop2,serial2,annot2]
if sel_idvec[2]>0:
value = signal
dict_2 = compare_with_regions_peak_search2(chrom,start,stop,serial,value,seq_list)
chrom_local,start_local,stop_local,serial_local,annot_local = dict_2[0][1]
local_peak = [chrom_local,start_local,stop_local]
flanking = config['flanking1']
chrom_2,start_2,stop_2,serial_2,annot_2 = compare_with_regions_init_search(chrom,start,stop,serial,local_peak,attention1,thresh1,flanking=flanking)
dict1[2] = [chrom_2,start_2,stop_2,serial_2,annot_2]
if sel_idvec[3]>0:
signal_vec = np.asarray(est_data.loc[:,['signal','predicted_signal']])
chrom3,start3,stop3,serial3,annot3 = compare_with_regions_signal_search(chrom,start,stop,serial,signal_vec,attention1,thresh1)
dict1[3] = [chrom3,start3,stop3,serial3,annot3]
# b1 = np.where(attention1>thresh1)[0]
# serial1 = serial[b1]
# region_num = len(chrom)
# label1 = np.zeros(region_num,dtype=np.int32)
# label1[b1] = -1
return dict1
def compare_with_regions_motif1_sub1(motif_data,motif_name,est_data,dict1,
sel_idvec=[2,1,1,1],sel_column='Q2.adj',thresh1=0.95):
chrom, start, stop, serial = np.asarray(est_data['chrom']), np.asarray(est_data['start']), np.asarray(est_data['stop']), np.asarray(est_data['serial'])
# attention_1 = np.asarray(est_data['predicted_attention'])
attention1 = np.asarray(est_data[sel_column])
b1 = np.where(attention1>thresh1)[0]
serial1 = serial[b1]
print(np.max(attention1),np.min(attention1),np.median(attention1),np.mean(attention1))
region_num = len(chrom)
label1 = np.zeros(region_num,dtype=np.int32)
num1 = len(sel_idvec)
if np.sum(sel_idvec)>0:
label1[b1] = -1
serial_2 = serial1
for i in range(num1):
sel_id = sel_idvec[i]
if sel_id==0:
continue
if i==0:
t_vec1 = dict1[i][sel_id-1]
else:
t_vec1 = dict1[i]
t_chrom1,t_start1,t_stop1,t_serial1,t_annot1 = t_vec1
serial_2 = np.intersect1d(serial_2,t_serial1)
id1 = mapping_Idx(serial,serial_2)
b1 = np.where(id1>=0)[0]
if len(b1)!=len(serial_2):
print('error!',len(b1),len(serial_2))
id1 = id1[b1]
label1[id1] = serial_2[b1]+1
else:
label1[b1] = np.arange(len(b1))+1
return label1
# select regions and exclude them from the original list of serials
# return: serial_list1: serials with the selected regions excluded
# serial_list2: serials of the selected regions
def select_region(chrom, start, stop, serial, regionlist_filename):
region_list = pd.read_csv(regionlist_filename,header=None,sep='\t')
colnames = list(region_list)
col1, col2, col3 = colnames[0], colnames[1], colnames[2]
chrom1, start1, stop1 = region_list[col1], region_list[col2], region_list[col3]
num1 = len(chrom1)
# serial_list1 = self.serial.copy()
serial_list2 = []
for i in range(0,num1):
b1 = np.where((chrom==chrom1[i])&(start>=start1[i])&(stop<=stop1[i]))[0]
serial_list2.extend(serial[b1])
print(len(serial),len(serial_list2))
serial_list1 = np.setdiff1d(serial,serial_list2)
return serial_list1, serial_list2
# generate sequences
# idx_sel_list: chrom, serial
# seq_list: relative positions
def generate_sequences(idx_sel_list, gap_tol=5, region_list=[]):
chrom = idx_sel_list[:,0]
chrom_vec = np.unique(chrom)
chrom_vec = np.sort(chrom_vec)
seq_list = []
# print(len(chrom),chrom_vec)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
t_serial = idx_sel_list[b1,1]
prev_serial = t_serial[0:-1]
next_serial = t_serial[1:]
distance = next_serial-prev_serial
b2 = np.where(distance>gap_tol)[0]
if len(region_list)>0:
b_1 = np.where(region_list[:,0]==chrom_id)[0]
b2 = np.setdiff1d(b2,region_list[b_1,1])
# print('gap',len(b2))
if len(b2)>0:
t_seq = list(np.vstack((b2[0:-1]+1,b2[1:])).T)
t_seq.insert(0,np.asarray([0,b2[0]]))
t_seq.append(np.asarray([b2[-1]+1,len(b1)-1]))
else:
t_seq = [np.asarray([0,len(b1)-1])]
# print(t_seq)
# print(chrom_id,len(t_seq),max(distance))
seq_list.extend(b1[np.asarray(t_seq)])
return np.asarray(seq_list)
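# A minimal sketch: idx_sel_list is an (N,2) array of (chromosome number,
# serial); runs of serials with gaps <= gap_tol are grouped, and each entry of
# seq_list is a [start,stop] pair of positions into idx_sel_list.
# idx_sel_list = np.array([[1,0],[1,1],[1,2],[1,10],[1,11]])
# seq_list = generate_sequences(idx_sel_list,gap_tol=5)
# # -> [[0,2],[3,4]]: the serial jump from 2 to 10 exceeds gap_tol and splits the run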
# generate sequences
# idx_sel_list: chrom, serial
# seq_list: relative positions
# consider specific regions
def generate_sequences_1(idx_sel_list, gap_tol=5, region_list=[]):
chrom = idx_sel_list[:,0]
chrom_vec = np.unique(chrom)
chrom_vec = np.sort(chrom_vec)
seq_list = []
print(len(chrom),chrom_vec)
for chrom_id in chrom_vec:
b1 = np.where(chrom==chrom_id)[0]
t_serial = idx_sel_list[b1,1]
prev_serial = t_serial[0:-1]
next_serial = t_serial[1:]
distance = next_serial-prev_serial
b2 = np.where(distance>gap_tol)[0]
if len(region_list)>0:
b_1 = np.where(region_list[:,0]==chrom_id)[0]
b2 = np.setdiff1d(b2,region_list[b_1,1])
print('gap',len(b2))
if len(b2)>0:
t_seq = list(np.vstack((b2[0:-1]+1,b2[1:])).T)
t_seq.insert(0,np.asarray([0,b2[0]]))
t_seq.append(np.asarray([b2[-1]+1,len(b1)-1]))
else:
t_seq = [np.asarray([0,len(b1)-1])]
# print(t_seq)
print(chrom_id,len(t_seq),max(distance))
seq_list.extend(b1[np.asarray(t_seq)])
return np.asarray(seq_list)
# estimate regions with high importance scores
def estimate_regions_1(filename1,thresh=0.975,gap_tol=2,type_id=1):
data1 = pd.read_csv(filename1,sep='\t')
colnames1 = list(data1)
# print(colnames1)
    # data1 = data1.sort_values(by=['serial'])
    num1 = data1.shape[0]
    print(num1)
# label1 = np.zeros(num1,dtype=np.int32)
chrom1, start1, stop1 = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop'])
serial = np.asarray(data1['serial'])
quantile_vec1 = ['Q1','Q2']
# attention1 = np.asarray(data3['predicted_attention'])
# attention2 = np.asarray(data3[quantile_vec1[type_id]]) # ranking
# sel_column = ['predicted_attention',quantile_vec1[type_id]]
sel_column = [quantile_vec1[type_id]]
attention1 = np.asarray(data1.loc[:,sel_column])
b1 = np.where(attention1>thresh)[0]
data3 = data1.loc[b1,colnames1]
n1, n2 = len(b1), len(serial)
ratio1 = len(b1)/len(serial)
print('thresh',thresh,n1,n2,ratio1)
chrom2, start2, stop2 = np.asarray(data3['chrom']), np.asarray(data3['start']), np.asarray(data3['stop'])
serial2 = np.asarray(data3['serial'])
    # the selected loci (chrom2, start2, stop2, serial2) are computed above
    # but only the selection ratio is reported for now
    return True
# sample regions
# input: filename_list: list of filenames of estimation files
# filename2: feature regions
def sample_region_1(filename_list,filename2,output_filename):
filename1 = filename_list[0]
compare_with_regions_pre(filename1,filename2,output_filename,tol=0,label_name='label')
def check_width(serial1,serial2,thresh1,type_id=0):
if type_id==0:
b1 = np.where(serial2<serial1-thresh1)[0]
serial2[b1] = serial1[b1]-thresh1
else:
b1 = np.where(serial2>serial1+thresh1)[0]
serial2[b1] = serial1[b1]+thresh1
return serial2
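# A minimal sketch: clamp peak boundaries to at most thresh1 bins away from
# the peak; type_id=0 clamps left boundaries, type_id=1 clamps right ones.
# peaks, left = np.array([50]), np.array([40])
# left = check_width(peaks,left,thresh1=5,type_id=0)  # -> [45]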
def peak_region_search_1(x,x1,peaks,serial,width_thresh,thresh_vec,dict1):
    if len(peaks)==0:
        return dict1
    # estimate the prominence of each peak
    vec1 = peak_prominences(x, peaks)
    value1, left1, right1 = vec1[0], vec1[1], vec1[2]
len1 = right1-left1
n1 = len(peaks)
# print(n1,len(serial),serial[0],serial[-1],np.max(len1),np.min(len1))
left1_ori = check_width(peaks,left1,width_thresh,type_id=0)
right1_ori = check_width(peaks,right1,width_thresh,type_id=1)
for thresh in thresh_vec:
list1 = dict1[thresh]
b1 = np.where(x1[peaks]>thresh)[0]
b2 = np.where(x1>thresh)[0]
n2, n3 = len(b1), len(b2)
# print(n2,n3,n2/(n1+1e-12),n2/(n3+1e-12))
peak1, left1, right1 = serial[peaks[b1]], serial[left1_ori[b1]], serial[right1_ori[b1]]
list1.extend(np.vstack((peak1,left1,right1)).T)
# print(thresh,len(list1),len(peak1))
# print(list1[0:10])
dict1[thresh] = list1
return dict1
def peak_region_search(x,x1,peaks,serial,width_thresh,thresh_vec,dict1,type_id2):
    if len(peaks)==0:
        return dict1
    # estimate the prominence of each peak
    vec1 = peak_prominences(x, peaks)
    value1, left1, right1 = vec1[0], vec1[1], vec1[2]
len1 = right1-left1
n1 = len(peaks)
print(n1,len(serial),serial[0],serial[-1],np.max(len1),np.min(len1))
left1_ori = check_width(peaks,left1,width_thresh,type_id=0)
right1_ori = check_width(peaks,right1,width_thresh,type_id=1)
for thresh in thresh_vec:
temp1 = dict1[thresh]
list1 = temp1[type_id2]
b1 = np.where(x1[peaks]>thresh)[0]
b2 = np.where(x1>thresh)[0]
n2, n3 = len(b1), len(b2)
print(n2,n3,n2/(n1+1e-12),n2/(n3+1e-12))
peak1, left1, right1 = serial[peaks[b1]], serial[left1_ori[b1]], serial[right1_ori[b1]]
list1.extend(np.vstack((peak1,left1,right1)).T)
print(thresh,len(list1),type_id2,len(peak1))
temp1[type_id2] = list1
dict1[thresh] = temp1
return dict1
# non-ERCE regions
def query_region2(data1,thresh=10,label_name='label'):
data3_1 = data1.loc[:,['chrom','start','stop','signal','serial',label_name]]
serial1, label1 = np.asarray(data3_1['serial']), np.asarray(data3_1[label_name])
start1, stop1 = np.asarray(data3_1['start']), np.asarray(data3_1['stop'])
chrom1 = np.asarray(data3_1['chrom'])
chrom_vec = np.unique(chrom1)
vec1, vec2 = [], []
for t_chrom in chrom_vec:
b1 = np.where(chrom1==t_chrom)[0]
t_label, t_serial = label1[b1], serial1[b1]
t_start, t_stop = start1[b1], stop1[b1]
b2 = np.where(t_label==0)[0]
t_serial2 = t_serial[b2]
gap1 = b2[1:]-b2[0:-1]
gap2 = t_serial2[1:]-t_serial2[0:-1]
id1 = np.where((gap2>thresh)|(gap1>1))[0]
# print('gap',len(id1))
if len(id1)>0:
t_seq = list(np.vstack((id1[0:-1]+1,id1[1:])).T)
t_seq.insert(0,np.asarray([0,id1[0]]))
t_seq.append(np.asarray([id1[-1]+1,len(b2)-1]))
# vec1.extend(t_seq)
else:
t_seq = [np.asarray([0,len(b2)-1])]
print(b2,len(b2))
# print(t_seq)
t_seq = np.asarray(t_seq)
t_seq = b2[t_seq]
# vec1.append(t_seq)
# print(t_seq)
num1 = len(t_seq)
print(t_chrom,num1,max(gap1),max(gap2))
for pair1 in t_seq:
t1, t2 = pair1[0], pair1[1]
vec2.append([t_start[t1], t_stop[t2], t_serial[t1], t_serial[t2],pair1[0],pair1[1]])
vec1.extend([t_chrom]*num1)
fields = ['chrom','start','stop','serial1','serial2','pos1','pos2']
data1 = pd.DataFrame(columns=fields)
data1['chrom'] = vec1
num2 = len(fields)
data1[fields[1:]] = np.asarray(vec2,dtype=np.int64)
return data1
def compare_with_regions_load_1(filename1,run_id,data1=[]):
if len(data1)==0:
data1 = pd.read_csv(filename1,sep='\t')
max_value1 = np.asarray(data1['max'])
id1_ori = np.where(max_value1!=-1)[0]
    # data1 = data1.loc[id1_ori,:]
    # sample_num = len(id1_ori)
    sample_num = data1.shape[0]
t_vec1 = ['1_0.9','2_0.9','1_0.95','2_0.95']
colnames = []
for temp1 in t_vec1:
colnames.append('%d_%s'%(run_id,temp1))
colnames.extend(['%d_pvalue'%(run_id),'%d_fdr'%(run_id),'%d_label'%(run_id)])
data2 = data1.loc[:,colnames]
    max_value = np.asarray(data1['%d_max'%(run_id)])
chrom, start, stop, region_len = np.asarray(data1['chrom']), np.asarray(data1['start']), np.asarray(data1['stop']), np.asarray(data1['length'])
vec1 = []
mtx1 = np.asarray(data2)
id1 = np.where(mtx1[:,4]>0)[0]
id2 = np.where(mtx1[:,5]>0)[0]
id1_1 = np.union1d(id1,id2)
id3_1 = np.where((mtx1[:,2])>0)[0]
id3_2 = np.where((mtx1[:,3])>0)[0]
id3_3 = np.union1d(id3_1,id3_2)
thresh = -5
id5_1 = np.where((mtx1[:,2]<0)&(mtx1[:,2]>thresh))[0]
id5_2 = np.where((mtx1[:,3]<0)&(mtx1[:,3]>thresh))[0]
id5_3 = np.union1d(id5_1,id5_2)
id1_2 = np.union1d(id1_1,id3_3)
id1_3 = np.union1d(id1_2,id5_3)
id1_2_1 = np.union1d(id1,id3_3)
id1_3_1 = np.union1d(id1_2_1,id5_3)
id1_2_2 = np.union1d(id2,id3_3)
id1_3_2 = np.union1d(id1_2_2,id5_3)
thresh1 = 0.975
id6 = np.where(max_value>thresh1)[0]
vec1.append([len(id1),len(id2),len(id6),len(id3_1),len(id3_2),len(id5_3),len(id1_2),len(id1_3),len(id1_3)])
vec1.append([len(id1),len(id2),len(id6),len(id3_1),len(id3_2),len(id5_3),len(id1_2_1),len(id1_2_2),len(id1_3)])
vec1.append([len(id1),len(id2),len(id6),len(id3_1),len(id3_2),len(id5_3),len(id1_2_2),len(id1_3_2),len(id1_3_2)])
ratio_vec1 = np.asarray(vec1)*1.0/sample_num
print(ratio_vec1)
id_vec = (id1,id2,id6,id3_1,id3_2,id5_3,id1_2_1,id1_2_2,id1_3_1,id1_3_2,id1_3)
return ratio_vec1, data1, id_vec
# sample regions randomly to compare with elements
# compare with regions random: sample by region length
def compare_with_regions_random3(filename1,filename2,type_id1,region_filename='',output_filename='',tol=2,
sample_num=2000,type_id=1,label_name='label',
thresh_vec = [0.9,0.95,0.975,0.99,0.995],
thresh_fdr = 0.05, region_data=[],
quantile_vec1 = ['Q1','Q2']):
if region_filename=='':
region_filename = 'region1.%d.tol%d.1.txt'%(type_id1,tol)
region_list = []
if output_filename=='':
output_filename = 'region1.1.%d.tol%d.1.txt'%(type_id1,tol)
data1, data2 = compare_with_regions_pre(filename1,filename2,output_filename,tol,label_name,
save_mode=1,region_data=region_data)
if os.path.exists(region_filename)==True:
region_list1 = pd.read_csv(region_filename,sep='\t')
data2 = pd.read_csv(filename2,header=None,sep='\t')
print(region_filename,filename2)
else:
label_name1 = 'label_tol%d'%(tol)
print('data1',data1.shape)
print(list(data1))
region_list1 = query_region2(data1,label_name=label_name1)
region_list1.to_csv(region_filename,index=False,sep='\t')
region_chrom, pair1 = np.asarray(region_list1['chrom']), np.asarray(region_list1.loc[:,['pos1','pos2']])
# load ERCE files
# data2 = pd.read_csv(filename2,header=None,sep='\t')
colnames2 = list(data2)
col1, col2, col3 = colnames2[0], colnames2[1], colnames2[2]
chrom2, start2, stop2 = data2[col1], data2[col2], data2[col3]
data3 = data1
colnames1 = list(data3)
# print(colnames1)
# data3 = data3.sort_values(by=['serial'])
num1 = data3.shape[0]
print(num1)
label1 = np.zeros(num1,dtype=np.int32)
chrom1, start1, stop1 = np.asarray(data3['chrom']), np.asarray(data3['start']), np.asarray(data3['stop'])
# quantile_vec1 = ['Q1','Q2']
# attention1 = np.asarray(data3['predicted_attention'])
# attention2 = np.asarray(data3[quantile_vec1[type_id]]) # ranking
sel_column = ['predicted_attention',quantile_vec1[type_id]]
attention1 = np.asarray(data3.loc[:,sel_column])
chrom_vec = np.unique(chrom2)
chrom_num = len(chrom_vec)
chrom_size1 = len(chrom1)
bin_size = stop1[1]-start1[1]
num2 = len(chrom2)
sel_num = len(sel_column)
thresh_num = len(thresh_vec)
sel_num1 = 2*sel_num + thresh_num
score1 = -np.ones((num2,sel_num1*3),dtype=np.float32) # sampled region: mean, std
score2 = -np.ones((num2,sel_num1),dtype=np.float32) # sampled region: mean, std
vec1, vec1_1 = [], []
for t_chrom in chrom_vec:
# t_chrom = chrom_vec[i]
b2 = np.where(chrom2==t_chrom)[0]
t_num2 = len(b2)
b1 = np.where(chrom1==t_chrom)[0]
t_num1 = len(b1)
# print(t_chrom,t_num1,t_num2)
if t_num1==0 or t_num2==0:
print('chromosome not found', t_chrom)
continue
t_chrom_size = t_num1
# print('sample regions %d'%(sample_num),t_chrom,t_chrom_size)
t_chrom1, t_start1, t_stop1, t_attention1 = np.asarray(chrom1[b1]), np.asarray(start1[b1]), np.asarray(stop1[b1]), np.asarray(attention1[b1])
t_chrom_region, t_start_region, t_stop_region = np.asarray(chrom2[b2]), np.asarray(start2[b2]), np.asarray(stop2[b2])
t_region_len = t_stop_region-t_start_region
region_len_vec = np.unique(t_region_len)
# print('region_len_vec',t_chrom,len(region_len_vec),region_len_vec)
region_sample_dict = dict()
b_1 = np.where(region_chrom==t_chrom)[0]
region_list = pair1[b_1]
for region_len in region_len_vec:
region_len_tol = region_len + 2*tol*bin_size
vec2 = compare_with_regions_sub3(t_chrom1,t_start1,t_stop1,t_attention1,
sample_num,region_len_tol,region_list,bin_size,tol,
thresh_vec)
region_sample_dict[region_len] = vec2
# print('region_len',region_len,region_len_tol,vec2.shape)
for l in range(t_num2):
t_chrom2, t_start2, t_stop2 = t_chrom_region[l], t_start_region[l], t_stop_region[l]
# if l%100==0:
# print(t_chrom2, t_start2, t_stop2)
# print(t_stop,t_start)
region_len_ori = t_stop2-t_start2
region_len_ori1 = (region_len_ori)/bin_size
tol1 = tol
t_start_2 = max(0,t_start2-tol1*bin_size)
t_stop_2 = min(t_stop1[-1],t_stop2+tol1*bin_size)
len1 = (t_stop_2-t_start_2)/bin_size
b1_ori = np.where((t_start1<t_stop_2)&(t_stop1>t_start_2))[0]
if len(b1_ori)==0:
continue
b1_ori = b1[b1_ori]
i1 = b2[l]
for l1 in range(sel_num):
id2 = 2*l1
attention_score = attention1[b1_ori,l1]
#attention_score = t_attention1[b1_ori,l1]
score1[i1,id2:(id2+2)] = [np.max(attention_score),np.mean(attention_score)]
t_vec3 = []
attention_score = attention1[b1_ori,sel_num-1]
#attention_score = t_attention1[b1_ori,sel_num-1]
for l2 in range(thresh_num):
id1 = np.where(attention_score>thresh_vec[l2])[0]
score1[i1,(2*sel_num+l2)] = len(id1)/len1
            # sampled regions were keyed by the original (un-extended) region length
            vec2 = region_sample_dict[region_len_ori]
# print(vec2.shape)
sample_num1, num3 = vec2.shape[0], vec2.shape[1]
assert num3==(sel_num1+2)
vec3 = []
for l1 in range(0,sel_num1):
value1 = vec2[:,l1+2]
t_score_mean1, t_score_std1 = np.mean(value1), np.std(value1)
vec3.extend([t_score_mean1,t_score_std1])
t1 = np.where(value1>score1[i1,l1]-1e-06)[0]
score2[i1,l1] = len(t1)/sample_num1
score1[i1,sel_num1:] = vec3
# if i%100==0:
# print(i,score1[i],len(vec2))
if i1%1000==0:
# print(t_chrom,i1,score1[i1],score2[i1],len(vec2),vec2.shape)
print(t_chrom2,t_start2,t_stop2,(t_stop2-t_start2)/bin_size,i1,score1[i1],score2[i1],len(vec2),vec2.shape)
# if l>10:
# break
# find chromosomes with estimation
# serial1 = find_serial(chrom2,chrom_num=len(np.unique(chrom1)))
# score2, score1 = score2[serial1], score1[serial1]
# data2 = data2.loc[serial1,:]
# fdr correction
b1 = np.where(score2[:,0]>=0)[0]
n1, n2 = score2.shape[0], score2.shape[1]
score2_fdr = -np.ones((n1,n2))
for i in range(n2):
vec2 = multipletests(score2[b1,i],alpha=thresh_fdr,method='fdr_bh')
score2_fdr[b1,i] = vec2[1]
return score2,score2_fdr,score1,data2
def signal_normalize(signal, scale):
s1, s2 = scale[0], scale[1]
s_min, s_max = np.min(signal), np.max(signal)
scaled_signal = s1+(signal-s_min)*1.0/(s_max-s_min)*(s2-s1)
return scaled_signal
def signal_normalize_query(query_point, scale_ori, scale):
s1, s2 = scale[0], scale[1]
s_min, s_max = scale_ori[0], scale_ori[1]
scaled_signal = s1+(query_point-s_min)*1.0/(s_max-s_min)*(s2-s1)
return scaled_signal
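# A minimal sketch: signal_normalize rescales a vector to the range [s1,s2];
# signal_normalize_query maps a single point given the original (min,max).
# scaled = signal_normalize(np.array([1.0,2.0,3.0]),[0,1])  # -> [0., 0.5, 1.]
# point = signal_normalize_query(2.0,scale_ori=[1.0,3.0],scale=[0,1])  # -> 0.5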
def one_hot_encoding(seq_data,serial):
    # note: np.int was removed from NumPy; scikit-learn >=1.2 renames sparse to sparse_output
    enc=OneHotEncoder(categories=[['A','C','G','T']],sparse=False,dtype=np.int8,handle_unknown='ignore')
n_sample = len(seq_data)
seq_len = len(seq_data[0])
list1 = np.zeros((n_sample,seq_len,4),dtype=np.int8)
list2 = -np.ones((n_sample,2),dtype=np.int64)
# list1, list2 = [], []
cnt1, cnt2 = 0, 0
print('region length', seq_len, n_sample)
print(serial)
for i in range(n_sample):
str1, serial1 = seq_data[i], serial[i]
seq_len1 = len(str1)
n1 = str1.count('N')
if seq_len1!=seq_len:
continue
if n1>seq_len1*0.1:
cnt1 = cnt1+1
if n1==seq_len1:
cnt2 = cnt2+1
continue
str1 = np.asarray(list(str1))
encoding = enc.fit_transform(str1[:,np.newaxis])
list1[i] = encoding
list2[i] = [serial1,n1]
if i%10000==0:
print(i,serial1)
list1 = np.asarray(list1)
list2 = np.asarray(list2)
b1 = np.where(list2[:,0]>=0)[0]
list1 = list1[b1]
list2 = list2[b1]
    print('one hot encoding',n_sample,cnt1,cnt2,len(list2),list1.shape)
return list1, list2
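# A minimal sketch: encode equal-length sequences over A/C/G/T into an
# (N, L, 4) int8 array; fully-N sequences are dropped, and list2 records
# (serial, N count) for each kept sequence.
# seqs = ['ACGT','ACGN']
# encoded, kept = one_hot_encoding(seqs,np.array([0,1]))  # encoded.shape -> (2, 4, 4)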
def aver_overlap_value(position, start_vec, stop_vec, signal, idx):
t_start1, t_stop1 = start_vec[idx], stop_vec[idx]
t_len1 = t_stop1 - t_start1
t_len2 = (position[1] - position[0])*np.ones(len(idx))
temp1 = np.vstack((t_stop1-position[0],position[1]-t_start1,t_len1,t_len2))
temp1 = np.min(temp1,axis=0)
aver_value = np.dot(signal[idx],temp1)*1.0/np.sum(temp1)
return aver_value
def search_region(position, start_vec, stop_vec, m_idx, start_idx):
id1 = start_idx
vec1 = []
while (id1<=m_idx) and (stop_vec[id1]<=position[0]):
id1 += 1
while (id1<=m_idx) and (stop_vec[id1]>position[0]) and (start_vec[id1]<position[1]):
vec1.append(id1)
id1 += 1
if len(vec1)>0:
start_idx1 = vec1[-1]
else:
start_idx1 = id1
return np.asarray(vec1), start_idx1
def search_region_include(position, start_vec, stop_vec, m_idx, start_idx):
id1 = start_idx
vec1 = []
while (id1<=m_idx) and (start_vec[id1]<position[0]):
id1 += 1
while (id1<=m_idx) and (stop_vec[id1]<=position[1]) and (start_vec[id1]>=position[0]):
vec1.append(id1)
id1 += 1
return np.asarray(vec1), id1
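# Usage sketch for the two sweep helpers (hypothetical bins): both assume sorted,
# non-overlapping intervals and advance start_idx monotonically, so queries should
# be issued in increasing coordinate order. search_region returns intervals that
# overlap the query; search_region_include returns intervals fully contained in it.
def _example_search_region():
	start_vec = np.asarray([0, 100, 200, 300])
	stop_vec = start_vec + 100
	m_idx = len(start_vec) - 1
	overlap, _ = search_region([50, 250], start_vec, stop_vec, m_idx, 0)	# -> [0, 1, 2]
	inside, _ = search_region_include([50, 250], start_vec, stop_vec, m_idx, 0)	# -> [1]
	return overlap, inside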
# compute binned serial and signal vectors for one chromosome
def query_region1(data1,chrom_name,chrom_size,bin_size,type_id1=0):
# data1 = pd.read_csv(filename,header=None)
chrom, start, stop, signal = np.asarray(data1[0]), np.asarray(data1[1]), np.asarray(data1[2]), np.asarray(data1[3])
region_len = stop-start
id1 = np.where(chrom==chrom_name)[0]
print("chrom",chrom_name,len(id1))
t_stop = stop[id1[-1]]
# bin_size = 200
num_region = int(chrom_size*1.0/bin_size)
serial_vec = np.zeros(num_region)
signal_vec = np.zeros(num_region)
start_vec = np.asarray(range(0,num_region))*bin_size
stop_vec = start_vec + bin_size
chrom1, start1, stop1, signal1 = chrom[id1], start[id1], stop[id1], signal[id1]
threshold = 1e-04
b1 = np.where(signal1<=threshold)[0]
b2 = np.where(signal1>threshold)[0]
region_len = stop1-start1
len1 = np.sum(region_len[b1])
len2 = np.sum(region_len[b2])
ratio1 = len1*1.0/np.sum(region_len)
ratio2 = 1-ratio1
print('chrom, ratio1 ratio2',chrom_name, ratio1, ratio2, len(b1), len(b2))
list1 = []
count = 0
start_idx = 0
print("number of regions", len(b1))
count2 = 0
time_1 = time.time()
m_idx = len(start_vec)-1
for id2 in b1:
# print(id2)
t_start, t_stop = start1[id2], stop1[id2] # position of zero region
position = [t_start,t_stop]
id3 = []
if start_idx<=m_idx:
id3, start_idx = search_region_include(position, start_vec, stop_vec, m_idx, start_idx)
# print(count,t_start,t_stop,t_stop-t_start,start_idx,len(id3))
if len(id3)>0:
# if count%500000==0:
# print(count,t_start,t_stop,len(id3),start_idx,start_vec[id3[0]],stop_vec[id3[-1]])
# print(count,t_start,t_stop,t_stop-t_start,id3[0],id3[-1],start_vec[id3[0]],stop_vec[id3[-1]],len(id3),len(id3)*bin_size)
# if count>500:
# break
list1.extend(id3)
count += 1
else:
count2 += 1
time_2 = time.time()
print("time: ", time_2-time_1)
list2 = np.setdiff1d(range(0,num_region),list1)
print("zero regions",len(list1), len(list2))
print("zero regions 2", count, count2)
# return False
start_idx = 0
count = 0
count1, count2 = 0, 0
time_1 = time.time()
# start1_ori, stop1_ori = start1.copy(), stop1.copy()
# start1, stop1 = start1[b2], stop1[b2] # regions with signal values higher than the threshold
list1, list2 = np.asarray(list1), np.asarray(list2)
num2 = len(list2)
# type_id1 = 0
m_idx = len(start1)-1
for id1 in list2:
t_start, t_stop = start_vec[id1], stop_vec[id1]
position = [t_start,t_stop]
if start_idx<=m_idx:
vec1, start_idx = search_region(position, start1, stop1, m_idx, start_idx)
if len(vec1)>0:
if type_id1==0:
aver_value = aver_overlap_value(position, start1, stop1, signal1, vec1)
signal_vec[id1] = aver_value
else:
signal_vec[id1] = np.max(signal1[vec1])
serial_vec[id1] = 1
count += 1
# if count%500000==0:
# id_1, id_2 = vec1[0], vec1[-1]
# print(count,t_start,t_stop,signal_vec[id1],id_1,id_2,start1[id_1],stop1[id_1],start1[id_2],stop1[id_2],len(vec1))
if start_idx>m_idx:
break
else:
count2 += 1
time_2 = time.time()
print("time: ", time_2-time_1)
print("serial, signal", np.max(serial_vec), np.max(signal_vec), count2)
return serial_vec, signal_vec
# return True
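# Illustrative call (hypothetical path and sizes): data1 is a 4-column bedGraph-like
# table (chrom, start, stop, signal); the function bins one chromosome into fixed-size
# bins, fills zero regions, and aggregates overlapping signal per bin by weighted
# average (type_id1=0) or maximum (type_id1=1).
def _example_query_region1():
	data1 = pd.read_csv('signal.bedGraph', header=None, sep='\t')	# hypothetical file
	serial_vec, signal_vec = query_region1(data1, 'chr1', chrom_size=248956422, bin_size=200, type_id1=0)
	return serial_vec, signal_vec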
def generate_serial(filename1,chrom,start,stop):
# chrom_vec = np.sort(np.unique(chrom))
# print(chrom_vec)
chrom_vec = []
for i in range(1,23):
chrom_vec.append('chr%d'%(i))
chrom_vec += ['chrX']
chrom_vec += ['chrY']
print(chrom_vec)
print(chrom)
print(len(chrom))
data1 = pd.read_csv(filename1,header=None,sep='\t')
ref_chrom, chrom_size = np.asarray(data1[0]), np.asarray(data1[1])
serial_start = 0
serial_vec = np.zeros(len(chrom))
bin_size = stop[1]-start[1]
print(bin_size)
	for chrom_id in chrom_vec:
		b1 = np.where(ref_chrom==chrom_id)[0]
		b2 = np.where(chrom==chrom_id)[0]
		if len(b1)>0:
			# index into chrom_size only after confirming the chromosome exists
			t_size = chrom_size[b1[0]]
			size1 = int(np.ceil(t_size*1.0/bin_size))
			serial = np.int64(start[b2]/bin_size)+serial_start
			serial_vec[b2] = serial
			print(chrom_id,b2,len(serial),serial_start,size1)
			serial_start = serial_start+size1
		else:
			print("error! %s not found in the chromosome size file"%(chrom_id))
			return
return np.int64(serial_vec)
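# Usage sketch (hypothetical inputs): assign each (chrom, start) bin a genome-wide
# serial using cumulative per-chromosome offsets from a chrom.sizes file; the bin
# size is inferred from the second record, so bins are assumed uniform.
def _example_generate_serial():
	chrom = np.asarray(['chr1', 'chr1', 'chr2'])
	start = np.asarray([0, 200, 0])
	stop = start + 200
	return generate_serial('hg38.chrom.sizes', chrom, start, stop)	# hypothetical path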
def generate_serial_start(filename1,chrom,start,stop,chrom_num,type_id=0):
# chrom_vec = np.sort(np.unique(chrom))
# print(chrom_vec)
chrom_vec = []
for i in range(1,chrom_num+1):
chrom_vec.append('chr%d'%(i))
if type_id==0:
chrom_vec += ['chrX']
chrom_vec += ['chrY']
print(chrom_vec)
print(chrom)
print(len(chrom))
data1 = pd.read_csv(filename1,header=None,sep='\t')
ref_chrom, chrom_size = np.asarray(data1[0]), np.asarray(data1[1])
serial_start = 0
serial_vec = -np.ones(len(chrom),dtype=np.int64)
bin_size = stop[1]-start[1]
print(bin_size)
start_vec = dict()
	for chrom_id in chrom_vec:
		start_vec[chrom_id] = serial_start
		b1 = np.where(ref_chrom==chrom_id)[0]
		b2 = np.where(chrom==chrom_id)[0]
		if len(b1)>0:
			# index into chrom_size only after confirming the chromosome exists
			t_size = chrom_size[b1[0]]
			size1 = int(np.ceil(t_size*1.0/bin_size))
			if len(b2)>0:
				serial = np.int64(start[b2]/bin_size)+serial_start
				serial_vec[b2] = serial
				print(chrom_id,b2,len(serial),serial_start,size1)
			serial_start = serial_start+size1
		else:
			print("error! %s not found in the chromosome size file"%(chrom_id))
			return
return np.asarray(serial_vec), start_vec
def generate_serial_single(filename1,chrom,start,stop):
	# chrom_vec = np.sort(np.unique(chrom))
	# print(chrom_vec)
chrom_vec = []
for i in range(1,23):
chrom_vec.append('chr%d'%(i))
chrom_vec += ['chrX']
chrom_vec += ['chrY']
print(chrom_vec)
print(chrom)
print(len(chrom))
data1 = pd.read_csv(filename1,header=None,sep='\t')
ref_chrom, chrom_size = np.asarray(data1[0]), np.asarray(data1[1])
serial_start = 0
serial_vec = []
bin_size = stop[1]-start[1]
print(bin_size)
start_vec = dict()
	for chrom_id in chrom_vec:
		start_vec[chrom_id] = serial_start
		b1 = np.where(ref_chrom==chrom_id)[0]
		b2 = np.where(chrom==chrom_id)[0]
		if len(b1)>0:
			# index into chrom_size only after confirming the chromosome exists
			t_size = chrom_size[b1[0]]
			size1 = int(np.ceil(t_size*1.0/bin_size))
			serial = np.int64(start[b2]/bin_size)+serial_start
			serial_vec.extend(serial)
			print(chrom_id,b2,len(serial),serial_start,size1)
			serial_start = serial_start+size1
		else:
			print("error! %s not found in the chromosome size file"%(chrom_id))
			return
return np.asarray(serial_vec), start_vec
def generate_serial_local(filename1,chrom,start,stop,chrom_num):
# chrom_vec = np.sort(np.unique(chrom))
# print(chrom_vec)
chrom_vec = []
for i in range(1,chrom_num+1):
chrom_vec.append('chr%d'%(i))
chrom_vec += ['chrX']
chrom_vec += ['chrY']
chrom_vec += ['chrM']
print(chrom_vec)
print(chrom)
print(len(chrom))
t_chrom = np.unique(chrom)
# filename1 = './genome/hg38.chrom.sizes'
data1 = pd.read_csv(filename1,header=None,sep='\t')
ref_chrom, chrom_size = np.asarray(data1[0]), np.asarray(data1[1])
# serial_start = np.zeros(len(chrom))
serial_start = 0
serial_start_1 = dict()
serial_vec = np.zeros(len(chrom))
bin_size = stop[1]-start[1]
print(bin_size)
for chrom_id in chrom_vec:
b1 = np.where(ref_chrom==chrom_id)[0]
t_size = chrom_size[b1[0]]
serial_start_1[chrom_id] = serial_start
size1 = int(np.ceil(t_size*1.0/bin_size))
serial_start = serial_start+size1
for chrom_id in t_chrom:
b2 = np.where(chrom==chrom_id)
serial = np.int64(start[b2]/bin_size)+serial_start_1[chrom_id]
serial_vec[b2] = serial
return np.int64(serial_vec)
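# Usage sketch (hypothetical inputs): unlike generate_serial, the offsets here are
# precomputed for every reference chromosome (including chrM), so loci from any
# subset of chromosomes can be serialized without requiring all of them to be present.
def _example_generate_serial_local():
	chrom = np.asarray(['chr3', 'chr3'])
	start = np.asarray([0, 200])
	stop = start + 200
	return generate_serial_local('hg38.chrom.sizes', chrom, start, stop, chrom_num=22)	# hypothetical path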
def get_model2a_sequential(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
input1 = Input(shape = (input_shape,feature_dim))
lr = config['lr']
activation = config['activation']
model = keras.models.Sequential()
# model.add(Bidirectional(LSTM(input_shape=(10,feature_dim),units=output_dim,return_sequences = True,recurrent_dropout = 0.1)))
model.add(Bidirectional(LSTM(units=output_dim,return_sequences = True,recurrent_dropout = 0.1),input_shape=(input_shape,feature_dim)))
# model.add(LSTM(units=output_dim,return_sequences = True,recurrent_dropout = 0.1,input_shape=(input_shape,feature_dim)))
# model.add(Input(shape = (input_shape,feature_dim)))
# model.add(Bidirectional(LSTM(units=output_dim,return_sequences = True,recurrent_dropout = 0.1)))
model.add(LayerNormalization())
model.add(Flatten())
if fc1_output_dim>0:
model.add(Dense(units=fc1_output_dim))
model.add(BatchNormalization())
model.add(Activation(activation))
model.add(Dropout(0.5))
else:
pass
model.add(Dense(units=1))
model.add(BatchNormalization())
model.add(Activation("sigmoid"))
adam = Adam(lr = lr)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = 'mean_squared_error')
model.summary()
return model
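# Minimal configuration sketch (hypothetical values) for the sequential BiLSTM
# regressor above: the input is (input_shape, feature_dim); the output is a single
# sigmoid unit trained with mean squared error.
def _example_get_model2a_sequential():
	config = {'feature_dim': 25, 'output_dim': 32, 'fc1_output_dim': 64,
				'lr': 0.001, 'activation': 'relu'}
	return get_model2a_sequential(input_shape=10, config=config)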
def get_model2a1_sequential(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
# input1 = Input(shape = (input_shape,feature_dim))
lr = config['lr']
model = keras.models.Sequential()
model.add(Bidirectional(LSTM(units=output_dim,
return_sequences = True,
recurrent_dropout = 0.1),input_shape=(None, feature_dim)))
model.add(LayerNormalization())
if fc1_output_dim>0:
model.add(Dense(units=fc1_output_dim))
model.add(BatchNormalization())
model.add(Activation("relu"))
model.add(Dropout(0.5))
else:
pass
model.add(Dense(units=1))
model.add(BatchNormalization())
model.add(Activation("sigmoid"))
adam = Adam(lr = lr)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = 'mean_squared_error')
model.summary()
return model
def get_model2a1_attention_1(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
	activation = config['activation']
	input1 = Input(shape = (n_steps,feature_dim))
	biLSTM_layer1 = Bidirectional(LSTM(input_shape=(None, feature_dim),
							units=output_dim,
							return_sequences = True,
							recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(input1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
# x1 = Activation('tanh',name='activation')(x1)
# x1 = Flatten()(x1)
if activation!='':
x1 = Activation(activation,name='activation')(x1)
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x_1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x1 = x_1[0]
# attention = x_1[1]
if fc1_output_dim>0:
dense1 = Dense(fc1_output_dim,name='dense1')(x1)
dense1 = BatchNormalization(name='batchnorm1')(dense1)
dense1 = Activation(activation,name='activation1')(dense1)
dense_layer_output = Dropout(0.5)(dense1)
output_dim1 = fc1_output_dim
else:
dense_layer_output = x1
output_dim1 = 2*output_dim
units_1 = config['units1']
if units_1>0:
dense_layer_1 = TimeDistributed(Dense(units_1,name='dense_0'))(input1)
dense_layer_2 = TimeDistributed(Dense(1,name='dense_1'))(dense_layer_1)
else:
dense_layer_2 = TimeDistributed(Dense(1,name='dense_1'))(input1)
attention1 = Flatten()(dense_layer_2)
attention1 = Activation('softmax',name='attention1')(attention1)
attention1 = RepeatVector(output_dim1)(attention1)
attention1 = Permute([2,1])(attention1)
layer_1 = Multiply()([dense_layer_output, attention1])
dense_layer_output = Lambda(lambda x: K.sum(x,axis=1))(layer_1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
output = Dense(1,name='dense2')(dense_layer_output)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation("sigmoid",name='activation2')(output)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
adam = Adam(lr = lr)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = 'mean_squared_error')
model.summary()
return model
def get_model2a1_attention_2(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
input1 = Input(shape = (n_steps,feature_dim))
biLSTM_layer1 = Bidirectional(LSTM(input_shape=(n_steps, feature_dim),
units=output_dim,
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(input1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
# x1 = Activation('tanh',name='activation')(x1)
# x1 = Flatten()(x1)
if activation!='':
x1 = Activation(activation,name='activation')(x1)
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x_1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x1 = x_1[0]
# attention = x_1[1]
if fc1_output_dim>0:
dense1 = Dense(fc1_output_dim,name='dense1')(x1)
dense1 = BatchNormalization(name='batchnorm1')(dense1)
dense1 = Activation(activation,name='activation1')(dense1)
dense_layer_output = Dropout(0.5)(dense1)
output_dim1 = fc1_output_dim
else:
dense_layer_output = x1
output_dim1 = 2*output_dim
attention1 = TimeDistributed(Dense(1,name='dense_1'))(dense_layer_output)
attention1 = Flatten()(attention1)
attention1 = Activation('softmax',name='attention1')(attention1)
attention1 = RepeatVector(output_dim1)(attention1)
attention1 = Permute([2,1])(attention1)
layer_1 = Multiply()([dense_layer_output, attention1])
dense_layer_output = Lambda(lambda x: K.sum(x,axis=1))(layer_1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
output = Dense(1,name='dense2')(dense_layer_output)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation("sigmoid",name='activation2')(output)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
adam = Adam(lr = lr)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = 'mean_squared_error')
model.summary()
return model
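# Configuration sketch (hypothetical values) shared by the two attention variants
# above: attention_1 scores timesteps from the raw input, attention_2 from the BiLSTM
# output; both take a softmax-weighted sum over the context window before the final
# dense layer.
def _example_get_model2a1_attention():
	config = {'feature_dim': 25, 'output_dim': 32, 'fc1_output_dim': 64,
				'context_size': 10, 'lr': 0.001, 'activation': 'relu', 'units1': 16}
	return get_model2a1_attention_2((10, 25), config)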
def compute_mean_std(run_id, filename1, config={}):
data1 = pd.read_csv(filename1,sep='\t')
	# expected columns: chrom, start, stop, signal, predicted_signal
chrom, signal = np.asarray(data1['chrom']), np.asarray(data1['signal'])
predicted_signal = np.asarray(data1['predicted_signal'])
# if (np.min(predicted_signal)<-0.5) and (np.min(signal)>-0.5):
# predicted_signal = 0.5*predicted_signal+0.5
# if (np.min(predicted_signal)>-0.5) and (np.min(signal)<-0.5):
# signal = 0.5*signal+0.5
chrom_vec = np.unique(chrom)
chrom_num = len(chrom_vec)
chrom_vec1 = np.zeros(chrom_num,dtype=np.int32)
for i in range(chrom_num):
chrom_id = chrom_vec[i]
id1 = chrom_id.find('chr')
		try:
			chrom_vec1[i] = int(chrom_id[id1+3:])
		except ValueError:
			# non-numeric chromosome names (e.g. chrX, chrY) fall back to their position
			chrom_vec1[i] = i+1
id1 = np.argsort(chrom_vec1)
chrom_vec = chrom_vec[id1]
chrom_vec1 = chrom_vec1[id1]
print(chrom_vec)
print(chrom_vec1)
vec1 = score_2a(signal, predicted_signal)
mtx = np.zeros((chrom_num+1,len(vec1)))
field_num = len(vec1)
mtx[-1] = vec1
for i in range(chrom_num):
t_chrom = chrom_vec[i]
b = np.where(chrom==t_chrom)[0]
vec1 = score_2a(signal[b], predicted_signal[b])
print(t_chrom,vec1)
mtx[i] = vec1
# fields = ['run_id','chrom','mse','pearsonr','pvalue1','explained_variance',
# 'mean_abs_err','median_abs_err','r2','spearmanr','pvalue2']
fields = ['run_id','method','celltype','chrom','mse','pearsonr','pvalue1','explained_variance',
'mean_abs_err','median_abs_err','r2','spearmanr','pvalue2']
data1 = pd.DataFrame(columns=fields)
num2 = chrom_num+1
cell_type1, method1 = config['cell_type1'], config['method1']
data1['run_id'] = [run_id]*num2
data1['method'] = [method1]*num2
data1['celltype'] = [cell_type1]*num2
data1['chrom'] = list(chrom_vec)+['-1']
mtx = np.asarray(mtx,dtype=np.float32)
for i in range(field_num):
data1[fields[i+4]] = mtx[:,i]
if 'chrom_vec1_pre' in config:
chrom_vec1_pre = config['chrom_vec1_pre']
train_chrom_num = len(chrom_vec)-len(chrom_vec1_pre)
data1['train_chrNum'] = [train_chrom_num]*num2
if 'train_chrNum' in config:
train_chrom_num = config['train_chrNum']
data1['train_chrNum'] = [train_chrom_num]*num2
return data1
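# Usage sketch (hypothetical path and values): the input is a tab-separated table
# with 'chrom', 'signal' and 'predicted_signal' columns; the output collects the
# score_2a regression metrics per chromosome plus a genome-wide row (chrom == '-1').
def _example_compute_mean_std():
	config = {'cell_type1': 'GM12878', 'method1': 52}	# hypothetical values
	return compute_mean_std(run_id=1, filename1='prediction.txt', config=config)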
# construct gumbel selector 1
def construct_gumbel_selector1(input1,config,number=1,type_id=1):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation1 = config['activation']
# activation1 = 'relu'
feature_dim_vec1 = config['feature_dim_vec']
if 'local_conv_size' in config:
local_conv_size = config['local_conv_size']
else:
local_conv_size = 3
# input1 = Input(shape = (n_steps,feature_dim))
units_1 = config['units1']
if units_1>0:
# encode the input, shape: (batch_size,n_steps,units_1)
dense_layer_1 = Dense(units_1,name='dense1_gumbel_%d'%(number))(input1)
dense_layer_1 = BatchNormalization()(dense_layer_1)
dense_layer_1 = Activation(activation1,name = 'dense_gumbel_%d'%(number))(dense_layer_1)
else:
dense_layer_1 = input1
# default: n_filter1:50, dim1:25, n_filter2: 50, dim2: 25, n_local_conv: 0, concat: 0
# n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0], feature_dim_vec1[1], feature_dim_vec1[2], feature_dim_vec1[3], feature_dim_vec1[4], feature_dim_vec1[5]
n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0:6]
if n_filter1>0:
# layer_1 = Conv1D(n_filter1, local_conv_size, padding='same', activation=activation1, strides=1, name = 'conv1_gumbel_%d'%(number))(dense_layer_1)
layer_1 = Conv1D(n_filter1, local_conv_size, padding='same', strides=1, name = 'conv1_1_gumbel_%d'%(number))(dense_layer_1)
layer_1 = BatchNormalization()(layer_1)
layer_1 = Activation(activation1, name='conv1_gumbel_%d'%(number))(layer_1)
else:
layer_1 = dense_layer_1
local_info = layer_1
if n_local_conv>0:
for i in range(n_local_conv):
local_info = Conv1D(n_filter2, local_conv_size, padding='same', activation=activation1, strides=1, name = 'conv%d_gumbel_%d'%(i+2,number))(local_info)
# global info, shape: (batch_size, feature_dim)
if concat>0:
x1 = GlobalMaxPooling1D(name = 'new_global_max_pooling1d_%d'%(number))(layer_1)
if dim1>0:
global_info = Dense(dim1, name = 'new_dense_%d'%(number), activation=activation1)(x1)
else:
global_info = x1
# concatenated feature, shape: (batch_size, n_steps, dim1+dim2)
x2 = Concatenate_1()([global_info,local_info])
else:
x2 = local_info
# x2 = Dropout(0.2, name = 'new_dropout_2')(x2)
# current configuration: dense1 + conv1 + conv2
if dim2>0:
x2 = Conv1D(dim2, 1, padding='same', activation=activation1, strides=1, name = 'conv%d_gumbel_%d'%(n_local_conv+2,number))(x2)
if ('batchnorm1' in config) and config['batchnorm1']==1:
x2 = TimeDistributed(BatchNormalization(),name ='conv%d_gumbel_bn%d'%(n_local_conv+2,number))(x2)
if 'regularizer1' in config:
regularizer1 = config['regularizer1']
else:
# regularizer1 = 1e-05
regularizer1 = 0
if 'regularizer2' in config:
regularizer2 = config['regularizer2']
else:
regularizer2 = 1e-05
activation3 = config['activation3']
print(activation3)
# type_id: 1: not using regularization (need to be changed)
# type_id: 2: using regularization and not using regularization
if type_id==1:
# x2 = Conv1D(1, 1, padding='same', activation=None, strides=1, name = 'conv5_gumbel_%d'%(number))(x2)
# x2 = Conv1D(1, 1, padding='same', activation=None, strides=1, name = 'logits_T_%d'%(number))(x2)
x2 = TimeDistributed(Dense(1),name='dense_1_%d'%(number))(x2)
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
x2 = TimeDistributed(Activation(activation3),name='logits_T_%d'%(number))(x2)
elif type_id==2:
if activation3=='ReLU' or activation3=='relu':
x2 = TimeDistributed(Dense(1,activation='linear',
kernel_regularizer=regularizers.l2(regularizer2),
activity_regularizer=regularizers.l1(regularizer1)),
name='dense_1_%d'%(number))(x2)
# x2 = Dense(1,name='dense_1_%d'%(number),activation='linear',
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x2)
# x2 = TimeDistributed(Activation(activation2,name='activation_1_%d'%(number)))(x2)
if not('batchnorm2' in config) or config['batchnorm2']==1:
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
if activation3=='ReLU':
# thresh1, thresh2 = 1.0-1e-07, 1e-07
thresh1, thresh2 = 1.0, 0.0
print(thresh1, thresh2)
x2 = TimeDistributed(ReLU(max_value=thresh1,threshold=thresh2),name='logits_T_%d'%(number))(x2)
# x2 = TimeDistributed(ReLU(max_value=thresh1),name='logits_T_%d'%(number))(x2)
else:
# x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
x2 = TimeDistributed(Activation('relu'),name='logits_T_%d'%(number))(x2)
# x2 = ReLU(max_value=thresh1,threshold=thresh2,name='logits_T_%d'%(number))(x2)
# x2 = Activation(Lambda(lambda x: relu(x, max_value=1.0)))(x2)
# elif activation2=='sigmoid':
# x2 = TimeDistributed(Dense(1,name='dense_1_%d'%(number)))(x2)
# x2 = TimeDistributed(BatchNormalization(name='batchnorm_1_%d'(number)))(x2)
# x2 = TimeDistributed(Activation(activation2,name='activation_1_%d'%(number)))(x2)
else:
# if ('regularizer_1' in config) and (config['regularizer_1']==1):
# x2 = TimeDistributed(Dense(1,
# activation=activation3,
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1),
# ),name='logits_T_%d'%(number))(x2)
# x2 = TimeDistributed(Activation(activation2,name='activation_1_%d'%(number)))(x2)
flag1 = 0
# if ('regularizer_1' in config) and (config['regularizer_1']==1):
# print(regularizer1,regularizer2)
# if regularizer1>0:
# print('regularization after activation',activation3)
# flag1 = 1
if ('regularizer_1' in config):
print(regularizer1,regularizer2)
if config['regularizer_1']==1:
if regularizer1>0:
print('regularization after activation',activation3)
flag1 = 1
else:
flag1 = config['regularizer_1']
if flag1==1:
x2 = TimeDistributed(Dense(1,
activation=activation3,
kernel_regularizer=regularizers.l2(regularizer2),
activity_regularizer=regularizers.l1(regularizer1)
),name='logits_T_%d'%(number))(x2)
# x2 = TimeDistributed(Activation(activation3,
# activity_regularizer=regularizers.l1(regularizer1)),
# name='logits_T_%d'%(number))(x2)
else:
x2 = TimeDistributed(Dense(1,
kernel_regularizer=regularizers.l2(regularizer2)
),name='dense_1_%d'%(number))(x2)
# if regularizer2>0:
# x2 = TimeDistributed(Dense(1,
# kernel_regularizer=regularizers.l2(regularizer2),
# ),name='dense_1_%d'%(number))(x2)
# else:
# x2 = TimeDistributed(Dense(1),name='dense_1_%d'%(number))(x2)
# if flag1!=2:
# x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
x2 = TimeDistributed(Activation(activation3),name='logits_T_%d'%(number))(x2)
# elif 'regularizer_1' in config and config['regularizer_1']==2:
# x2 = TimeDistributed(Dense(1,
# kernel_regularizer=regularizers.l2(regularizer2),
# ),name='dense_1_%d'%(number))(x2)
# x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
# x2 = TimeDistributed(Activation(activation3),name='logits_T_%d'%(number))(x2)
# else:
# x2 = TimeDistributed(Dense(1),name='dense_1_%d'%(number))(x2)
# x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
# x2 = TimeDistributed(Activation(activation3),name='logits_T_%d'%(number))(x2)
else:
pass
return x2
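# Usage sketch (hypothetical configuration): build the per-timestep importance logits
# over a context window. With type_id=2 and a non-ReLU activation3, the head is
# Dense(1) with L2 regularization followed by batch normalization and activation3.
def _example_construct_gumbel_selector1():
	config = {'feature_dim': 25, 'output_dim': 32, 'fc1_output_dim': 64,
				'context_size': 10, 'lr': 0.001, 'activation': 'relu',
				'activation3': 'sigmoid', 'units1': 16,
				'feature_dim_vec': [50, 25, 50, 25, 0, 0]}
	input1 = Input(shape=(10, 25))
	logits_T = construct_gumbel_selector1(input1, config, number=1, type_id=2)	# (batch, 10, 1)
	return Model(input=input1, output=logits_T)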
def construct_gumbel_selector1_1(input1,config,number,type_id=2):
if 'regularizer1' in config:
regularizer1 = config['regularizer1']
else:
regularizer1 = 1e-05
if 'regularizer2' in config:
regularizer2 = config['regularizer2']
else:
regularizer2 = 1e-05
activation3 = config['activation3']
print(activation3)
# type_id: 1: not using regularization (need to be changed)
# type_id: 2: using regularization and not using regularization
x2 = input1
if type_id==1:
# x2 = Conv1D(1, 1, padding='same', activation=None, strides=1, name = 'conv5_gumbel_%d'%(number))(x2)
# x2 = Conv1D(1, 1, padding='same', activation=None, strides=1, name = 'logits_T_%d'%(number))(x2)
x2 = TimeDistributed(Dense(1),name='dense_2_local_%d'%(number))(x2)
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_2_local_%d'%(number))(x2)
x2 = TimeDistributed(Activation(activation3),name='logits_T_local_%d'%(number))(x2)
elif type_id==2:
if activation3=='ReLU' or activation3=='relu':
x2 = TimeDistributed(Dense(1,activation='linear',
kernel_regularizer=regularizers.l2(regularizer2),
activity_regularizer=regularizers.l1(regularizer1)),
name='dense_1_local_%d'%(number))(x2)
if not('batchnorm2' in config) or config['batchnorm2']==1:
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_2_local_%d'%(number))(x2)
if activation3=='ReLU':
# thresh1, thresh2 = 1.0-1e-07, 1e-07
thresh1, thresh2 = 1.0, 0.0
print(thresh1, thresh2)
x2 = TimeDistributed(ReLU(max_value=thresh1,threshold=thresh2),name='logits_T_local_%d'%(number))(x2)
# x2 = TimeDistributed(ReLU(max_value=thresh1),name='logits_T_%d'%(number))(x2)
else:
# x2 = TimeDistributed(BatchNormalization(),name='batchnorm_1_%d'%(number))(x2)
x2 = TimeDistributed(Activation('relu'),name='logits_T_local_%d'%(number))(x2)
else:
flag1 = 0
if ('regularizer_1' in config):
print(regularizer1,regularizer2)
if config['regularizer_1']==1:
if regularizer1>0:
print('regularization after activation',activation3)
flag1 = 1
else:
flag1 = config['regularizer_1']
if flag1==1:
x2 = TimeDistributed(Dense(1,
activation=activation3,
kernel_regularizer=regularizers.l2(regularizer2),
activity_regularizer=regularizers.l1(regularizer1),
),name='logits_T_local_%d'%(number))(x2)
else:
x2 = TimeDistributed(Dense(1,
kernel_regularizer=regularizers.l2(regularizer2)),name='dense_2_local_%d'%(number))(x2)
if flag1!=2:
x2 = TimeDistributed(BatchNormalization(),name='batchnorm_2_local_%d'%(number))(x2)
x2 = TimeDistributed(Activation(activation3),name='logits_T_local_%d'%(number))(x2)
else:
pass
return x2
# construct gumbel selector 1 (TimeDistributed variant for per-window local features)
def construct_gumbel_selector1_sequential(input1,config,number=1,type_id=2):
feature_dim, output_dim = config['feature_dim'], config['output_dim']
n_steps = config['context_size']
lr = config['lr']
activation1 = config['activation']
# activation1 = 'relu'
feature_dim_vec1 = config['feature_dim_vec']
if 'local_conv_size' in config:
local_conv_size = config['local_conv_size']
else:
local_conv_size = 3
# input1 = Input(shape = (n_steps,feature_dim))
units_1 = config['units1']
if units_1>0:
# encode the input, shape: (batch_size,n_steps,units_1)
dense_layer_1 = TimeDistributed(Dense(units_1),name='dense1_gumbel_local_%d'%(number))(input1)
dense_layer_1 = TimeDistributed(BatchNormalization())(dense_layer_1)
dense_layer_1 = TimeDistributed(Activation(activation1),name = 'dense_gumbel_local_%d'%(number))(dense_layer_1)
else:
dense_layer_1 = input1
# default: n_filter1:50, dim1:25, n_filter2: 50, dim2: 25, n_local_conv: 0, concat: 0
# n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0], feature_dim_vec1[1], feature_dim_vec1[2], feature_dim_vec1[3], feature_dim_vec1[4], feature_dim_vec1[5]
n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0:6]
if n_filter1>0:
# layer_1 = TimeDistributed(Conv1D(n_filter1, local_conv_size, padding='same',
# activation=activation1, strides=1), name = 'conv1_gumbel_local_%d'%(number))(dense_layer_1)
		x1 = TimeDistributed(Conv1D(n_filter1, local_conv_size,
						padding='same', strides=1))(dense_layer_1)	# use the encoded input rather than the raw input1
x1 = TimeDistributed(BatchNormalization(),name='batchnorm_1_local_%d'%(number))(x1)
layer_1 = TimeDistributed(Activation(activation1),name = 'conv1_gumbel_local_%d'%(number))(x1)
else:
layer_1 = dense_layer_1
local_info = layer_1
if n_local_conv>0:
for i in range(n_local_conv):
local_info = TimeDistributed(Conv1D(n_filter2, local_conv_size, padding='same', activation=activation1, strides=1), name = 'conv%d_gumbel_local_%d'%(i+2,number))(local_info)
# global info, shape: (batch_size, feature_dim)
if concat>0:
x1 = TimeDistributed(GlobalMaxPooling1D(),name = 'new_global_max_pooling1d_%d'%(number))(layer_1)
if dim1>0:
global_info = TimeDistributed(Dense(dim1,activation=activation1), name = 'new_dense_%d'%(number))(x1)
else:
global_info = x1
# concatenated feature, shape: (batch_size, n_steps, dim1+dim2)
x2 = TimeDistributed(Concatenate_1(),name='concatenate_local_1')([global_info,local_info])
else:
x2 = local_info
# x2 = Dropout(0.2, name = 'new_dropout_2')(x2)
# current configuration: dense1 + conv1 + conv2
if dim2>0:
x2 = TimeDistributed(Conv1D(dim2, 1, padding='same', activation=activation1, strides=1), name = 'conv%d_gumbel_local_%d'%(n_local_conv+2,number))(x2)
if ('batchnorm1' in config) and config['batchnorm1']==1:
x2 = TimeDistributed(BatchNormalization(),name ='conv%d_gumbel_local_bn%d'%(n_local_conv+2,number))(x2)
x2_local = construct_gumbel_selector1_1(x2,config,number=number+1,type_id=type_id)
return x2_local
# construct the basic convolutional feature extractor (feature vector)
def construct_basic1(input1,config,number=1,type_id=1):
feature_dim, output_dim = config['feature_dim'], config['output_dim']
n_steps = config['context_size']
lr = config['lr']
activation1 = config['activation']
# activation1 = 'relu'
feature_dim_vec1 = config['feature_dim_vec_basic']
if 'local_conv_size' in config:
local_conv_size = config['local_conv_size']
else:
local_conv_size = 3
# # input1 = Input(shape = (n_steps,feature_dim))
# units_1 = config['units1']
# if units_1>0:
# # encode the input, shape: (batch_size,n_steps,units_1)
# dense_layer_1 = TimeDistributed(Dense(units_1,name='dense_0'))(input1)
# else:
# dense_layer_1 = input1
# default: n_filter1:50, dim1:25, n_filter2: 50, dim2: 25, n_local_conv: 0, concat: 0
n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0:6]
print(n_filter1, n_filter2, n_local_conv, dim1, dim2, concat)
if n_filter1>0:
x1 = Conv1D(n_filter1, 1, padding='same', strides=1, name = 'conv1_basic1_%d'%(number))(input1)
x1 = BatchNormalization(name='batchnorm_1_%d'%(number))(x1)
layer_1 = Activation(activation1,name='conv1_basic_%d'%(number))(x1)
else:
layer_1 = input1
local_info = layer_1
if n_local_conv>0:
for i in range(n_local_conv):
local_info = Conv1D(n_filter2, local_conv_size, padding='same', activation=activation1, strides=1, name = 'conv%d_basic_%d'%(i+2,number))(local_info)
# global info, shape: (batch_size, feature_dim)
if concat>0:
x1 = GlobalMaxPooling1D(name = 'new_global_max_pooling1d_%d'%(number))(layer_1)
if dim1>0:
global_info = Dense(dim1, name = 'new_dense_%d'%(number), activation=activation1)(x1)
else:
global_info = x1
# concatenated feature, shape: (batch_size, n_steps, dim1+dim2)
x2 = Concatenate_1()([global_info,local_info])
else:
x2 = local_info
# x2 = Dropout(0.2, name = 'new_dropout_2')(x2)
# current configuration: dense1 + conv1 + conv2
if dim2>0:
x2 = Conv1D(dim2, 1, padding='same', activation=activation1, strides=1, name = 'conv%d_basic_%d'%(n_local_conv+2,number))(x2)
if ('batchnorm1' in config) and config['batchnorm1']==1:
x2 = TimeDistributed(BatchNormalization(),name ='conv%d_basic_bn%d'%(n_local_conv+2,number))(x2)
return x2
# basic fully-connected encoder (dense layers only)
def get_modelpre_basic2(input1,config,number=1,type_id=1):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
# lr = config['lr']
activation1 = config['activation']
# activation1 = 'relu'
feature_dim_vec1 = config['feature_dim_vec']
# input1 = Input(shape = (n_steps,feature_dim))
units_1 = config['units1']
if units_1>0:
# encode the input, shape: (batch_size,n_steps,units_1)
dense_layer_1 = Dense(units_1,name='dense_0')(input1)
else:
dense_layer_1 = input1
# default: n_filter1:50, dim1:25, n_filter2: 50, dim2: 25, n_local_conv: 0, concat: 0
n_filter1, dim1, n_filter2, dim2, n_local_conv, concat = feature_dim_vec1[0], feature_dim_vec1[1], feature_dim_vec1[2], feature_dim_vec1[3], feature_dim_vec1[4], feature_dim_vec1[5]
if n_filter1>0:
# layer_1 = Conv1D(n_filter1, 1, padding='same', activation=activation1, strides=1, name = 'conv1_gumbel_%d'%(number))(dense_layer_1)
layer_1 = Dense(n_filter1, activation=activation1, name = 'conv1_gumbel_%d'%(number))(dense_layer_1)
else:
layer_1 = dense_layer_1
# local info
if n_local_conv>0:
layer_2 = Dense(n_filter2, activation=activation1, name = 'conv2_gumbel_%d'%(number))(layer_1)
if n_local_conv>1:
local_info = Dense(n_filter2, activation=activation1, name = 'conv3_gumbel_%d'%(number))(layer_2)
else:
local_info = layer_2
else:
local_info = layer_1
x2 = local_info
# x2 = Dropout(0.2, name = 'new_dropout_2')(x2)
# current configuration: dense1 + conv1 + conv2
if dim2>0:
x2 = Dense(dim2, activation=activation1, name = 'conv4_%d'%(number))(x2)
return x2
def get_model2a1_basic1(input1,config):
feature_dim, output_dim, n_steps = config['feature_dim'], config['output_dim'], config['context_size']
activation = config['activation']
activation2 = config['activation2']
activation_self = config['activation_self']
if 'feature_dim3' in config:
feature_dim3 = config['feature_dim3']
else:
feature_dim3 = []
regularizer2 = config['regularizer2']
# regularizer2_2 = config['regularizer2_2']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
# method 21: attention1:1, method 22: attention1:0
if config['attention1']==1:
layer_1, attention1 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention1')(input1)
else:
layer_1 = input1
# biLSTM_layer1 = Bidirectional(LSTM(input_shape=(n_steps, feature_dim),
# units=output_dim,
# return_sequences = True,
# recurrent_dropout = 0.1),name='bilstm1')
if 'regularizer2_2' in config:
regularizer2_2 = config['regularizer2_2']
else:
regularizer2_2 = 1e-05
biLSTM_layer1 = Bidirectional(LSTM(
units=output_dim,
kernel_regularizer=regularizers.l2(regularizer2_2),
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(layer_1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
if activation2!='':
x1 = Activation(activation2,name='activation2_2')(x1)
# x1 = Activation(activation2,name='activation2_2')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
if config['attention2']==1:
x1, attention2 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention2')(x1)
# if config['concatenate_2']==1:
# global_info = GlobalMaxPooling1D(name='global_pooling_1')(x1)
# x1 =Concatenate_1(name='concatenate_1')([x1,global_info])
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
cnt1 = 0
for t_feature_dim3 in feature_dim3:
cnt1 += 1
x1 = TimeDistributed(Dense(t_feature_dim3,
kernel_regularizer=regularizers.l2(regularizer2)),
name = 'conv1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_3_%d'%(cnt1))(x1)
if 'output_dim_1' in config:
output_dim1 = config['output_dim_1']
else:
output_dim1 = 1
output = Dense(output_dim1,name='dense2')(x1)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation(activation_basic,name='activation2')(output)
return output
def get_model2a1_basic1_2(input1,config):
feature_dim, output_dim, n_steps = config['feature_dim'], config['output_dim'], config['context_size']
activation = config['activation']
activation2 = config['activation2']
activation_self = config['activation_self']
if 'feature_dim3' in config:
feature_dim3 = config['feature_dim3']
else:
feature_dim3 = []
regularizer2 = config['regularizer2']
# regularizer2_2 = config['regularizer2_2']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
# method 21: attention1:1, method 22: attention1:0
if config['attention1']==1:
layer_1, attention1 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention1')(input1)
else:
layer_1 = input1
# biLSTM_layer1 = Bidirectional(LSTM(input_shape=(n_steps, feature_dim),
# units=output_dim,
# return_sequences = True,
# recurrent_dropout = 0.1),name='bilstm1')
if 'regularizer2_2' in config:
regularizer2_2 = config['regularizer2_2']
else:
regularizer2_2 = 1e-05
biLSTM_layer1 = Bidirectional(LSTM(
activation='tanh',
units=output_dim,
kernel_regularizer=regularizers.l2(regularizer2_2),
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(layer_1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
if activation2!='':
x1 = Activation(activation2,name='activation2_2')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
if config['attention2']==1:
x1, attention2 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention2')(x1)
# if config['concatenate_2']==1:
# global_info = GlobalMaxPooling1D(name='global_pooling_1')(x1)
# x1 =Concatenate_1(name='concatenate_1')([x1,global_info])
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
cnt1 = 0
for t_feature_dim3 in feature_dim3:
cnt1 += 1
x1 = TimeDistributed(Dense(t_feature_dim3,
kernel_regularizer=regularizers.l2(regularizer2)),
name = 'conv1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_3_%d'%(cnt1))(x1)
if 'output_dim_1' in config:
output_dim1 = config['output_dim_1']
else:
output_dim1 = 1
output = Dense(output_dim1,name='dense2')(x1)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation(activation_basic,name='activation2')(output)
return output
def get_model2a1_basic2(input1,config):
feature_dim, output_dim, n_steps = config['feature_dim'], config['output_dim'], config['context_size']
activation = config['activation']
activation_self = config['activation_self']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
# method 21: attention1:1, method 22: attention1:0
if 'n_layer_basic' in config:
n_layer = config['n_layer_basic']
else:
n_layer = 2
x1 = input1
for i in range(0,n_layer):
x1 = Dense(output_dim,name='dense%d'%(i+2))(x1)
x1 = BatchNormalization(name='batchnorm%d'%(i+2))(x1)
x1 = Activation(activation_self,name='activation%d'%(i+2))(x1)
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
output = Dense(1,name='dense_basic2')(x1)
output = BatchNormalization(name='batchnorm_basic2')(output)
output = Activation(activation_basic,name='activation_basic2')(output)
return output
# get_model2a1_basic1_2 from utility_1_5
def get_model2a1_basic1_2_ori(input1,config):
feature_dim, output_dim, n_steps = config['feature_dim'], config['output_dim'], config['context_size']
activation = config['activation']
activation2 = config['activation2']
activation_self = config['activation_self']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
# method 21: attention1:1, method 22: attention1:0
if config['attention1']==1:
layer_1, attention1 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention1')(input1)
else:
layer_1 = input1
if ('layer_norm' in config) and (config['layer_norm']>0):
if (config['layer_norm']==1):
activation_2 = "linear"
else:
if activation2!='':
activation_2 = activation2
else:
activation_2 = "tanh"
biLSTM_layer1 = Bidirectional(LSTM(
input_shape=(n_steps, feature_dim),
units=output_dim,
activation=activation_2,
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(layer_1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
if (config['layer_norm']==1) and (activation2!=''):
x1 = Activation(activation2,name='activation1')(x1)
print('layer_norm',config['layer_norm'],activation2,activation_2)
	else:
		print('layer_norm',config.get('layer_norm',0))
biLSTM_layer1 = Bidirectional(LSTM(
input_shape=(n_steps, feature_dim),
units=output_dim,
activation=activation2,
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(layer_1)
print("get_model2a1_basic1_2",activation2)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
if config['attention2']==1:
x1, attention2 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention2')(x1)
# x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
output = Dense(1,name='dense2')(x1)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation(activation_basic,name='activation2')(output)
return output
# multiple convolution layers, self-attention
def get_model2a1_attention1_1(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (n_steps,feature_dim))
typeid = 0
number = 1
layer_1 = construct_gumbel_selector1(input1,config,number,typeid)
output = get_model2a1_basic1(layer_1,config)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
# adam = Adam(lr = lr, clipnorm=1.0, clipvalue=1.0)
adam = Adam(lr = lr, clipnorm=CLIPNORM1)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = loss_function)
model.summary()
return model
# method 56: single network for predicting signals
# with multiple fully connected layers
def get_model2a1_attention_1_2_2_single(config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (feature_dim,))
units1 = config['units1']
config['units1'] = config['units2']
dense_layer_output1 = get_modelpre_basic2(input1,config)
output = get_model2a1_basic2(dense_layer_output1,config)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
adam = Adam(lr = lr, clipnorm=CLIPNORM1)
model.compile(adam,loss = loss_function)
model.summary()
return model
# convolution
def get_model2a1_convolution_pre_1(input1,config):
# input1 = Input(shape = (input_shape,4))
	# conv_1 layout: [0] conv layers, [1] bilstm layers, [2] connection layers, [3] dense layers
	# each conv-layer entry: (n_filters, kernel_size, regularizer2, stride, dilation_rate,
	#			bnorm, boundary, activation, pool_length, pool_stride, dropout_rate)
conv_list_ori = config['conv_1'] # 0: conv_layers: 1: bilstm 2: processing output of lstm 3: dense_layers
activation_self = config['activation_self']
x1 = input1
conv_list1 = conv_list_ori[0]
cnt1 = 0
for conv_1 in conv_list1:
cnt1 = cnt1+1
n_filters, kernel_size1, regularizer2, stride, dilation_rate1, bnorm, boundary, activation, pool_length1, stride1, drop_out_rate = conv_1
x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, activation = "linear",
padding = boundary,
kernel_regularizer=regularizers.l2(regularizer2),
dilation_rate = dilation_rate1,
name = 'conv1_%d'%(cnt1))(x1)
# x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x1)
if bnorm>0:
x1 = BatchNormalization(name='bnorm1_%d'%(cnt1))(x1)
print(n_filters,kernel_size1,activation,pool_length1,drop_out_rate)
x1 = Activation(activation,name='activation1_%d'%(cnt1))(x1)
if pool_length1>1:
x1 = MaxPooling1D(pool_size = pool_length1, strides = stride1, name='pooling_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout1_%d'%(cnt1))(x1)
# if config['attention1']==1:
# x1, attention1 = SeqSelfAttention(return_attention=True,
# attention_activation=activation_self,
# name='attention1')(x1)
conv_list2 = conv_list_ori[1]
cnt1 = 0
for conv_1 in conv_list2:
cnt1 = cnt1+1
output_dim, recurrent_dropout_rate, drop_out_rate = conv_1[0:3]
biLSTM_layer1 = Bidirectional(LSTM(units=output_dim,
return_sequences = True,
recurrent_dropout = recurrent_dropout_rate),name='bilstm%d'%(cnt1))
x1 = biLSTM_layer1(x1)
x1 = LayerNormalization(name='layernorm2_%d'%(cnt1))(x1)
x1 = Dropout(drop_out_rate,name='dropout2_%d'%(cnt1))(x1)
connection = conv_list_ori[2]
cnt1 = 0
for conv_1 in connection[0]:
cnt1 = cnt1+1
fc1_output_dim, bnorm, activation, drop_out_rate = conv_1
x1 = Dense(fc1_output_dim,name='dense3_%d_1'%(cnt1))(x1)
if bnorm>0:
x1 = BatchNormalization(name='bnorm3_%d_1'%(cnt1))(x1)
x1 = Activation(activation,name='activation3_%d_1'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout3_%d_1'%(cnt1))(x1)
if config['attention2']==1:
x1, attention2 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention2')(x1)
flag1 = connection[-1][0]
if flag1==1:
x1 = Flatten(name='flatten1')(x1)
else:
x1 = GlobalMaxPooling1D(name ='global_max_pooling1d_1')(x1)
conv_list3 = conv_list_ori[3]
cnt1 = 0
for conv_1 in conv_list3:
cnt1 = cnt1+1
fc1_output_dim, bnorm, activation, drop_out_rate = conv_1
x1 = Dense(fc1_output_dim,name='dense3_%d'%(cnt1))(x1)
if bnorm>0:
x1 = BatchNormalization(name='bnorm3_%d'%(cnt1))(x1)
x1 = Activation(activation,name='activation3_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout3_%d'%(cnt1))(x1)
dense_layer_output = x1
return dense_layer_output
# convolution layer + selector + LSTM + pooling
# 2D feature (n_step_local,feature_dim) to 1D
# from get_model2a1_basic5_convolution
def get_model2a1_convolution_pre(input_local,select_config):
return_sequences_flag, sample_local, pooling_local = select_config['local_vec_1']
# print(feature_dim1, feature_dim2, return_sequences_flag)
conv_list_ori = select_config['local_conv_list_ori'] # 0: conv_layers: 1: bilstm 2: processing output of lstm 3: dense_layers
cnt1 = 0
boundary_vec = ['same','valid']
x1 = input_local
conv_list1 = conv_list_ori[0]
for conv_1 in conv_list1:
cnt1 = cnt1+1
if len(conv_1)==0:
continue
n_filters, kernel_size1, stride, regularizer2, dilation_rate1, boundary, bnorm, activation, pool_length1, stride1, drop_out_rate = conv_1
x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, padding=boundary_vec[boundary], activation = "linear",
kernel_regularizer=regularizers.l2(regularizer2),
dilation_rate = dilation_rate1,
name = 'conv1_pre_%d'%(cnt1))(x1)
# x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x1)
x1 = BatchNormalization(name='bnorm1_pre_%d'%(cnt1))(x1)
print(n_filters,kernel_size1,activation,pool_length1,drop_out_rate)
x1 = Activation(activation,name='activation1_pre_%d'%(cnt1))(x1)
if pool_length1>1:
x1 = MaxPooling1D(pool_size = pool_length1, strides = stride1, name='pooling_pre_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout1_pre_%d'%(cnt1))(x1)
layer_1 = x1
if sample_local>=1:
logits_T_local = construct_gumbel_selector1_sequential(layer_1,select_config,number=1,type_id=2)
if sample_local==1:
tau, k = select_config['tau'], select_config['n_select']
typeid_sample, activation3 = select_config['typeid_sample'], select_config['activation3']
print('sample_attention',tau,k,activation3,typeid_sample)
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
else:
pass
			n_step_local = select_config['n_step_local']	# assumed to be provided in select_config
			attention1 = Sample_Concrete1(tau,k,n_step_local,typeid_sample,name='Sample_Concrete1_local')(logits_T_local) # output shape: (batch_size, n_step_local, 1)
else:
attention1 = logits_T_local
layer_1 = Multiply()([layer_1, attention1])
	conv_list2 = conv_list_ori[1]
	cnt1 = 0
	x1 = layer_1	# feed the (optionally selector-weighted) features into the recurrent layers
for conv_1 in conv_list2:
cnt1 = cnt1+1
if len(conv_1)==0:
continue
output_dim, activation2, regularizer2, recurrent_dropout_rate, drop_out_rate, layer_norm = conv_1[0:6]
biLSTM_layer1 = Bidirectional(LSTM(units=output_dim,
activation = activation2,
return_sequences = True,
kernel_regularizer = keras.regularizers.l2(regularizer2),
recurrent_dropout = recurrent_dropout_rate),name='bilstm2_%d'%(cnt1))
x1 = biLSTM_layer1(x1)
if layer_norm>0:
x1 = LayerNormalization(name='layernorm2_%d'%(cnt1))(x1)
x1 = Dropout(drop_out_rate,name='dropout2_%d'%(cnt1))(x1)
if return_sequences_flag==True:
# x1 = BatchNormalization()(x1)
# x1 = TimeDistributed(LayerNormalization(),name='layernorm_local_1')(x1)
if select_config['concatenate_1']==1:
# x1 = TimeDistributed(Concatenate(axis=-1),name='concatenate_local_1')([x1,layer_1])
x1 = Concatenate(axis=-1,name='concatenate_local_1')([x1,layer_1])
connection = conv_list_ori[2]
cnt1 = 0
for conv_1 in connection:
cnt1 = cnt1+1
if len(conv_1)==0:
continue
fc1_output_dim, bnorm, activation, drop_out_rate = conv_1
x1 = Dense(fc1_output_dim,name='dense3_%d_1'%(cnt1))(x1)
if bnorm>0:
x1 = BatchNormalization(name='bnorm3_%d_1'%(cnt1))(x1)
x1 = Activation(activation,name='activation3_%d_1'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout3_%d_1'%(cnt1))(x1)
if select_config['attention2_local']==1:
activation_self = select_config['activation_self']
x1, attention2 = SeqSelfAttention(return_attention=True,
attention_activation=activation_self,
name='attention_local_1')(x1)
if pooling_local==1:
x1 = GlobalMaxPooling1D(name='global_pooling_local_1')(x1)
else:
x1 = Flatten(name='Flatten_local_1')(x1)
conv_list3 = conv_list_ori[3]
cnt1 = 0
for conv_1 in conv_list3:
cnt1 = cnt1+1
if len(conv_1)==0:
continue
fc1_output_dim, bnorm, activation, drop_out_rate = conv_1
x1 = Dense(fc1_output_dim,
kernel_regularizer=regularizers.l2(1e-05),
name='dense3_%d'%(cnt1))(x1)
if bnorm>0:
x1 = BatchNormalization(name='bnorm3_%d'%(cnt1))(x1)
x1 = Activation(activation,name='activation3_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout3_%d'%(cnt1))(x1)
return x1
# convolution (original function)
def get_model2a1_convolution(config):
size1 = config['n_step_local_ori']
learning_rate = config['lr']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (size1,4))
dense_layer_output = get_model2a1_convolution_pre(input1,config)
conv_2 = config['conv_2'] # conv_2: [1,1,'sigmoid']
n_dim, bnorm, activation = conv_2[0:3]
output = Dense(n_dim)(dense_layer_output)
if bnorm>0:
output = BatchNormalization()(output)
output = Activation(activation)(output)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
adam = Adam(lr = learning_rate)
model.compile(adam,loss = loss_function)
model.summary()
return model
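# Configuration sketch (hypothetical values) for the convolutional model above.
# local_conv_list_ori holds four stages: conv layers, bilstm layers, connection
# dense layers, and final dense layers; local_vec_1 is (return_sequences,
# sample_local, pooling_local); conv_2 defines the output head.
def _example_get_model2a1_convolution():
	config = {'n_step_local_ori': 1000, 'lr': 0.001,
				'local_vec_1': [True, 0, 1],
				'local_conv_list_ori': [
					[[64, 10, 1, 1e-04, 1, 0, 1, 'relu', 5, 5, 0.2]],
					[[32, 'tanh', 1e-04, 0.1, 0.1, 1]],
					[[32, 1, 'relu', 0.1]],
					[[16, 1, 'relu', 0.1]]],
				'concatenate_1': 0, 'attention2_local': 0,
				'conv_2': [1, 1, 'sigmoid']}
	return get_model2a1_convolution(config)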
# network1 for estimating weights, self-attention and network2 for predicting signals
# with gumbel sampling and multiple convolution layers
def get_model2a1_attention_1_2_2_sample(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (n_steps,feature_dim))
number = 1
typeid = 2
logits_T = construct_gumbel_selector1(input1,config,number,typeid)
# k = 10
if not('sample_attention' in config) or config['sample_attention']==1:
tau = 0.5
k = 5
print('sample_attention',tau,k,typeid,activation3,typeid_sample)
if 'tau' in config:
tau = config['tau']
if 'n_select' in config:
k = config['n_select']
if typeid<2:
attention1 = Sample_Concrete(tau,k,n_steps)(logits_T)
else:
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
elif activation3=='sigmoid':
typeid_sample = 3
else:
pass
attention1 = Sample_Concrete1(tau,k,n_steps,typeid_sample)(logits_T) # output shape: (batch_size, n_steps, 1)
else:
attention1 = logits_T
# encode the input 2
units_2 = config['units2']
if units_2>0:
dim2 = units_2
dense_layer_output1 = TimeDistributed(Dense(units_2,name='dense_2'))(input1)
else:
dim2 = feature_dim
dense_layer_output1 = input1
if config['select2']==1:
units1 = config['units1']
config['units1'] = 0
typeid = 0
number = 2
dense_layer_output1 = construct_gumbel_selector1(dense_layer_output1,config,number,typeid)
config['units1'] = units1
layer_1 = Multiply()([dense_layer_output1, attention1])
output = get_model2a1_basic1(layer_1,config)
# output = Activation("softmax")(output)
model = Model(input = input1, output = output)
adam = Adam(lr = lr, clipnorm=CLIPNORM1)
model.compile(adam,loss = loss_function)
model.summary()
return model
# network1 for estimating weights, self-attention and network2 for predicting signals
# with gumbel sampling and multiple convolution layers (variant of the function above)
def get_model2a1_attention_1_2_2_sample_1(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (n_steps,feature_dim))
number = 1
typeid = 2
logits_T = construct_gumbel_selector1(input1,config,number,typeid)
# k = 10
if not('sample_attention' in config) or config['sample_attention']==1:
tau = 0.5
k = 5
print('sample_attention',tau,k,typeid,activation3,typeid_sample)
if 'tau' in config:
tau = config['tau']
if 'n_select' in config:
k = config['n_select']
if typeid<2:
attention1 = Sample_Concrete(tau,k,n_steps)(logits_T)
else:
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
elif activation3=='sigmoid':
typeid_sample = 3
else:
pass
attention1 = Sample_Concrete1(tau,k,n_steps,typeid_sample)(logits_T) # output shape: (batch_size, n_steps, 1)
else:
attention1 = logits_T
# encode the input 2
units_2 = config['units2']
if units_2>0:
dim2 = units_2
dense_layer_output1 = TimeDistributed(Dense(units_2,name='dense_2'))(input1)
else:
dim2 = feature_dim
dense_layer_output1 = input1
if config['select2']==1:
units1 = config['units1']
config['units1'] = 0
typeid = 0
number = 2
dense_layer_output1 = construct_gumbel_selector1(dense_layer_output1,config,number,typeid)
config['units1'] = units1
layer_1 = Multiply()([dense_layer_output1, attention1])
output = get_model2a1_basic1(layer_1,config)
model = Model(input = input1, output = output)
adam = Adam(lr = lr, clipnorm=CLIPNORM1)
model.compile(adam,loss = loss_function)
model.summary()
return model
def find_optimizer(config):
init_lr, decay_rate1 = 0.005, 0.96
if 'init_lr' in config:
init_lr = config['init_lr']
if 'decay_rate1' in config:
decay_rate1 = config['decay_rate1']
lr_schedule = keras.optimizers.schedules.ExponentialDecay(initial_learning_rate=init_lr,
decay_steps=50,
decay_rate=decay_rate1,
staircase=True)
lr = config['lr']
lr_id = 1-config['lr_schedule']
vec1 = [lr,lr_schedule]
# adam = Adam(lr = lr, clipnorm=CLIPNORM1)
print(config['optimizer'])
if config['optimizer']=='SGD':
optimizer = keras.optimizers.SGD(learning_rate=vec1[lr_id], nesterov=True, clipnorm=CLIPNORM1)
elif config['optimizer']=='RMSprop':
optimizer = keras.optimizers.RMSprop(learning_rate=vec1[lr_id], clipnorm=CLIPNORM1)
elif config['optimizer']=='Adadelta':
optimizer = keras.optimizers.Adadelta(learning_rate=lr, clipnorm=CLIPNORM1)
elif config['optimizer']=='Adagrad':
optimizer = keras.optimizers.Adagrad(learning_rate=lr, clipnorm=CLIPNORM1)
elif config['optimizer']=='Nadam':
optimizer = keras.optimizers.Nadam(learning_rate=lr, clipnorm=CLIPNORM1)
else:
optimizer = keras.optimizers.Adam(learning_rate=vec1[lr_id], clipnorm=CLIPNORM1)
return optimizer
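# Usage sketch for find_optimizer (illustrative values). Note how the code above
# selects the rate: lr_id = 1 - config['lr_schedule'], so lr_schedule==0 picks the
# ExponentialDecay schedule and lr_schedule==1 picks the fixed config['lr'].
def _example_find_optimizer():
	config = {'optimizer': 'SGD', 'lr': 0.001, 'lr_schedule': 0,
			'init_lr': 0.005, 'decay_rate1': 0.96}
	return find_optimizer(config)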
# convolution layer + selector + LSTM + pooling
# 2D feature (n_step_local,feature_dim) to 1D
def get_model2a1_basic5_convolution(input_local,select_config):
# n_step_local,feature_dim = select_config['input_shape_1']
n_step_local = select_config['n_step_local']
feature_dim1, feature_dim2, feature_dim3, return_sequences_flag, sample_local, pooling_local = select_config['local_vec_1']
# print(feature_dim1, feature_dim2, return_sequences_flag)
	# conv_list1 = config['local_vec_1'] # 0: conv_layers; 1: bilstm; 2: processing output of lstm; 3: dense_layers
	conv_list1 = select_config['local_conv_list1'] # 0: conv_layers; 1: bilstm; 2: processing output of lstm; 3: dense_layers
# input_local = Input(shape=(n_step_local,feature_dim))
# lstm_1 = Bidirectional(LSTM(feature_dim1, name = 'lstm_1'),
# name = 'bidirectional')(embedded_sequences)
# layer_1 = TimeDistributed(Conv1D(feature_dim1,1,padding='same',activation=None,strides=1),name='conv_local_1')(input_local)
# layer_1 = TimeDistributed(BatchNormalization(),name='batchnorm_local_1')(layer_1)
# layer_1 = TimeDistributed(Activation('relu'),name='activation_local_1')(layer_1)
cnt1 = 0
boundary_vec = ['same','valid']
x1 = input_local
for conv_1 in conv_list1:
cnt1 = cnt1+1
n_filters, kernel_size1, stride, regularizer2, dilation_rate1, boundary, bnorm, activation, pool_length1, stride1, drop_out_rate = conv_1
x1 = TimeDistributed(Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, padding=boundary_vec[boundary], activation = "linear",
kernel_regularizer=regularizers.l2(regularizer2),
dilation_rate = dilation_rate1),
name = 'conv1_pre_%d'%(cnt1))(x1)
# x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_pre_%d'%(cnt1))(x1)
print(n_filters,kernel_size1,activation,pool_length1,drop_out_rate)
x1 = TimeDistributed(Activation(activation),name='activation1_pre_%d'%(cnt1))(x1)
if pool_length1>1:
x1 = TimeDistributed(MaxPooling1D(pool_size = pool_length1, strides = stride1), name='pooling_pre_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = TimeDistributed(Dropout(drop_out_rate),name='dropout1_pre_%d'%(cnt1))(x1)
layer_1 = x1
if sample_local>=1:
logits_T_local = construct_gumbel_selector1_sequential(layer_1,select_config,number=1,type_id=2)
if sample_local==1:
tau, k = select_config['tau'], select_config['n_select']
typeid_sample, activation3 = select_config['typeid_sample'], select_config['activation3']
print('sample_attention',tau,k,activation3,typeid_sample)
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
else:
pass
attention1 = TimeDistributed(Sample_Concrete1(tau,k,n_step_local,typeid_sample),name='Sample_Concrete1_local')(logits_T_local) # output shape: (batch_size, n_steps, n_step_local, 1)
else:
attention1 = logits_T_local
layer_1 = Multiply()([layer_1, attention1])
# biLSTM_layer_1 = Bidirectional(LSTM(input_shape=(n_step_local, n_filters),
# units=feature_dim1,
# return_sequences = return_sequences_flag,
# kernel_regularizer = keras.regularizers.l2(1e-5),
# dropout=0.1,
# recurrent_dropout = 0.1),name='bilstm_local_1_1')
regularizer2 = select_config['regularizer2_2']
biLSTM_layer_1 = Bidirectional(LSTM(units=feature_dim1,
return_sequences = return_sequences_flag,
kernel_regularizer = keras.regularizers.l2(regularizer2),
dropout=0.1,
recurrent_dropout = 0.1),name='bilstm_local_1_1')
x1 = TimeDistributed(biLSTM_layer_1,name='bilstm_local_1')(layer_1)
if return_sequences_flag==True:
# x1 = BatchNormalization()(x1)
# x1 = TimeDistributed(LayerNormalization(),name='layernorm_local_1')(x1)
if select_config['concatenate_1']==1:
# x1 = TimeDistributed(Concatenate(axis=-1),name='concatenate_local_1')([x1,layer_1])
x1 = Concatenate(axis=-1,name='concatenate_local_1')([x1,layer_1])
if feature_dim2>0:
cnt1 += 1
x1 = TimeDistributed(Dense(feature_dim2,
kernel_regularizer=regularizers.l2(1e-05)),
name = 'conv1_pre_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_pre_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_pre_%d'%(cnt1))(x1)
	if select_config['attention2_local']==1:
		activation_self = select_config['activation_self']
		x1, attention2 = TimeDistributed(SeqSelfAttention(return_attention=True, attention_activation=activation_self),name='attention_local_1')(x1)
if pooling_local==1:
x1 = TimeDistributed(GlobalMaxPooling1D(),name='global_pooling_local_1')(x1)
else:
x1 = TimeDistributed(Flatten(),name='Flatten_local_1')(x1)
for t_feature_dim3 in feature_dim3:
cnt1 += 1
x1 = TimeDistributed(Dense(t_feature_dim3,
kernel_regularizer=regularizers.l2(1e-05)),
name = 'conv1_pre_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_pre_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_pre_%d'%(cnt1))(x1)
return x1
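# Layout of each entry in select_config['local_conv_list1'], matching the unpacking
# in the loop above; the numbers are illustrative, not the original hyperparameters.
def _example_local_conv_list1():
	# (n_filters, kernel_size, stride, l2_regularizer, dilation_rate,
	#  boundary (0: 'same', 1: 'valid'), batchnorm_flag, activation,
	#  pool_length, pool_stride, dropout_rate)
	return [(64, 3, 1, 1e-04, 1, 0, 1, 'relu', 2, 2, 0.2),
			(32, 3, 1, 1e-04, 1, 0, 1, 'relu', 2, 2, 0.2)]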
# convolution layer + pooling + dropout + dilated convolution
# 2D feature (n_step_local,feature_dim) to 1D
def get_model2a1_basic5_convolution1(input_local,select_config):
# n_step_local,feature_dim = select_config['input_shape_1']
# n_step_local = select_config['n_step_local']
# feature_dim1, feature_dim2, feature_dim3, return_sequences_flag, sample_local, pooling_local = select_config['local_vec_1']
# print(feature_dim1, feature_dim2, return_sequences_flag)
# conv_list1 = config['local_vec_1'] # 0: conv_layers; 1: bilstm; 2: processing output of lstm; 3: dense_layers
	conv_list1 = select_config['local_conv_list1'] # 0: conv_layers; 1: bilstm; 2: processing output of lstm; 3: dense_layers
conv_list2 = select_config['local_conv_list2'] # 0: dilated convolution layers
# input_local = Input(shape=(n_step_local,feature_dim))
# lstm_1 = Bidirectional(LSTM(feature_dim1, name = 'lstm_1'),
# name = 'bidirectional')(embedded_sequences)
# layer_1 = TimeDistributed(Conv1D(feature_dim1,1,padding='same',activation=None,strides=1),name='conv_local_1')(input_local)
# layer_1 = TimeDistributed(BatchNormalization(),name='batchnorm_local_1')(layer_1)
# layer_1 = TimeDistributed(Activation('relu'),name='activation_local_1')(layer_1)
cnt1 = 0
boundary_vec = ['same','valid']
x1 = input_local
for conv_1 in conv_list1:
cnt1 = cnt1+1
n_filters, kernel_size1, stride, regularizer2, dilation_rate1, boundary, bnorm, activation, pool_length1, stride1, drop_out_rate = conv_1
x1 = TimeDistributed(Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, padding=boundary_vec[boundary],
activation = "linear",
kernel_regularizer=regularizers.l2(regularizer2),
dilation_rate = dilation_rate1),
name = 'conv1_pre_%d'%(cnt1))(x1)
# x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_pre_%d'%(cnt1))(x1)
print(n_filters,kernel_size1,activation,pool_length1,drop_out_rate)
x1 = TimeDistributed(Activation(activation),name='activation1_pre_%d'%(cnt1))(x1)
if pool_length1>1:
x1 = TimeDistributed(MaxPooling1D(pool_size = pool_length1, strides = stride1), name='pooling_pre_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = TimeDistributed(Dropout(drop_out_rate),name='dropout1_pre_%d'%(cnt1))(x1)
layer_1 = x1
if len(conv_list1)>0:
x1 = TimeDistributed(Flatten(),name='Flatten_local_1')(x1)
# dilated convolution
cnt2 = 0
for conv_1 in conv_list2:
# dilation rate: 1, 2, 4, 8, 16, 32
if cnt2>=1:
x2 = Concatenate(axis=-1,name='concatenate_local_%d'%(cnt2))([x1,x2])
else:
x2 = x1
n_filters, kernel_size1, stride, regularizer2, dilation_rate1, boundary, bnorm, activation, pool_length1, stride1, drop_out_rate = conv_1
# x1 = TimeDistributed(Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, padding=boundary_vec[boundary], activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# dilation_rate = dilation_rate1),
# name = 'conv2_pre_%d'%(cnt2))(x2)
x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, strides = stride, padding=boundary_vec[boundary],
activation = "linear",
kernel_regularizer=regularizers.l2(regularizer2),
dilation_rate = dilation_rate1,
name = 'conv2_pre_%d'%(cnt2))(x2)
# x1 = Conv1D(filters = n_filters, kernel_size = kernel_size1, activation = "linear",
# kernel_regularizer=regularizers.l2(regularizer2),
# activity_regularizer=regularizers.l1(regularizer1))(x1)
x1 = BatchNormalization(name='bnorm2_pre_%d'%(cnt2))(x1)
print(n_filters,kernel_size1,activation,pool_length1,drop_out_rate)
x1 = Activation(activation,name='activation2_pre_%d'%(cnt2))(x1)
# if pool_length1>1:
# x1 = TimeDistributed(MaxPooling1D(pool_size = pool_length1, strides = stride1), name='pooling_pre_%d'%(cnt1))(x1)
if drop_out_rate>0:
x1 = Dropout(drop_out_rate,name='dropout2_pre_%d'%(cnt2))(x1)
cnt2 += 1
x2 = Concatenate(axis=-1,name='concatenate_local_%d'%(cnt2))([x1,x2])
return x2
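# In the dilated branch above, each block's output is concatenated with its input,
# so the receptive field grows with the dilation rate. A sketch of
# select_config['local_conv_list2'] with doubling rates (illustrative values):
def _example_local_conv_list2():
	dilation_rates = [1, 2, 4, 8, 16, 32]
	# pooling is skipped in the dilated branch, so pool_length/pool_stride are unused
	return [(32, 3, 1, 1e-04, d, 0, 1, 'relu', 1, 1, 0.1) for d in dilation_rates]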
# network1 for estimating weights, self-attention and network2 for predicting signals
# with gumbel sampling and multiple convolution layers
def get_model2a1_attention_1_2_2_sample5(config):
feature_dim, output_dim = config['feature_dim'], config['output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
loss_function = config['loss_function']
n_step_local_ori = config['n_step_local_ori']
input_region = Input(shape=(n_steps,n_step_local_ori,feature_dim))
layer_2 = get_model2a1_basic5_convolution(input_region,config)
print(layer_2.shape)
config['sample_attention'] = 1
if config['sample_attention']>=1:
number, typeid = 3, 2
units1 = config['units1']
config['units1'] = 0
logits_T = construct_gumbel_selector1(layer_2,config,number,typeid) # shape: (n_steps,1)
config['units1'] = units1
# k = 10
if config['sample_attention']==1:
tau, k, typeid_sample = config['tau'], config['n_select'], config['typeid_sample']
print('sample_attention',tau,k,typeid,activation3,typeid_sample)
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
else:
pass
attention1 = Sample_Concrete1(tau,k,n_steps,typeid_sample)(logits_T) # output shape: (batch_size, n_steps, 1)
else:
attention1 = logits_T
# encode the input 2
if config['select2']==1:
dense_layer_output1 = construct_basic1(layer_2,config)
else:
dense_layer_output1 = layer_2
dense_layer_output2 = Multiply()([dense_layer_output1, attention1])
else:
dense_layer_output2 = layer_2
config['activation2'] = ''
output = get_model2a1_basic1(dense_layer_output2,config)
# output = Activation("softmax")(output)
	model = Model(inputs = input_region, outputs = output)
# adam = Adam(lr = lr, clipnorm=CLIPNORM1)
optimizer = Adam(learning_rate = lr, clipnorm=CLIPNORM1)
# optimizer = find_optimizer(config)
model.compile(optimizer=optimizer, loss = loss_function)
model.summary()
return model
# network1 for estimating weights, self-attention and network2 for predicting signals
# with gumbel sampling and multiple convolution layers
def get_model2a1_attention_1_2_2_sample5_1(config):
feature_dim, output_dim = config['feature_dim'], config['output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
loss_function = config['loss_function']
# n_step_local_ori, n_step_local = config['n_step_local_ori'], config['n_step_local']
n_step_local_ori = config['n_step_local_ori']
# input_shape_1 = [n_step_local,feature_dim]
# return_sequences_flag1 = True
# config.update({'input_shape_1':input_shape_1})
# encoder_1 = get_model2a1_basic5(config)
# encoder_1.summary()
input_region = Input(shape=(n_steps,n_step_local_ori,feature_dim))
# layer_2 = TimeDistributed(encoder_1,name='encoder_1')(input_region) # shape: (n_steps,feature_dim2*2)
# print(layer_2.shape)
# feature_dim1, feature_dim2, return_sequences_flag = config['local_vec_1']
# if return_sequences_flag==True:
# if config['attention2_local']==1:
# layer_2, attention2 = TimeDistributed(SeqSelfAttention(return_attention=True, attention_activation=activation_self),name='attention_local_1')(layer_2)
# layer_2 = TimeDistributed(GlobalMaxPooling1D(),name='global_pooling_local_1')(layer_2)
# layer_2 = get_model2a1_basic5_1(input_region,config)
layer_2 = get_model2a1_basic5_convolution(input_region,config)
print(layer_2.shape)
config['sample_attention'] = 1
if config['sample_attention']>=1:
number, typeid = 3, 2
units1 = config['units1']
config['units1'] = 0
logits_T = utility_1_5.construct_gumbel_selector2_ori(layer_2,config,number,typeid) # shape: (n_steps,1)
config['units1'] = units1
# k = 10
if config['sample_attention']==1:
tau, k, typeid_sample = config['tau'], config['n_select'], config['typeid_sample']
print('sample_attention',tau,k,typeid,activation3,typeid_sample)
if activation3=='linear':
typeid_sample = 1
elif activation3=='tanh':
typeid_sample = 5
else:
pass
attention1 = Sample_Concrete1(tau,k,n_steps,typeid_sample)(logits_T) # output shape: (batch_size, n_steps, 1)
else:
attention1 = logits_T
# encode the input 2
if config['select2']==1:
dense_layer_output1 = construct_basic1(layer_2,config)
else:
dense_layer_output1 = layer_2
dense_layer_output2 = Multiply()([dense_layer_output1, attention1])
else:
dense_layer_output2 = layer_2
config['activation2'] = ''
output = utility_1_5.get_model2a1_basic1_2(dense_layer_output2,config)
# output = Activation("softmax")(output)
	model = Model(inputs = input_region, outputs = output)
# adam = Adam(lr = lr, clipnorm=CLIPNORM1)
optimizer = Adam(learning_rate = lr, clipnorm=CLIPNORM1)
# optimizer = find_optimizer(config)
model.compile(optimizer=optimizer, loss = loss_function)
model.summary()
return model
# dilated convolutions
def get_model2a1_attention_1_2_2_sample6(config):
feature_dim, output_dim = config['feature_dim'], config['output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
loss_function = config['loss_function']
n_step_local_ori = config['n_step_local_ori']
input_region = Input(shape=(n_steps,n_step_local_ori,feature_dim))
layer_2 = get_model2a1_basic5_convolution1(input_region,config)
print(layer_2.shape)
if 'feature_dim3' in config:
feature_dim3 = config['feature_dim3']
else:
feature_dim3 = []
regularizer2 = config['regularizer2']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
cnt1 = 0
x1 = layer_2
for t_feature_dim3 in feature_dim3:
cnt1 += 1
x1 = TimeDistributed(Dense(t_feature_dim3,
kernel_regularizer=regularizers.l2(regularizer2)),
name = 'conv1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_3_%d'%(cnt1))(x1)
if 'output_dim_1' in config:
output_dim1 = config['output_dim_1']
else:
output_dim1 = 1
output = Dense(output_dim1,name='dense2')(x1)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation(activation_basic,name='activation2')(output)
# output = Activation("softmax")(output)
	model = Model(inputs = input_region, outputs = output)
# adam = Adam(lr = lr, clipnorm=CLIPNORM1)
optimizer = Adam(learning_rate = lr, clipnorm=CLIPNORM1)
# optimizer = find_optimizer(config)
model.compile(optimizer=optimizer, loss = loss_function)
model.summary()
return model
# dilated convolutions, sequence features
def get_model2a1_attention_1_2_2_sample6_1(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
n_steps = config['context_size']
lr = config['lr']
activation = config['activation']
activation_self = config['activation_self']
activation3 = config['activation3']
typeid_sample = config['typeid_sample']
if not('loss_function' in config):
loss_function = 'mean_squared_error'
else:
loss_function = config['loss_function']
input1 = Input(shape = (n_steps,feature_dim))
number = 1
# if 'typeid2' in config:
# typeid = config['typeid2']
# else:
# typeid = 2
typeid = 2
# logits_T = construct_gumbel_selector1(input1,config,number,typeid)
# k = 10
# encode the input 2
units_2 = config['units2']
if units_2>0:
dim2 = units_2
dense_layer_output1 = TimeDistributed(Dense(units_2,name='dense_2'))(input1)
else:
dim2 = feature_dim
dense_layer_output1 = input1
layer_1 = dense_layer_output1
config['local_conv_list1'] = []
layer_2 = get_model2a1_basic5_convolution1(layer_1,config)
print(layer_2.shape)
# output = get_model2a1_basic1(dense_layer_output2,config)
if 'feature_dim3' in config:
feature_dim3 = config['feature_dim3']
else:
feature_dim3 = []
regularizer2 = config['regularizer2']
# regularizer2_2 = config['regularizer2_2']
if 'activation_basic' in config:
activation_basic = config['activation_basic']
else:
activation_basic = 'sigmoid'
cnt1 = 0
x1 = layer_2
for t_feature_dim3 in feature_dim3:
cnt1 += 1
x1 = TimeDistributed(Dense(t_feature_dim3,
kernel_regularizer=regularizers.l2(regularizer2)),
name = 'conv1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(BatchNormalization(),name='bnorm1_3_%d'%(cnt1))(x1)
x1 = TimeDistributed(Activation('relu'),name='activation1_3_%d'%(cnt1))(x1)
if 'output_dim_1' in config:
output_dim1 = config['output_dim_1']
else:
output_dim1 = 1
output = Dense(output_dim1,name='dense2')(x1)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation(activation_basic,name='activation2')(output)
# output = Activation("softmax")(output)
	model = Model(inputs = input1, outputs = output)
# adam = Adam(lr = lr, clipnorm=CLIPNORM1)
optimizer = Adam(learning_rate = lr, clipnorm=CLIPNORM1)
# optimizer = find_optimizer(config)
model.compile(optimizer=optimizer, loss = loss_function)
model.summary()
return model
def get_model2a1_attention(input_shape,config):
feature_dim, output_dim, fc1_output_dim = config['feature_dim'], config['output_dim'], config['fc1_output_dim']
input1 = Input(shape = (None,feature_dim))
lr = config['lr']
activation = config['activation']
biLSTM_layer1 = Bidirectional(LSTM(input_shape=(None, feature_dim),
units=output_dim,
return_sequences = True,
recurrent_dropout = 0.1),name='bilstm1')
x1 = biLSTM_layer1(input1)
# x1 = BatchNormalization()(x1)
x1 = LayerNormalization(name='layernorm1')(x1)
# x1 = Activation('tanh',name='activation')(x1)
if activation!='':
x1 = Activation(activation,name='activation')(x1)
# x1 = Flatten()(x1)
x1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x_1, attention = SeqSelfAttention(return_attention=True, attention_activation='sigmoid',name='attention1')(x1)
# x1 = x_1[0]
# attention = x_1[1]
if fc1_output_dim>0:
dense1 = Dense(fc1_output_dim,name='dense1')(x1)
dense1 = BatchNormalization(name='batchnorm1')(dense1)
dense1 = Activation(activation,name='activation1')(dense1)
dense_layer_output = Dropout(0.5)(dense1)
else:
dense_layer_output = x1
# concat_layer_output = Concatenate(axis=-1)([dense_layer_output,input2])
output = Dense(1,name='dense2')(dense_layer_output)
output = BatchNormalization(name='batchnorm2')(output)
output = Activation("sigmoid",name='activation2')(output)
# output = Activation("softmax")(output)
	model = Model(inputs = input1, outputs = output)
	adam = Adam(learning_rate = lr)
# model.compile(adam,loss = 'binary_crossentropy',metrics=['accuracy'])
# model.compile(adam,loss = 'kullback_leibler_divergence',metrics=['accuracy'])
# model.compile(adam,loss = 'mean_absolute_percentage_error')
model.compile(adam,loss = 'mean_squared_error')
model.summary()
return model
# select sample
def sample_select2a(x_mtx, y, idx_sel_list, tol=5, L=5):
num1 = len(idx_sel_list)
# L = 5
size1 = 2*L+1
feature_dim = x_mtx.shape[1]
vec1_list = np.zeros((num1,size1))
vec2_list = np.zeros((num1,size1))
# feature_list = np.zeros((num1,size1*feature_dim))
feature_list = np.zeros((num1,size1,feature_dim))
signal_list = np.zeros((num1,size1))
for i in range(0,num1):
temp1 = idx_sel_list[i]
t_chrom, t_serial = temp1[0], temp1[1]
id1 = []
for k in range(-L,L+1):
id2 = np.min((np.max((i+k,0)),num1-1))
id1.append(id2)
# print(id1)
vec1 = []
start1 = t_serial
t_id = i
for k in range(1,L+1):
id2 = id1[L-k]
if (idx_sel_list[id2,0]==t_chrom) and (idx_sel_list[id2,1]>=start1-tol):
vec1.append(id2)
t_id = id2
start1 = idx_sel_list[id2,1]
else:
vec1.append(t_id)
vec1 = vec1[::-1]
start1 = t_serial
t_id = i
vec1.append(t_id)
for k in range(1,L+1):
id2 = id1[L+k]
if (idx_sel_list[id2,0]==t_chrom) and (idx_sel_list[id2,1]<=start1+tol):
vec1.append(id2)
t_id = id2
start1 = idx_sel_list[id2,1]
else:
vec1.append(t_id)
t_feature = x_mtx[vec1]
vec1_list[i] = idx_sel_list[vec1,1]
vec2_list[i] = vec1
feature_list[i] = t_feature
signal_list[i] = y[vec1]
if i%50000==0:
print(i,t_feature.shape,vec1,vec1_list[i])
signal_list = np.expand_dims(signal_list, axis=-1)
return feature_list, signal_list, vec1_list, vec2_list
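# Toy illustration of the context-window logic above: each position takes up to L
# neighbors per side, but only while they stay on the same chromosome and within
# `tol` serial units; otherwise the last valid index is repeated as padding.
def _example_sample_select2a():
	idx_sel_list = np.asarray([[1,0],[1,1],[1,2],[2,10],[2,11]])
	x_mtx = np.arange(10,dtype=np.float32).reshape(5,2)
	y = np.arange(5,dtype=np.float32)
	# with L=1, position 2 (chrom 1) does not borrow from position 3 (chrom 2)
	return sample_select2a(x_mtx, y, idx_sel_list, tol=5, L=1)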
# select sample
def sample_select2a1(x_mtx, y, idx_sel_list, seq_list, tol=5, L=5):
num_sample = len(idx_sel_list)
num1 = len(seq_list)
size1 = 2*L+1
print(num_sample,num1,size1)
feature_dim = x_mtx.shape[1]
vec1_local = np.zeros((num_sample,size1),dtype=int)
vec1_serial = np.zeros((num_sample,size1),dtype=int)
feature_mtx = np.zeros((num_sample,size1,feature_dim),dtype=np.float32)
signal_mtx = np.zeros((num_sample,size1))
ref_serial = idx_sel_list[:,1]
for i in range(0,num1):
s1, s2 = seq_list[i][0], seq_list[i][1]+1
serial = ref_serial[s1:s2]
# print('start stop',s1,s2,serial)
num2 = len(serial)
t1 = np.outer(list(range(s1,s2)),np.ones(size1))
t2 = t1 + np.outer(np.ones(num2),list(range(-L,L+1)))
t2[t2<s1] = s1
t2[t2>=s2] = s2-1
idx = np.int64(t2)
# print(idx)
vec1_local[s1:s2] = idx
vec1_serial[s1:s2] = ref_serial[idx]
feature_mtx[s1:s2] = x_mtx[idx]
signal_mtx[s1:s2] = y[idx]
if i%10==0:
print(i,num2,vec1_local[s1],vec1_serial[s1])
# signal_mtx = signal_mtx[:,np.newaxis]
signal_mtx = np.expand_dims(signal_mtx, axis=-1)
return feature_mtx, signal_mtx, vec1_serial, vec1_local
def read_predict(y, vec, idx, flanking1=3, type_id=0, base1=0.25):
num1, context_size = vec.shape[0], vec.shape[1]
if len(idx)==0:
idx = range(0,num1)
a1 = np.asarray(range(0,context_size))
a2 = np.ones((num1,1))
mtx1 = np.outer(a2,a1)
# weight = 0.5*np.ones(context_size)
weight = np.ones(context_size)
L = int((context_size-1)*0.5)
if type_id==1:
for i in range(0,L+1):
weight[i] = base1+(1-base1)*i/L
for i in range(L,context_size):
weight[i] = 1-(1-base1)*(i-L)/L
if flanking1<=L:
idx_range = np.asarray(range(L-flanking1,L+flanking1+1))
weight[idx_range] = 1
# weight = weight/np.sum(weight)
weight_vec = np.outer(a2,weight)
# print(num1,context_size,L,idx_range)
# print(weight)
serial1 = vec[:,L]
t1 = np.sum(serial1!=idx)
if t1>0:
print("error! read predict %d"%(t1))
return
## idx_vec, pos_vec, weight_vec = np.ravel(vec), np.ravel(mtx1), np.ravel(mtx2)
# idx_vec, weight_vec = np.ravel(vec), np.ravel(mtx2)
# y1 = np.ravel(y)
# value = np.zeros(num1)
# for i in range(0,num1):
# b1 = np.where(idx_vec==idx[i])[0]
# if len(b1)==0:
# print("error! %d %d"%(i,idx[i]))
# t_weight = weight_vec[b1]
# t_weight = t_weight*1.0/np.sum(t_weight)
# value[i] = np.dot(y1[b1],t_weight)
value = np.zeros(num1)
y = y.reshape((y.shape[0],y.shape[1]))
for i in range(0,num1):
b1 = (vec==idx[i])
# if len(b1)==0:
# print("error! %d %d"%(i,idx[i]))
t_weight = weight_vec[b1]
t_weight = t_weight*1.0/np.sum(t_weight)
value[i] = np.dot(y[b1],t_weight)
return value
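# The weighting above is triangular: the window center gets weight 1, tapering
# linearly to base1 at the edges, with a flat plateau of width 2*flanking1+1 around
# the center. Standalone demonstration for context_size=7 (L=3), flanking1=1:
def _example_read_predict_weight(base1=0.25, flanking1=1, context_size=7):
	L = int((context_size-1)*0.5)
	weight = np.ones(context_size)
	for i in range(0,L+1):
		weight[i] = base1+(1-base1)*i/L
	for i in range(L,context_size):
		weight[i] = 1-(1-base1)*(i-L)/L
	weight[L-flanking1:L+flanking1+1] = 1
	return weight # [0.25, 0.5, 1.0, 1.0, 1.0, 0.5, 0.25]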
def read_predict_1(y, vec, idx, flanking1=3, type_id=0, base1=0.25):
num1, context_size = vec.shape[0], vec.shape[1]
if len(idx)==0:
idx = range(0,num1)
sample_num1 = len(idx)
L = int((context_size-1)*0.5)
serial1 = vec[:,L]
# t1 = np.sum(serial1!=idx)
# if t1>0:
# print("error! read predict %d"%(t1))
# return
assert list(serial1)==list(idx)
# y = y.reshape((y.shape[0],y.shape[1]))
dim1 = y.shape[-1]
value = np.zeros((sample_num1,dim1),dtype=np.float32)
if type_id==0:
for i in range(0,sample_num1):
b1 = (vec==idx[i])
value[i] = np.mean(y[b1],axis=0)
else:
a1 = np.asarray(range(0,context_size))
a2 = np.ones((num1,1))
mtx1 = np.outer(a2,a1)
# weight = 0.5*np.ones(context_size)
weight = np.ones(context_size)
for i in range(0,L+1):
weight[i] = base1+(1-base1)*i/L
for i in range(L,context_size):
weight[i] = 1-(1-base1)*(i-L)/L
if flanking1<=L:
idx_range = np.asarray(range(L-flanking1,L+flanking1+1))
weight[idx_range] = 1
# weight = weight/np.sum(weight)
weight_vec = np.outer(a2,weight)
# print(num1,context_size,L,idx_range)
# print(weight)
for i in range(0,sample_num1):
b1 = (vec==idx[i])
			if np.sum(b1)==0:
print("error! %d %d"%(i,idx[i]))
t_weight = weight_vec[b1]
t_weight = t_weight*1.0/np.sum(t_weight)
t_weight = np.tile(t_weight,[dim1,1]).T
# value[i] = np.dot(y[b1],t_weight)
value[i] = np.sum(y[b1]*t_weight,axis=0)
return value
def read_predict_weighted(y, vec, idx, flanking1=3):
num1, context_size = vec.shape[0], vec.shape[1]
if len(idx)==0:
idx = range(0,num1)
a1 = np.asarray(range(0,context_size))
a2 = np.ones((num1,1))
mtx1 = np.outer(a2,a1)
base1 = 0.25
weight = base1*np.ones(context_size)
L = int((context_size-1)*0.5)
for i in range(0,context_size):
if i<=L:
weight[i] = base1+(1-base1)*i/L
else:
weight[i] = 1-(1-base1)*(i-L)/L
idx_range = np.asarray(range(L-flanking1,L+flanking1+1))
weight[idx_range] = 1
mtx2 = np.outer(a2,weight)
print(num1,context_size,L,idx_range)
print(weight)
serial1 = vec[:,L]
t1 = np.sum(serial1!=idx)
if t1>0:
print("error! %d"%(t1))
idx_vec, pos_vec, weight_vec = np.ravel(vec), np.ravel(mtx1), np.ravel(mtx2)
y1 = np.ravel(y)
value = np.zeros(num1)
for i in range(0,num1):
b1 = np.where(idx_vec==idx[i])[0]
if len(b1)==0:
print("error! %d %d"%(i,idx[i]))
t_weight = weight_vec[b1]
t_weight = t_weight*1.0/np.sum(t_weight)
value[i] = np.dot(y1[b1],t_weight)
return value
def dot_layer(inputs):
x,y = inputs
return K.sum(x*y,axis = -1,keepdims=True)
def corr(y_true, y_pred):
return np.min(np.corrcoef(y_true,y_pred))
def score_function(y_test, y_pred, y_proba):
auc = roc_auc_score(y_test,y_proba)
aupr = average_precision_score(y_test,y_proba)
precision = precision_score(y_test,y_pred)
recall = recall_score(y_test,y_pred)
accuracy = (np.sum(y_test == y_pred)*1.0 / len(y_test))
F1 = 2*precision*recall/(precision+recall)
# print(auc,aupr,precision,recall)
return accuracy, auc, aupr, precision, recall, F1
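# score_function relies on sklearn.metrics (roc_auc_score, average_precision_score,
# precision_score, recall_score), imported elsewhere in this module. Minimal usage
# sketch with toy labels:
def _example_score_function():
	y_test = np.asarray([0,0,1,1])
	y_proba = np.asarray([0.1,0.4,0.35,0.8])
	y_pred = np.int32(y_proba>0.5)
	# returns (accuracy, auc, aupr, precision, recall, F1)
	return score_function(y_test, y_pred, y_proba)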
def score_function_group(y_test, y_pred, y_proba, group_label):
group_label_vec = np.unique(group_label)
num1 = len(group_label_vec)
y_test_group = np.zeros(num1,dtype=np.int32)
y_pred_group = np.zeros(num1,dtype=np.int32)
y_prob_group = np.zeros(num1,dtype=np.float32)
for i in range(num1):
t_label = group_label_vec[i]
id1 = np.where(group_label==t_label)[0]
id2 = (y_test[id1]!=y_test[id1[0]])
if np.sum(id2)>0:
print('error!')
return -1
y_test_group[i] = y_test[id1[0]]
y_pred_group[i] = np.max(y_pred[id1])
y_prob_group[i] = np.max(y_proba[id1])
# print(t_label,id1,y_test[id1],y_pred[id1],y_proba[id1])
# print(auc,aupr,precision,recall)
accuracy, auc, aupr, precision, recall, F1 = score_function(y_test_group,y_pred_group,y_prob_group)
return accuracy, auc, aupr, precision, recall, F1
def load_samples(chrom_vec,chrom,y_label_ori,y_group1,y_signal_ori1,filename2,filename2a,kmer_size,kmer_dict1,generate):
x_mtx_vec, y_label_vec, y_group_vec, y_signal_ori_vec = [], [], [], []
for chrom_id in chrom_vec:
chrom_id1 = 'chr%s'%(chrom_id)
sel_idx = np.where(chrom==chrom_id1)[0]
print(('sel_idx:%d')%(len(sel_idx)))
if generate==0:
filename2 = 'training_mtx/training2_mtx_%s.npy'%(chrom_id)
if(os.path.exists(filename2)==True):
x_mtx = np.load(filename2)
x_kmer = np.load('training2_kmer_%s.npy'%(chrom_id))
else:
generate = 1
if generate==1:
x_kmer, x_mtx = load_seq_2(filename2a,kmer_size,kmer_dict1,sel_idx)
np.save('training2_kmer_%s'%(chrom_id),x_kmer)
np.save('training2_mtx_%s'%(chrom_id),x_mtx)
x_mtx = np.transpose(x_mtx,(0,2,1))
y_label, y_group, y_signal_ori = y_label_ori[sel_idx], y_group1[sel_idx], y_signal_ori1[sel_idx]
x_mtx_vec.extend(x_mtx)
y_label_vec.extend(y_label)
y_group_vec.extend(y_group)
y_signal_ori_vec.extend(y_signal_ori)
x_mtx, y_label, y_group, y_signal_ori = np.asarray(x_mtx_vec), np.asarray(y_label_vec), np.asarray(y_group_vec), np.asarray(y_signal_ori_vec)
print(x_mtx.shape,y_signal_ori.shape)
y_signal = signal_normalize(y_signal_ori,[0,1])
threshold = signal_normalize_query(0,[np.min(y_signal_ori),np.max(y_signal_ori)],[0,1])
return x_mtx, y_signal, y_label, threshold
def load_samples_kmer(chrom_vec,chrom,seq,kmer_size,kmer_dict1,path_1):
x_mtx_vec, y_label_vec, y_group_vec, y_signal_ori_vec = [], [], [], []
for chrom_id in chrom_vec:
chrom_id1 = 'chr%s'%(chrom_id)
sel_idx = np.where(chrom==chrom_id1)[0]
print(('sel_idx:%d')%(len(sel_idx)))
x_kmer = load_seq_2_kmer(seq,kmer_size,kmer_dict1,chrom_id1,sel_idx)
np.save('%s/training2_kmer_%s'%(path_1,chrom_id),x_kmer)
return True
def load_kmer_single(species_name):
path1 = './'
filename1 = '%s/training2_kmer_%s.npy'%(path1,species_name)
filename2 = '%s/training2_kmer_%s.serial.npy'%(path1,species_name)
data1 = np.load(filename1)
t_serial = np.load(filename2)
filename3 = '%s/estimate_rt/estimate_rt_%s.txt'%(path1,species_name)
filename3a = '%s/estimate_rt/estimate_rt_%s.sel.txt'%(path1,species_name)
temp1 = pd.read_csv(filename3,sep='\t')
	temp2 = pd.read_csv(filename3a,sep='\t')
	colname1, colname2 = list(temp1), list(temp2)
	chrom1, start1, stop1, serial1 = temp1[colname1[0]], temp1[colname1[1]], temp1[colname1[2]], temp1[colname1[3]]
	chrom2, start2, stop2, serial2 = temp2[colname2[0]], temp2[colname2[1]], temp2[colname2[2]], temp2[colname2[3]]
	map_idx = mapping_Idx(serial1,serial2)
	data1_sub = data1[map_idx]
print(data1.shape, data1_sub.shape)
return data1_sub, map_idx
# the mapped indices of selected regions
def load_map_idx(species_name):
path1 = './'
filename3 = '%s/estimate_rt/estimate_rt_%s.txt'%(path1,species_name)
filename3a = '%s/estimate_rt/estimate_rt_%s.sel.txt'%(path1,species_name)
temp1 = pd.read_csv(filename3,sep='\t')
temp2 = pd.read_csv(filename3a,sep='\t')
colname1, colname2 = list(temp1), list(temp2)
chrom1, start1, stop1, serial1 = temp1[colname1[0]], temp1[colname1[1]], temp1[colname1[2]], temp1[colname1[3]]
chrom2, start2, stop2, serial2 = temp2[colname2[0]], temp2[colname2[1]], temp2[colname2[2]], temp2[colname2[3]]
map_idx = mapping_Idx(serial1,serial2)
return serial1, serial2, map_idx
def dimension_reduction(x_ori,feature_dim,shuffle,sub_sample,type_id):
if shuffle==1 and sub_sample>0:
idx = np.random.permutation(x_ori.shape[0])
else:
idx = np.asarray(range(0,x_ori.shape[0]))
if (sub_sample>0) and (type_id!=7) and (type_id!=11):
id1 = idx[0:sub_sample]
else:
id1 = idx
if type_id==0:
# PCA
pca = PCA(n_components=feature_dim, whiten = False, random_state = 0)
if sub_sample>0:
pca.fit(x_ori[id1,:])
x = pca.transform(x_ori)
else:
x = pca.fit_transform(x_ori)
# X_pca_reconst = pca.inverse_transform(x)
elif type_id==1:
# Incremental PCA
n_batches = 10
inc_pca = IncrementalPCA(n_components=feature_dim)
for X_batch in np.array_split(x_ori, n_batches):
inc_pca.partial_fit(X_batch)
x = inc_pca.transform(x_ori)
# X_ipca_reconst = inc_pca.inverse_transform(x)
elif type_id==2:
# Kernel PCA
kpca = KernelPCA(kernel="rbf",n_components=feature_dim, gamma=None, fit_inverse_transform=True, random_state = 0, n_jobs=50)
kpca.fit(x_ori[id1,:])
x = kpca.transform(x_ori)
# X_kpca_reconst = kpca.inverse_transform(x)
elif type_id==3:
# Sparse PCA
sparsepca = SparsePCA(n_components=feature_dim, alpha=0.0001, random_state=0, n_jobs=50)
sparsepca.fit(x_ori[id1,:])
x = sparsepca.transform(x_ori)
elif type_id==4:
# SVD
SVD_ = TruncatedSVD(n_components=feature_dim,algorithm='randomized', random_state=0, n_iter=5)
SVD_.fit(x_ori[id1,:])
x = SVD_.transform(x_ori)
# X_svd_reconst = SVD_.inverse_transform(x)
elif type_id==5:
# Gaussian Random Projection
GRP = GaussianRandomProjection(n_components=feature_dim,eps = 0.5, random_state=2019)
GRP.fit(x_ori[id1,:])
x = GRP.transform(x_ori)
elif type_id==6:
# Sparse random projection
SRP = SparseRandomProjection(n_components=feature_dim,density = 'auto', eps = 0.5, random_state=2019, dense_output = False)
SRP.fit(x_ori[id1,:])
x = SRP.transform(x_ori)
elif type_id==7:
# MDS
mds = MDS(n_components=feature_dim, n_init=12, max_iter=1200, metric=True, n_jobs=4, random_state=2019)
x = mds.fit_transform(x_ori[id1])
elif type_id==8:
# ISOMAP
isomap = Isomap(n_components=feature_dim, n_jobs = 4, n_neighbors = 5)
isomap.fit(x_ori[id1,:])
x = isomap.transform(x_ori)
elif type_id==9:
# MiniBatch dictionary learning
miniBatchDictLearning = MiniBatchDictionaryLearning(n_components=feature_dim,batch_size = 1000,alpha = 1,n_iter = 25, random_state=2019)
if sub_sample>0:
miniBatchDictLearning.fit(x_ori[id1,:])
x = miniBatchDictLearning.transform(x_ori)
else:
x = miniBatchDictLearning.fit_transform(x_ori)
elif type_id==10:
# ICA
fast_ICA = FastICA(n_components=feature_dim, algorithm = 'parallel',whiten = True,max_iter = 100, random_state=2019)
if sub_sample>0:
fast_ICA.fit(x_ori[id1])
x = fast_ICA.transform(x_ori)
else:
x = fast_ICA.fit_transform(x_ori)
# X_fica_reconst = FastICA.inverse_transform(x)
elif type_id==11:
# t-SNE
tsne = TSNE(n_components=feature_dim,learning_rate=300,perplexity = 30,early_exaggeration = 12,init = 'random', random_state=2019)
x = tsne.fit_transform(x_ori)
elif type_id==12:
# Locally linear embedding
lle = LocallyLinearEmbedding(n_components=feature_dim, n_neighbors = np.max((int(feature_dim*1.5),500)),method = 'modified', n_jobs = 20, random_state=2019)
lle.fit(x_ori[id1,:])
x = lle.transform(x_ori)
elif type_id==13:
# Autoencoder
feature_dim_ori = x_ori.shape[1]
m = Sequential()
m.add(Dense(512, activation='elu', input_shape=(feature_dim_ori,)))
# m.add(Dense(256, activation='elu'))
m.add(Dense(feature_dim, activation='linear', name="bottleneck"))
# m.add(Dense(256, activation='elu'))
m.add(Dense(512, activation='elu'))
m.add(Dense(feature_dim_ori, activation='sigmoid'))
m.compile(loss='mean_squared_error', optimizer = Adam())
history = m.fit(x_ori[id1], x_ori[id1], batch_size=256, epochs=20, verbose=1)
encoder = Model(m.input, m.get_layer('bottleneck').output)
x = encoder.predict(x_ori)
Renc = m.predict(x_ori)
return x
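# Usage sketch for dimension_reduction: type_id selects the method (0: PCA ... 13:
# autoencoder, per the branches above). Fitting on a random subsample (shuffle=1,
# sub_sample>0) and then transforming the full matrix keeps the fit tractable.
def _example_dimension_reduction():
	x_ori = np.random.rand(1000,50)
	return dimension_reduction(x_ori,feature_dim=10,shuffle=1,sub_sample=500,type_id=0)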
def feature_transform(x_train, x_test, feature_dim_kmer, feature_dim, shuffle, sub_sample_ratio, type_id, normalize):
x_ori1 = np.vstack((x_train,x_test))
dim1 = x_ori1.shape[1]
dim2 = dim1-feature_dim_kmer
print("feature_dim_kmer",feature_dim_kmer,dim2)
x_ori = x_ori1[:,dim2:]
if normalize>=1:
sc = StandardScaler()
x_ori = sc.fit_transform(x_ori) # normalize data
# x_train_sub = sc.fit_transform(x_ori[0:num_train,:])
# x_test_sub = sc.transform(x_ori[num_train+num_test,:])
# x_train_sub = sc.fit_transform(x_ori[0:num_train,:])
# x_test_sub = sc.transform(x_ori[num_train+num_test,:])
num_train, num_test = x_train.shape[0], x_test.shape[0]
vec1 = ['PCA','Incremental PCA','Kernel PCA','Sparse PCA','SVD','GRP','SRP','MDS','ISOMAP','Minibatch','ICA','tSNE','LLE','Encoder']
start = time.time()
if sub_sample_ratio<1:
sub_sample = int(x_ori.shape[0]*sub_sample_ratio)
else:
sub_sample = -1
x = dimension_reduction(x_ori,feature_dim,shuffle,sub_sample,type_id)
stop = time.time()
print("feature transform %s"%(vec1[type_id]),stop - start)
x1 = np.hstack((x_ori1[:,0:dim2],x))
if normalize>=2:
sc = StandardScaler()
x1 = sc.fit_transform(x1)
x_train1, x_test1 = x1[0:num_train], x1[num_train:num_train+num_test]
print(x_train.shape,x_train1.shape,x_test.shape,x_test1.shape)
return x_train1, x_test1
# select sample
def sample_select(x_mtx, idx_sel_list, tol=5, L=5):
num1 = len(idx_sel_list)
feature_dim = x_mtx.shape[1]
# L = 5
size1 = 2*L+1
vec1_list = np.zeros((num1,size1))
feature_list = np.zeros((num1,size1*feature_dim))
for i in range(0,num1):
temp1 = idx_sel_list[i]
t_chrom, t_serial = temp1[0], temp1[1]
id1 = []
for k in range(-L,L+1):
id2 = np.min((np.max((i+k,0)),num1-1))
id1.append(id2)
# print(id1)
vec1 = []
start1 = t_serial
t_id = i
for k in range(1,L+1):
id2 = id1[L-k]
if (idx_sel_list[id2,0]==t_chrom) and (idx_sel_list[id2,1]>=start1-tol):
vec1.append(id2)
t_id = id2
start1 = idx_sel_list[id2,1]
else:
vec1.append(t_id)
vec1 = vec1[::-1]
start1 = t_serial
t_id = i
vec1.append(t_id)
for k in range(1,L+1):
id2 = id1[L+k]
if (idx_sel_list[id2,0]==t_chrom) and (idx_sel_list[id2,1]<=start1+tol):
vec1.append(id2)
t_id = id2
start1 = idx_sel_list[id2,1]
else:
vec1.append(t_id)
t_feature = x_mtx[vec1]
vec1_list[i] = idx_sel_list[vec1,1]
feature_list[i] = np.ravel(t_feature)
if i%10000==0:
print(i,t_feature.shape,vec1,vec1_list[i])
return feature_list, vec1_list
|
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
# Define a class to receive the characteristics of each line detection
class Lane():
def __init__(self):
        # the current binary warped image
        self.current_warped_binary = None
        # shape of the current image
        self.current_warped_binary_shape = []
        # x values for detected line pixels
        self.allx = None
        # y values for detected line pixels
        self.ally = None
        # y values over which the fit is evaluated (the independent variable)
        self.ploty = None
        # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # was the line detected in the last iteration?
        self.detected = False
        # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # number of recent frames to keep
        self.n = 5
        # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # x values of the last n fits of the line
        self.recent_fitted_xs = []
        # average x values of the fitted line over the last n iterations
        self.average_fitted_x = []
        # fitted x values for the current frame
        self.current_fitted_x = []
        # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # polynomial coefficients of the last n fits
        self.recent_fits = []
        # polynomial coefficients averaged over the last n iterations
        self.average_fit = []
        # polynomial coefficients for the most recent fit
        self.current_fit = [np.array([False])]
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0, 0, 0], dtype='float')
        # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # radius of curvature of the line in some units
        self.radius_of_curvature = []
        # distance in meters of vehicle center from the line
        self.line_base_pos = None
    # detect lane-line pixels from scratch on a new frame
def find_lane_pixels(self, binary_warped, location):
self.current_warped_binary = binary_warped
self.current_warped_binary_shape = binary_warped.shape
self.ploty = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
# Take a histogram of the bottom half of the image
histogram = np.sum(binary_warped[binary_warped.shape[0] // 2:, :], axis=0)
# Create an output image to draw on and visualize the result
# out_img = np.dstack((binary_warped, binary_warped, binary_warped))
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
        midpoint = int(histogram.shape[0] // 2)
if location == "left":
base = np.argmax(histogram[:midpoint])
elif location == "right":
base = np.argmax(histogram[midpoint:]) + midpoint
# HYPERPARAMETERS
# Choose the number of sliding windows
nwindows = 9
# Set the width of the windows +/- margin
margin = 80
# Set minimum number of pixels found to recenter window
minpix = 50
# Set height of windows - based on nwindows above and image shape
        window_height = int(binary_warped.shape[0] // nwindows)
# Identify the x and y positions of all nonzero pixels in the image
        nonzero = binary_warped.nonzero()  # row/col indices of all nonzero pixels
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated later for each window in nwindows
current = base
# Create empty lists to receive left and right lane pixel indices
lane_inds = []
# right_lane_inds = []
# Step through the windows one by one
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = binary_warped.shape[0] - (window + 1) * window_height
win_y_high = binary_warped.shape[0] - window * window_height
win_x_low = current - margin
win_x_high = current + margin
# # Draw the windows on the visualization image
# cv2.rectangle(out_img, (win_xleft_low, win_y_low),
# (win_xleft_high, win_y_high), (0, 255, 0), 2)
# cv2.rectangle(out_img, (win_xright_low, win_y_low),
# (win_xright_high, win_y_high), (0, 255, 0), 2)
            # build a boolean mask over every nonzero pixel
# Identify the nonzero pixels in x and y within the window #
good_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_x_low) & (nonzerox < win_x_high)).nonzero()[0]
# Append these indices to the lists
lane_inds.append(good_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_inds) > minpix:
                current = int(np.mean(nonzerox[good_inds]))
# Concatenate the arrays of indices (previously was a list of lists of pixels)
try:
lane_inds = np.concatenate(lane_inds)
except ValueError:
# Avoids an error if the above is not implemented fully
pass
# Extract left and right line pixel positions
x = nonzerox[lane_inds]
y = nonzeroy[lane_inds]
self.allx = x
self.ally = y
return x, y
    # search for lane pixels around the previous polynomial fit
def search_pixel_around_poly(self, binary_warped):
self.current_warped_binary = binary_warped
self.current_warped_binary_shape = binary_warped.shape
self.ploty = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
# HYPERPARAMETER
# Choose the width of the margin around the previous polynomial to search
# The quiz grader expects 100 here, but feel free to tune on your own!
margin = 80
# Grab activated pixels
nonzero = binary_warped.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
fit = self.recent_fits[-1]
### TO-DO: Set the area of search based on activated x-values ###
### within the +/- margin of our polynomial function ###
### Hint: consider the window areas for the similarly named variables ###
### in the previous quiz, but change the windows to our new search area ###
lane_inds = ((nonzerox > (fit[0] * (nonzeroy ** 2) + fit[1] * nonzeroy + fit[2] - margin)) & (
nonzerox < (fit[0] * (nonzeroy ** 2) + fit[1] * nonzeroy + fit[2] + margin)))
# Again, extract left and right line pixel positions
x = nonzerox[lane_inds]
y = nonzeroy[lane_inds]
self.allx = x
self.ally = y
return x, y
def fit_polynomial(self):
ploty = self.ploty
# Fit a second order polynomial to each using `np.polyfit`
fit = np.polyfit(self.ally, self.allx, 2)
        # store the current fit
self.current_fit = fit
        # coefficient difference vs. the previous fit
if len(self.recent_fits) == 0:
self.diffs = [0,0,0]
else:
new = np.array(self.current_fit)
old = np.array(self.recent_fits[-1])
self.diffs = new - old
        # push into the fit history (keep the last n)
if len(self.recent_fits) < self.n:
self.recent_fits.append(self.current_fit)
elif len(self.recent_fits) == self.n:
self.recent_fits.pop(0)
self.recent_fits.append(self.current_fit)
else:
self.recent_fits.append(self.current_fit)
            self.recent_fits = self.recent_fits[-self.n:]  # keep only the last n
        # average over the stored fits
self.average_fit = np.array(self.recent_fits).mean(axis=0)
try:
x_fitted = self.average_fit[0] * ploty ** 2 + self.average_fit[1] * ploty + self.average_fit[2]
except TypeError:
# Avoids an error if `left` and `right_fit` are still none or incorrect
print('The function failed to fit a line!')
x_fitted = 1 * ploty ** 2 + 1 * ploty
self.detected = False
else:
self.detected = True
self.current_fitted_x = x_fitted
        # push into the fitted-x history (keep the last n)
if len(self.recent_fitted_xs) < self.n:
self.recent_fitted_xs.append(self.current_fitted_x)
elif len(self.recent_fitted_xs) == self.n:
self.recent_fitted_xs.pop(0)
self.recent_fitted_xs.append(self.current_fitted_x)
else:
self.recent_fitted_xs.append(self.current_fitted_x)
            self.recent_fitted_xs = self.recent_fitted_xs[-self.n:]  # keep only the last n
self.average_fitted_x = np.array(self.recent_fitted_xs).mean(axis=0)
return self.average_fitted_x
def fit(self, binary_warped,location,sequence=True):
if sequence:
if not self.detected:
                # nothing detected previously: restart the sliding-window search
self.find_lane_pixels(binary_warped,location)
else:
                # search around the previous fit
self.search_pixel_around_poly(binary_warped)
            # TODO: what if two consecutive detections differ too much?
            # TODO: check whether that case actually occurs
self.fit_polynomial()
# if np.abs(self.diffs).sum() > 20:
# self.current_fit = np.array(self.recent_fits[:-1]).mean(axis=0)
# self.recent_fits[-1] = self.current_fit
# self.average_fit = np.array(self.recent_fits).mean(axis=0)
#
# self.current_fitted_x = np.array(self.recent_fitted_xs[:-1]).mean(axis=0)
# self.recent_fitted_xs[-1] = self.current_fitted_x
# self.average_fitted_x = np.array(self.recent_fitted_xs).mean(axis=0)
else:
self.find_lane_pixels(binary_warped, location)
self.fit_polynomial()
def measure_curvature_real(self,ploty, x, y):
'''
Calculates the curvature of polynomial functions in meters.
'''
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30 / 720 # meters per pixel in y dimension
xm_per_pix = 3.7 / 700 # meters per pixel in x dimension
fit_cr = np.polyfit(y * ym_per_pix, x * xm_per_pix, 2)
# Define y-value where we want radius of curvature
# We'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
# Calculation of R_curve (radius of curvature)
curverad = ((1 + (2 * fit_cr[0] * y_eval * ym_per_pix + fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * fit_cr[0])
self.radius_of_curvature = curverad
return curverad
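# For a quadratic fit x = A*y^2 + B*y + C, the radius of curvature used above is
# R = (1 + (2*A*y + B)**2)**1.5 / |2*A|. A standalone check of that formula with
# illustrative coefficients (already in meter units):
def _example_curvature(A=1e-4, B=0.01, y_eval=719):
    # larger |A| (a sharper bend) gives a smaller radius
    return (1 + (2 * A * y_eval + B) ** 2) ** 1.5 / np.absolute(2 * A)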
if __name__ == "__main__":
from lane.perspective import perspective,src,dst
from lane.gaussian_blur import gaussian_blur
from lane.combined_threshold import combined_threshold
from lane.measure_vehicle_pos import measure_vehicle_pos
from lane.draw_lane import draw_lane
image = mpimg.imread('../output_images/undistorted/straight_lines1-undistorted.jpg')
image = gaussian_blur(image, 3)
combined = combined_threshold(image, ksize=3,
th=[[20, 100], [25, 254], [100, 250], [0.6, 1.2], [180, 254], [250, 0]])
combined = gaussian_blur(combined, 3)
perspectived_img = perspective(combined,src,dst)
# plt.imshow(perspectived_img,cmap="gray")
# plt.show()
left_lane = Lane()
left_lane.fit(perspectived_img,"left")
right_lane = Lane()
right_lane.fit(perspectived_img, "right")
result = left_lane.visual(perspectived_img,"left")
plt.imshow(result)
result = right_lane.visual(perspectived_img, "right")
plt.imshow(result)
plt.show()
    # # compute curvature
    # left_r = left_lane.measure_curvature_real(left_lane.ploty, left_lane.average_fitted_x, left_lane.ploty)
    # right_r = left_lane.measure_curvature_real(right_lane.ploty, right_lane.average_fitted_x, right_lane.ploty)
    #
    # # compute the vehicle offset
    # v = measure_vehicle_pos(left_lane.average_fitted_x, right_lane.average_fitted_x,left_lane.current_warped_binary_shape[1])
    #
    # # draw the lane
    # img = draw_lane(image, combined, dst, src,left_lane.current_fitted_x, right_lane.current_fitted_x, right_lane.ploty)
    # plt.imshow(img)
    # # print overlay text
    # plt.text(0,60,"Radius of Curvature = %d(m)"%int(r),fontdict={'size': 20, 'color': 'w'})
    # plt.text(0,120, "Vehicle is %.2f(m) left of center" % v, fontdict={'size': 20, 'color': 'w'})
# plt.show()
|
# Exercise: create a program that reads the name and grades of several students and stores everything in a composite list. At the end, show a report with each student's average, and let the user view any individual student's grades.
aluno=[]
while True:
nome=str(input('Digite o nome do aluno: '))
nota1=float(input('Digite a nota 1: '))
nota2=float(input('Digite a nota 2: '))
nota3=float(input('Digite a nota 3: '))
nota4=float(input('Digite a nota 4: '))
media=(nota1+nota2+nota3+nota4)/4
if media>=6:
apv='APROVADO'
else:
apv='REPROVADO'
aluno.append([nome,[nota1,nota2,nota3,nota4],media,apv])
rp=str(input('Quer continuar? [S/N] ')).upper()
    if rp=='N':
break
print('##'*30)
print(f'{"Nº":<4}{"NOME":<10}{"MÉDIA":>8}{"RESULTADO":>12}')
print('-'*35)
for i,n in enumerate(aluno):
    print(f'{i:<4}{n[0]:<10}{n[2]:>8.1f}{n[3]:>12}')
while True:
pert=int(input('Digite o número do aluno para ver a nota:[999 para interromper] '))
if pert==999:
print('FINALIZANDO O PROGRAMA')
break
    if pert<=len(aluno)-1:
        print(f'As notas do aluno {aluno[pert][0]} são: {aluno[pert][1]}')
    else:
        print('Aluno não encontrado!')
|
import time
import unittest
from operator import attrgetter
from typing import Dict, List, Tuple
from locust import User
from locust.dispatch import UsersDispatcher
from locust.runners import WorkerNode
from locust.test.util import clear_all_functools_lru_cache
_TOLERANCE = 0.025
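# The tests below step the dispatcher one dispatch cycle at a time, asserting both the
# per-worker user counts and the pacing (each cycle should take about
# _wait_between_dispatch seconds, within _TOLERANCE). When only the final distribution
# matters, a dispatcher can also be drained in a loop; a small illustrative helper
# (not used by the tests below):
def _final_distribution(dispatcher: UsersDispatcher) -> Dict[str, Dict[str, int]]:
    dispatch: Dict[str, Dict[str, int]] = {}
    for dispatch in dispatcher:  # each iteration yields {worker_id: {user_class: count}}
        pass
    return dispatch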
class TestRampUpUsersFromZero(unittest.TestCase):
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_0_5(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
sleep_time = 0.2 # Speed-up test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_1(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
sleep_time = 0.2 # Speed-up test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=1)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_4_workers_with_spawn_rate_of_1(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
worker_node4 = WorkerNode("4")
sleep_time = 0.2 # Speed-up test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3, worker_node4], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=1)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 1, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 1, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 1, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 1},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 1, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 1, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_2(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
        sleep_time = 0.2  # Speed up the test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=2)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_2_4(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
        sleep_time = 0.2  # Speed up the test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=2.4)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_3(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
        sleep_time = 0.2  # Speed up the test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_4(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
        sleep_time = 0.2  # Speed up the test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=4)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_up_users_to_3_workers_with_spawn_rate_of_9(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
        sleep_time = 0.2  # Speed up the test
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3], user_classes=[User1, User2, User3]
)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=9)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
class TestWaitBetweenDispatch(unittest.TestCase):
def test_wait_between_dispatch(self):
class User1(User):
weight = 1
user_classes = [User1]
workers = [WorkerNode("1")]
for spawn_rate, expected_wait_between_dispatch in [
(0.5, 1 / 0.5),
(1, 1),
(2, 1),
(2.4, 2 / 2.4),
(4, 1),
(9, 1),
]:
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=spawn_rate)
self.assertEqual(users_dispatcher._wait_between_dispatch, expected_wait_between_dispatch)
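# The expected waits asserted above are consistent with the dispatcher
# spawning max(1, floor(spawn_rate)) users per iteration and then sleeping
# users_per_iteration / spawn_rate seconds between iterations. A minimal
# sketch of that rule, inferred from the parametrized cases above (an
# assumption about the behaviour, not UsersDispatcher's actual code):
def _expected_wait_between_dispatch(spawn_rate: float) -> float:
    import math
    # e.g. spawn_rate=0.5 -> 1 user every 2s; spawn_rate=2.4 -> 2 users every 2/2.4s
    users_per_iteration = max(1, math.floor(spawn_rate))
    return users_per_iteration / spawn_rate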
class TestRampDownUsersToZero(unittest.TestCase):
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_0_5(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_1(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=1)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_4_workers_with_spawn_rate_of_1(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(4)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=1)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 1, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 1, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 1, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 1},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 1, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 1, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
"4": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_2(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=2)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_2_4(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=2.4)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_3(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 2},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 1, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_4(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=4)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 2, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 2, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 1},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 1, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
def test_ramp_down_users_to_3_workers_with_spawn_rate_of_9(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
workers = [WorkerNode(str(i + 1)) for i in range(3)]
initial_user_count = 9
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=9)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 0, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 0, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 0},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
class TestRampUpThenDownThenUp(unittest.TestCase):
def test_ramp_up_then_down_then_up(self):
for user1_weight, user2_weight, user3_weight, user4_weight, user5_weight in [
(1, 1, 1, 1, 1),
(1, 2, 3, 4, 5),
(1, 3, 5, 7, 9),
]:
class User1(User):
weight = user1_weight
class User2(User):
weight = user2_weight
class User3(User):
weight = user3_weight
class User4(User):
weight = user4_weight
class User5(User):
weight = user5_weight
all_user_classes = [User1, User2, User3, User4, User5]
for number_of_user_classes in range(1, len(all_user_classes) + 1):
user_classes = all_user_classes[:number_of_user_classes]
for max_user_count, min_user_count in [(30, 15), (54, 21), (14165, 1476)]:
for worker_count in [1, 3, 5, 9]:
workers = [WorkerNode(str(i + 1)) for i in range(worker_count)]
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
                        # Ramp up to `min_user_count` #########
users_dispatcher.new_dispatch(target_user_count=min_user_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
all_dispatched_users_ramp_up_to_min_user_count = list(users_dispatcher)
                        # Ramp up to `max_user_count` #########
users_dispatcher.new_dispatch(target_user_count=max_user_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
                        # Ramp back down to `min_user_count` #########
users_dispatcher.new_dispatch(target_user_count=min_user_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
all_dispatched_users_ramp_down_to_min_user_count = list(users_dispatcher)
                        # Assertions: the distribution at `min_user_count` must be the same
                        # whether it was reached by ramping up or by ramping back down #########
self.assertDictEqual(
all_dispatched_users_ramp_up_to_min_user_count[-1],
all_dispatched_users_ramp_down_to_min_user_count[-1],
)
class TestDispatchUsersToWorkersHavingTheSameUsersAsTheTarget(unittest.TestCase):
def test_dispatch_users_to_3_workers(self):
"""Final distribution should be {"User1": 3, "User2": 3, "User3": 3}"""
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
user_count = 9
for spawn_rate in [0.15, 0.5, 1, 2, 2.4, 3, 4, 9]:
workers = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=user_count, spawn_rate=user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
            sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=user_count, spawn_rate=spawn_rate)
users_dispatcher._wait_between_dispatch = sleep_time
ts = time.perf_counter()
self.assertDictEqual(
next(users_dispatcher),
{
"1": {"User1": 3, "User2": 0, "User3": 0},
"2": {"User1": 0, "User2": 3, "User3": 0},
"3": {"User1": 0, "User2": 0, "User3": 3},
},
)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
ts = time.perf_counter()
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
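            # Presumably clears functools.lru_cache-decorated helpers so the user
            # classes created in this iteration don't leak into the next spawn_rate
            # run (an assumption based on the helper's name, not verified here).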
clear_all_functools_lru_cache()
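# The assertions below rely on module-level helpers such as
# `_aggregate_dispatched_users` and `_user_count` (defined earlier in this
# file). Minimal equivalent sketches, inferred from how the helpers are used
# rather than copied from their actual definitions:
#
#     def _aggregate_dispatched_users(dispatched):
#         user_classes = next(iter(dispatched.values())).keys()
#         return {u: sum(per_worker[u] for per_worker in dispatched.values()) for u in user_classes}
#
#     def _user_count(dispatched):
#         return sum(sum(per_worker.values()) for per_worker in dispatched.values())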
class TestDistributionIsRespectedDuringDispatch(unittest.TestCase):
def test_dispatch_75_users_to_4_workers_with_spawn_rate_of_5(self):
"""
Test case covering reported issue in https://github.com/locustio/locust/pull/1621#issuecomment-853624275.
        The case ramps up from 0 to 75 users with two user classes. `User1` has a weight of 1 and `User2`
has a weight of 2. The original issue was with 500 users, but to keep the test shorter, we use 75 users.
Final distribution should be {"User1": 25, "User2": 50}
"""
class User1(User):
weight = 1
class User2(User):
weight = 2
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_node3 = WorkerNode("3")
worker_node4 = WorkerNode("4")
users_dispatcher = UsersDispatcher(
worker_nodes=[worker_node1, worker_node2, worker_node3, worker_node4], user_classes=[User1, User2]
)
users_dispatcher.new_dispatch(target_user_count=75, spawn_rate=5)
users_dispatcher._wait_between_dispatch = 0
# total user count = 5
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 3})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 1, "User2": 1},
"2": {"User1": 1, "User2": 0},
"3": {"User1": 0, "User2": 1},
"4": {"User1": 0, "User2": 1},
},
)
# total user count = 10
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 7})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 1, "User2": 2},
"2": {"User1": 1, "User2": 2},
"3": {"User1": 0, "User2": 2},
"4": {"User1": 1, "User2": 1},
},
)
# total user count = 15
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 10})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 1, "User2": 3},
"2": {"User1": 2, "User2": 2},
"3": {"User1": 1, "User2": 3},
"4": {"User1": 1, "User2": 2},
},
)
# total user count = 20
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 7, "User2": 13})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 2, "User2": 3},
"2": {"User1": 2, "User2": 3},
"3": {"User1": 1, "User2": 4},
"4": {"User1": 2, "User2": 3},
},
)
# total user count = 25
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 8, "User2": 17})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 2, "User2": 5},
"2": {"User1": 2, "User2": 4},
"3": {"User1": 2, "User2": 4},
"4": {"User1": 2, "User2": 4},
},
)
# total user count = 30
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 10, "User2": 20})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 3, "User2": 5},
"2": {"User1": 3, "User2": 5},
"3": {"User1": 2, "User2": 5},
"4": {"User1": 2, "User2": 5},
},
)
# total user count = 35
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 12, "User2": 23})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 3, "User2": 6},
"2": {"User1": 3, "User2": 6},
"3": {"User1": 3, "User2": 6},
"4": {"User1": 3, "User2": 5},
},
)
# total user count = 40
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 13, "User2": 27})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 3, "User2": 7},
"2": {"User1": 4, "User2": 6},
"3": {"User1": 3, "User2": 7},
"4": {"User1": 3, "User2": 7},
},
)
# total user count = 45
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 15, "User2": 30})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 4, "User2": 8},
"2": {"User1": 4, "User2": 7},
"3": {"User1": 3, "User2": 8},
"4": {"User1": 4, "User2": 7},
},
)
# total user count = 50
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 17, "User2": 33})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 4, "User2": 9},
"2": {"User1": 5, "User2": 8},
"3": {"User1": 4, "User2": 8},
"4": {"User1": 4, "User2": 8},
},
)
# total user count = 55
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 18, "User2": 37})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 5, "User2": 9},
"2": {"User1": 5, "User2": 9},
"3": {"User1": 4, "User2": 10},
"4": {"User1": 4, "User2": 9},
},
)
# total user count = 60
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 20, "User2": 40})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 5, "User2": 10},
"2": {"User1": 5, "User2": 10},
"3": {"User1": 5, "User2": 10},
"4": {"User1": 5, "User2": 10},
},
)
# total user count = 65
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 22, "User2": 43})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 6, "User2": 11},
"2": {"User1": 6, "User2": 10},
"3": {"User1": 5, "User2": 11},
"4": {"User1": 5, "User2": 11},
},
)
# total user count = 70
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 23, "User2": 47})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 6, "User2": 12},
"2": {"User1": 6, "User2": 12},
"3": {"User1": 5, "User2": 12},
"4": {"User1": 6, "User2": 11},
},
)
# total user count = 75, User1 = 25, User2 = 50
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 25, "User2": 50})
self.assertDictEqual(
dispatched_users,
{
"1": {"User1": 6, "User2": 13},
"2": {"User1": 7, "User2": 12},
"3": {"User1": 6, "User2": 13},
"4": {"User1": 6, "User2": 12},
},
)
self.assertRaises(StopIteration, lambda: next(users_dispatcher))
class TestLargeScale(unittest.TestCase):
# fmt: off
weights = [
5, 55, 37, 2, 97, 41, 33, 19, 19, 34, 78, 76, 28, 62, 69, 5, 55, 37, 2, 97, 41, 33, 19, 19, 34,
78, 76, 28, 62, 69, 41, 33, 19, 19, 34, 78, 76, 28, 62, 69, 41, 33, 19, 19, 34, 78, 76, 28, 62, 69
]
# fmt: on
    numerated_weights = dict(enumerate(weights))
weighted_user_classes = [type(f"User{i}", (User,), {"weight": w}) for i, w in numerated_weights.items()]
fixed_user_classes_10k = [type(f"FixedUser10k{i}", (User,), {"fixed_count": 2000}) for i in range(50)]
fixed_user_classes_1M = [type(f"FixedUser1M{i}", (User,), {"fixed_count": 20000}) for i in range(50)]
mixed_users = weighted_user_classes[:25] + fixed_user_classes_10k[25:]
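    # Totals: the fixed-count fixtures sum to 50 * 2_000 = 100_000 and
    # 50 * 20_000 = 1_000_000 users respectively; `mixed_users` combines the
    # first 25 weighted classes with 25 fixed-count classes.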
def test_distribute_users(self):
for user_classes in [self.weighted_user_classes, self.fixed_user_classes_1M, self.mixed_users]:
workers = [WorkerNode(str(i)) for i in range(10_000)]
target_user_count = 1_000_000
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
ts = time.perf_counter()
users_on_workers, user_gen, worker_gen, active_users = users_dispatcher._distribute_users(
target_user_count=target_user_count
)
delta = time.perf_counter() - ts
# Because tests are run with coverage, the code will be slower.
# We set the pass criterion to 7000ms, but in real life, the
# `_distribute_users` method runs faster than this.
self.assertLessEqual(1000 * delta, 7000)
self.assertEqual(_user_count(users_on_workers), target_user_count)
def test_ramp_up_from_0_to_100_000_users_with_50_user_classes_and_1000_workers_and_5000_spawn_rate(self):
for user_classes in [
self.weighted_user_classes,
self.fixed_user_classes_1M,
self.fixed_user_classes_10k,
self.mixed_users,
]:
workers = [WorkerNode(str(i)) for i in range(1000)]
target_user_count = 100_000
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=target_user_count, spawn_rate=5_000)
users_dispatcher._wait_between_dispatch = 0
all_dispatched_users = list(users_dispatcher)
tol = 0.2
self.assertTrue(
all(
dispatch_iteration_duration <= tol
for dispatch_iteration_duration in users_dispatcher.dispatch_iteration_durations
),
"One or more dispatch took more than {:.0f}s to compute (max = {}ms)".format(
tol * 1000, 1000 * max(users_dispatcher.dispatch_iteration_durations)
),
)
self.assertEqual(_user_count(all_dispatched_users[-1]), target_user_count)
for dispatch_users in all_dispatched_users:
user_count_on_workers = [
sum(user_classes_count.values()) for user_classes_count in dispatch_users.values()
]
self.assertLessEqual(
max(user_count_on_workers) - min(user_count_on_workers),
1,
"One or more workers have too much users compared to the other workers when user count is {}".format(
_user_count(dispatch_users)
),
)
for i, dispatch_users in enumerate(all_dispatched_users):
aggregated_dispatched_users = _aggregate_dispatched_users(dispatch_users)
for user_class in [u for u in user_classes if not u.fixed_count]:
target_relative_weight = user_class.weight / sum(
map(attrgetter("weight"), [u for u in user_classes if not u.fixed_count])
)
relative_weight = aggregated_dispatched_users[user_class.__name__] / _user_count(dispatch_users)
error_percent = 100 * (relative_weight - target_relative_weight) / target_relative_weight
if i == len(all_dispatched_users) - 1:
# We want the distribution to be as good as possible at the end of the ramp-up
tol = 0.5
else:
tol = 15
self.assertLessEqual(
error_percent,
tol,
"Distribution for user class {} is off by more than {}% when user count is {}".format(
user_class, tol, _user_count(dispatch_users)
),
)
def test_ramp_down_from_100_000_to_0_users_with_50_user_classes_and_1000_workers_and_5000_spawn_rate(self):
for user_classes in [
self.weighted_user_classes,
self.fixed_user_classes_1M,
self.fixed_user_classes_10k,
self.mixed_users,
]:
initial_user_count = 100_000
workers = [WorkerNode(str(i)) for i in range(1000)]
# Ramp-up
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=initial_user_count, spawn_rate=initial_user_count)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
# Ramp-down
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=5000)
users_dispatcher._wait_between_dispatch = 0
all_dispatched_users = list(users_dispatcher)
tol = 0.2
self.assertTrue(
all(
dispatch_iteration_duration <= tol
for dispatch_iteration_duration in users_dispatcher.dispatch_iteration_durations
),
"One or more dispatch took more than {:.0f}ms to compute (max = {}ms)".format(
tol * 1000, 1000 * max(users_dispatcher.dispatch_iteration_durations)
),
)
self.assertEqual(_user_count(all_dispatched_users[-1]), 0)
for dispatch_users in all_dispatched_users[:-1]:
user_count_on_workers = [
sum(user_classes_count.values()) for user_classes_count in dispatch_users.values()
]
self.assertLessEqual(
max(user_count_on_workers) - min(user_count_on_workers),
1,
"One or more workers have too much users compared to the other workers when user count is {}".format(
_user_count(dispatch_users)
),
)
for dispatch_users in all_dispatched_users[:-1]:
aggregated_dispatched_users = _aggregate_dispatched_users(dispatch_users)
for user_class in [u for u in user_classes if not u.fixed_count]:
target_relative_weight = user_class.weight / sum(
map(attrgetter("weight"), [u for u in user_classes if not u.fixed_count])
)
relative_weight = aggregated_dispatched_users[user_class.__name__] / _user_count(dispatch_users)
error_percent = 100 * (relative_weight - target_relative_weight) / target_relative_weight
tol = 15
self.assertLessEqual(
error_percent,
tol,
"Distribution for user class {} is off by more than {}% when user count is {}".format(
user_class, tol, _user_count(dispatch_users)
),
)
class TestSmallConsecutiveRamping(unittest.TestCase):
def test_consecutive_ramp_up_and_ramp_down(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
user_classes = [User1, User2]
worker_node1 = WorkerNode("1")
worker_node2 = WorkerNode("2")
worker_nodes = [worker_node1, worker_node2]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
# user count = 1
users_dispatcher.new_dispatch(target_user_count=1, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 0})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 0)
# user count = 2
users_dispatcher.new_dispatch(target_user_count=2, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 1)
# user count = 3
users_dispatcher.new_dispatch(target_user_count=3, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 1)
# user count = 4
users_dispatcher.new_dispatch(target_user_count=4, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 2)
# user count = 3
users_dispatcher.new_dispatch(target_user_count=3, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 1)
# user count = 2
users_dispatcher.new_dispatch(target_user_count=2, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 1)
# user count = 1
users_dispatcher.new_dispatch(target_user_count=1, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 0})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 0)
# user count = 0
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 0, "User2": 0})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node1.id), 0)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_node2.id), 0)
class TestRampingMiscellaneous(unittest.TestCase):
def test_spawn_rate_greater_than_target_user_count(self):
class User1(User):
weight = 1
user_classes = [User1]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(1)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=1, spawn_rate=100)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(dispatched_users, {"1": {"User1": 1}})
users_dispatcher.new_dispatch(target_user_count=11, spawn_rate=100)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(dispatched_users, {"1": {"User1": 11}})
users_dispatcher.new_dispatch(target_user_count=10, spawn_rate=100)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(dispatched_users, {"1": {"User1": 10}})
users_dispatcher.new_dispatch(target_user_count=0, spawn_rate=100)
users_dispatcher._wait_between_dispatch = 0
dispatched_users = next(users_dispatcher)
self.assertDictEqual(dispatched_users, {"1": {"User1": 0}})
class TestRemoveWorker(unittest.TestCase):
def test_remove_worker_during_ramp_up(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
        sleep_time = 0.2  # Speed up the test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1, "User3": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 1)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 2)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
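        # Removing a worker flags the dispatcher for a re-balance: the next
        # dispatch returns immediately (no inter-dispatch wait) and simply
        # redistributes the already-spawned users over the remaining workers,
        # as the assertions below verify.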
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
def test_remove_two_workers_during_ramp_up(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1, "User3": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 1)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 1)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 2)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
users_dispatcher.remove_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 9)
def test_remove_worker_between_two_ramp_ups(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 6)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 8)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 7)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 6, "User2": 6, "User3": 6})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 9)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 9)
def test_remove_two_workers_between_two_ramp_ups(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
users_dispatcher.remove_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 9)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 12)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 15)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 6, "User2": 6, "User3": 6})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 18)
def test_remove_worker_during_ramp_down(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 5)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 6)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
def test_remove_two_workers_during_ramp_down(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 5)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[1])
users_dispatcher.remove_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 12)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 9)
def test_remove_last_worker(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(1)]
users_dispatcher = UsersDispatcher(worker_nodes=worker_nodes, user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
# Dispatch iteration 1
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1, "User3": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
# Dispatch iteration 2
dispatched_users = next(users_dispatcher)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.remove_worker(worker_nodes[0])
self.assertFalse(users_dispatcher._rebalance)
class TestAddWorker(unittest.TestCase):
def test_add_worker_during_ramp_up(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0], worker_nodes[2]], user_classes=user_classes)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1, "User3": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 1)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 2)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
def test_add_two_workers_during_ramp_up(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0]], user_classes=user_classes)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 1, "User2": 1, "User3": 1})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
users_dispatcher.add_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 2, "User2": 2, "User3": 2})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 2)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 2)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
def test_add_worker_between_two_ramp_ups(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0], worker_nodes[2]], user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 5)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 6, "User2": 6, "User3": 6})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 6)
def test_add_two_workers_between_two_ramp_ups(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0]], user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
users_dispatcher.add_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 5)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 5)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 6, "User2": 6, "User3": 6})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 6)
def test_add_worker_during_ramp_down(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0], worker_nodes[2]], user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 8)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 7)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 6)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 6)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
def test_add_two_workers_during_ramp_down(self):
class User1(User):
weight = 1
class User2(User):
weight = 1
class User3(User):
weight = 1
user_classes = [User1, User2, User3]
worker_nodes = [WorkerNode(str(i + 1)) for i in range(3)]
users_dispatcher = UsersDispatcher(worker_nodes=[worker_nodes[0]], user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=18, spawn_rate=3)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
sleep_time = 0.2 # Speed-up test
users_dispatcher.new_dispatch(target_user_count=9, spawn_rate=3)
users_dispatcher._wait_between_dispatch = sleep_time
# Dispatch iteration 1
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(0 <= delta <= _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 5, "User2": 5, "User3": 5})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 15)
# Dispatch iteration 2
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 12)
self.assertFalse(users_dispatcher._rebalance)
users_dispatcher.add_worker(worker_nodes[1])
users_dispatcher.add_worker(worker_nodes[2])
self.assertTrue(users_dispatcher._rebalance)
# Re-balance
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(
0 <= delta <= _TOLERANCE, "Expected re-balance dispatch to be instantaneous but got {}s".format(delta)
)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 4, "User2": 4, "User3": 4})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 4)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 4)
self.assertFalse(users_dispatcher._rebalance)
# Dispatch iteration 3
ts = time.perf_counter()
dispatched_users = next(users_dispatcher)
delta = time.perf_counter() - ts
self.assertTrue(sleep_time - _TOLERANCE <= delta <= sleep_time + _TOLERANCE, delta)
self.assertDictEqual(_aggregate_dispatched_users(dispatched_users), {"User1": 3, "User2": 3, "User3": 3})
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[0].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[1].id), 3)
self.assertEqual(_user_count_on_worker(dispatched_users, worker_nodes[2].id), 3)
class TestRampUpUsersFromZeroWithFixed(unittest.TestCase):
class RampUpCase:
        def __init__(self, fixed_counts: Tuple[int, ...], weights: Tuple[int, ...], target_user_count: int):
self.fixed_counts = fixed_counts
self.weights = weights
self.target_user_count = target_user_count
def __str__(self):
return "<RampUpCase fixed_counts={} weights={} target_user_count={}>".format(
self.fixed_counts, self.weights, self.target_user_count
)
    def case_handler(self, cases: List[RampUpCase], expected: List[Dict[str, int]], user_classes: List[User]):
self.assertEqual(len(cases), len(expected))
for case_num in range(len(cases)):
# Reset to default values
for user_class in user_classes:
user_class.weight, user_class.fixed_count = 1, 0
case = cases[case_num]
self.assertEqual(
len(case.fixed_counts) + len(case.weights),
len(user_classes),
msg="Invalid test case or user list.",
)
fixed_users = user_classes[: len(case.fixed_counts)]
weighted_users_list = user_classes[len(case.fixed_counts) :]
for user, fixed_count in zip(fixed_users, case.fixed_counts):
user.fixed_count = fixed_count
for user, weight in zip(weighted_users_list, case.weights):
user.weight = weight
worker_node1 = WorkerNode("1")
users_dispatcher = UsersDispatcher(worker_nodes=[worker_node1], user_classes=user_classes)
users_dispatcher.new_dispatch(target_user_count=case.target_user_count, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = 0
iterations = list(users_dispatcher)
self.assertDictEqual(iterations[-1]["1"], expected[case_num], msg=f"Wrong case {case}")
    def test_ramp_up_2_weighted_users_with_1_fixed_user(self):
class User1(User):
...
class User2(User):
...
class User3(User):
...
self.case_handler(
cases=[
self.RampUpCase(fixed_counts=(1,), weights=(1, 1), target_user_count=3),
self.RampUpCase(fixed_counts=(1,), weights=(1, 1), target_user_count=9),
self.RampUpCase(fixed_counts=(8,), weights=(1, 1), target_user_count=10),
self.RampUpCase(fixed_counts=(2,), weights=(1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(100,), weights=(1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(960,), weights=(1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(9990,), weights=(1, 1), target_user_count=10000),
self.RampUpCase(fixed_counts=(100,), weights=(1, 1), target_user_count=100),
],
expected=[
{"User1": 1, "User2": 1, "User3": 1},
{"User1": 1, "User2": 4, "User3": 4},
{"User1": 8, "User2": 1, "User3": 1},
{"User1": 2, "User2": 499, "User3": 499},
{"User1": 100, "User2": 450, "User3": 450},
{"User1": 960, "User2": 20, "User3": 20},
{"User1": 9990, "User2": 5, "User3": 5},
{"User1": 100, "User2": 0, "User3": 0},
],
user_classes=[User1, User2, User3],
)
    def test_ramp_up_various_count_weighted_and_fixed_users(self):
class User1(User):
...
class User2(User):
...
class User3(User):
...
class User4(User):
...
class User5(User):
...
self.case_handler(
cases=[
self.RampUpCase(fixed_counts=(), weights=(1, 1, 1, 1, 1), target_user_count=5),
self.RampUpCase(fixed_counts=(1, 1), weights=(1, 1, 1), target_user_count=5),
self.RampUpCase(fixed_counts=(5, 2), weights=(1, 1, 1), target_user_count=10),
self.RampUpCase(fixed_counts=(9, 1), weights=(5, 3, 2), target_user_count=20),
self.RampUpCase(fixed_counts=(996,), weights=(1, 1, 1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(500,), weights=(2, 1, 1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(250, 250), weights=(3, 1, 1), target_user_count=1000),
self.RampUpCase(fixed_counts=(1, 1, 1, 1), weights=(100,), target_user_count=1000),
],
expected=[
{"User1": 1, "User2": 1, "User3": 1, "User4": 1, "User5": 1},
{"User1": 1, "User2": 1, "User3": 1, "User4": 1, "User5": 1},
{"User1": 5, "User2": 2, "User3": 1, "User4": 1, "User5": 1},
{"User1": 9, "User2": 1, "User3": 5, "User4": 3, "User5": 2},
{"User1": 996, "User2": 1, "User3": 1, "User4": 1, "User5": 1},
{"User1": 500, "User2": 200, "User3": 100, "User4": 100, "User5": 100},
{"User1": 250, "User2": 250, "User3": 300, "User4": 100, "User5": 100},
{"User1": 1, "User2": 1, "User3": 1, "User4": 1, "User5": 996},
],
user_classes=[User1, User2, User3, User4, User5],
)
def test_ramp_up_only_fixed_users(self):
class User1(User):
...
class User2(User):
...
class User3(User):
...
class User4(User):
...
class User5(User):
...
self.case_handler(
cases=[
self.RampUpCase(fixed_counts=(1, 1, 1, 1, 1), weights=(), target_user_count=5),
self.RampUpCase(fixed_counts=(13, 26, 39, 52, 1), weights=(), target_user_count=131),
self.RampUpCase(fixed_counts=(10, 10, 10, 10, 10), weights=(), target_user_count=100),
self.RampUpCase(fixed_counts=(10, 10, 10, 10, 10), weights=(), target_user_count=50),
],
expected=[
{"User1": 1, "User2": 1, "User3": 1, "User4": 1, "User5": 1},
{"User1": 13, "User2": 26, "User3": 39, "User4": 52, "User5": 1},
{"User1": 10, "User2": 10, "User3": 10, "User4": 10, "User5": 10},
{"User1": 10, "User2": 10, "User3": 10, "User4": 10, "User5": 10},
],
user_classes=[User1, User2, User3, User4, User5],
)
    def test_ramp_up_partially_ramp_down_and_ramp_up_to_target(self):
class User1(User):
fixed_count = 50
class User2(User):
fixed_count = 50
target_count = User1.fixed_count + User2.fixed_count
users_dispatcher = UsersDispatcher(worker_nodes=[WorkerNode("1")], user_classes=[User1, User2])
users_dispatcher.new_dispatch(target_user_count=30, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = 0
iterations = list(users_dispatcher)
self.assertDictEqual(iterations[-1]["1"], {"User1": 15, "User2": 15})
users_dispatcher.new_dispatch(target_user_count=20, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = 0
iterations = list(users_dispatcher)
self.assertDictEqual(iterations[-1]["1"], {"User1": 10, "User2": 10})
users_dispatcher.new_dispatch(target_user_count=target_count, spawn_rate=0.5)
users_dispatcher._wait_between_dispatch = 0
iterations = list(users_dispatcher)
self.assertDictEqual(iterations[-1]["1"], {"User1": 50, "User2": 50})
    def test_ramp_up_ramp_down_and_ramp_up_again(self):
for weights, fixed_counts in [
[(1, 1, 1, 1, 1), (100, 100, 50, 50, 200)],
[(1, 1, 1, 1, 1), (100, 150, 50, 50, 0)],
[(1, 1, 1, 1, 1), (200, 100, 50, 0, 0)],
[(1, 1, 1, 1, 1), (200, 100, 0, 0, 0)],
[(1, 1, 1, 1, 1), (200, 0, 0, 0, 0)],
[(1, 1, 1, 1, 1), (0, 0, 0, 0, 0)],
]:
u1_weight, u2_weight, u3_weight, u4_weight, u5_weight = weights
u1_fixed_count, u2_fixed_count, u3_fixed_count, u4_fixed_count, u5_fixed_count = fixed_counts
class User1(User):
weight = u1_weight
fixed_count = u1_fixed_count
class User2(User):
weight = u2_weight
fixed_count = u2_fixed_count
class User3(User):
weight = u3_weight
fixed_count = u3_fixed_count
class User4(User):
weight = u4_weight
fixed_count = u4_fixed_count
class User5(User):
weight = u5_weight
fixed_count = u5_fixed_count
target_user_counts = [sum(fixed_counts), sum(fixed_counts) + 100]
down_counts = [0, max(min(fixed_counts) - 1, 0)]
user_classes = [User1, User2, User3, User4, User5]
for worker_count in [3, 5, 9]:
workers = [WorkerNode(str(i + 1)) for i in range(worker_count)]
users_dispatcher = UsersDispatcher(worker_nodes=workers, user_classes=user_classes)
for down_to_count in down_counts:
for target_user_count in target_user_counts:
# Ramp-up to go to `target_user_count` #########
users_dispatcher.new_dispatch(target_user_count=target_user_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
for user_class in user_classes:
if user_class.fixed_count:
self.assertEqual(
users_dispatcher._get_user_current_count(user_class.__name__),
user_class.fixed_count,
)
# Ramp-down to go to `down_to_count`
# and ensure the fixed users was decreased too
users_dispatcher.new_dispatch(target_user_count=down_to_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
for user_class in user_classes:
if user_class.fixed_count:
self.assertNotEqual(
users_dispatcher._get_user_current_count(user_class.__name__),
user_class.fixed_count,
)
# Ramp-up go back to `target_user_count` and ensure
# the fixed users return to their counts
users_dispatcher.new_dispatch(target_user_count=target_user_count, spawn_rate=1)
users_dispatcher._wait_between_dispatch = 0
list(users_dispatcher)
for user_class in user_classes:
if user_class.fixed_count:
self.assertEqual(
users_dispatcher._get_user_current_count(user_class.__name__),
user_class.fixed_count,
)
def _aggregate_dispatched_users(d: Dict[str, Dict[str, int]]) -> Dict[str, int]:
user_classes = list(next(iter(d.values())).keys())
    return {u: sum(per_worker[u] for per_worker in d.values()) for u in user_classes}
def _user_count(d: Dict[str, Dict[str, int]]) -> int:
return sum(map(sum, map(dict.values, d.values())))
def _user_count_on_worker(d: Dict[str, Dict[str, int]], worker_node_id: str) -> int:
return sum(d[worker_node_id].values())
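# A minimal sketch (not part of the original suite) showing what the helper
# functions above compute for a hypothetical dispatch result; the outer dict
# maps worker ids to per-user-class counts.
if __name__ == "__main__":
    sample = {"1": {"User1": 2, "User2": 1}, "2": {"User1": 1, "User2": 3}}
    assert _aggregate_dispatched_users(sample) == {"User1": 3, "User2": 4}
    assert _user_count(sample) == 7
    assert _user_count_on_worker(sample, "1") == 3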
|
import reg
class TypeRegistry(object):
def __init__(self):
self.types = []
self.schema_name = {}
def register_type(self, name, schema):
if name not in self.types:
self.types.append(name)
self.schema_name[schema] = name
def get_typeinfo(self, name, request):
try:
factory = request.app.get_typeinfo_factory(name)
except NotImplementedError:
factory = None
if factory is None:
raise KeyError('No type info registered for %s' % name)
        result = factory(request)
result['name'] = name
return result
def get_typeinfos(self, request):
res = {}
for k in self.types:
res[k] = self.get_typeinfo(k, request)
return res
def get_typeinfo_by_schema(self, schema, request):
name = self.schema_name.get(schema, None)
if name is None:
raise KeyError('No type info registered for %s' % schema)
return self.get_typeinfo(name, request)
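# A minimal usage sketch under assumed plumbing: DummyApp/DummyRequest are
# hypothetical stand-ins for the app/request objects that get_typeinfo
# expects; only request.app.get_typeinfo_factory(name) is exercised.
if __name__ == "__main__":
    class DummyApp(object):
        def get_typeinfo_factory(self, name):
            return lambda request: {'title': name.title()}
    class DummyRequest(object):
        app = DummyApp()
    registry = TypeRegistry()
    registry.register_type('document', schema=dict)
    info = registry.get_typeinfo('document', DummyRequest())
    assert info == {'title': 'Document', 'name': 'document'}
    assert registry.get_typeinfo_by_schema(dict, DummyRequest()) == info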
|
"""Views for Rider APp"""
from rest_framework.generics import ListAPIView, RetrieveAPIView, UpdateAPIView
from .serializers import ListDeliverySerializer, UpdateDeliveryStatusSerializer, RetrieveDeliverySerializer
from .models import Delivery
class ListDeliveryAPIView(ListAPIView):
"""View for listing Delivery"""
    serializer_class = ListDeliverySerializer
def get_queryset(self):
"""Returns Queryset"""
return Delivery.objects.filter(rider=self.request.user)
class RetrieveDeliveryAPIView(RetrieveAPIView):
"""View for retrieving Delivery data"""
serializer_class = RetrieveDeliverySerializer
def get_queryset(self):
"""Returns Queryset"""
return Delivery.objects.filter(rider=self.request.user)
class UpdateDeliveryStatusAPIView(UpdateAPIView):
"""View for updating delivery status"""
serializer_class = UpdateDeliveryStatusSerializer
def get_queryset(self):
"""Returns Queryset"""
return Delivery.objects.filter(rider=self.request.user)
    def get_object(self):
        """Returns object to be updated, restricted to the requesting rider's deliveries"""
        return self.get_queryset().get(id=self.kwargs.get('pk'))
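# A possible URL configuration for the views above; this is a sketch, not
# taken from the original app, and the route paths and names are assumptions:
#
#   from django.urls import path
#   from .views import (
#       ListDeliveryAPIView, RetrieveDeliveryAPIView, UpdateDeliveryStatusAPIView,
#   )
#
#   urlpatterns = [
#       path('deliveries/', ListDeliveryAPIView.as_view(), name='delivery-list'),
#       path('deliveries/<int:pk>/', RetrieveDeliveryAPIView.as_view(), name='delivery-detail'),
#       path('deliveries/<int:pk>/status/', UpdateDeliveryStatusAPIView.as_view(), name='delivery-status'),
#   ]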
|
from .types import Current, Voltage, PowerFactor, ActivePower, ReactivePower
from . import tools
from .core import MeterBase
class NevaMT3(MeterBase):
"""Base class for three-phase meters (Neva MT 3xx)."""
# def __init__(self, interface: str, address: str = "", password: str = "",
# initial_baudrate: int = 0):
# super().__init__(interface, address, password, initial_baudrate)
# self.obis_codes.voltage_l1 = "20.07.00*FF"
# self.obis_codes.voltage_l2 = "34.07.00*FF"
# self.obis_codes.voltage_l3 = "48.07.00*FF"
# self.obis_codes.active_power_l1 = "24.07.00*FF"
# self.obis_codes.active_power_l2 = "38.07.00*FF"
# self.obis_codes.active_power_l3 = "4C.07.00*FF"
# self.obis_codes.active_power_sum = "10.07.00*FF"
# self.obis_codes.current_l1 = "1F.07.00*FF"
# self.obis_codes.current_l2 = "33.07.00*FF"
# self.obis_codes.current_l3 = "47.07.00*FF"
# self.obis_codes.power_factor_l1 = "21.07.FF*FF"
# self.obis_codes.power_factor_l2 = "35.07.FF*FF"
# self.obis_codes.power_factor_l3 = "49.07.FF*FF"
@property
def voltage_l1(self) -> float:
"""Instantaneous voltage in phase L1 [V]."""
self.send(tools.make_cmd_msg(self.obis_codes.voltage_l1))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def voltage_l2(self) -> float:
"""Instantaneous voltage in phase L2 [V]."""
self.send(tools.make_cmd_msg(self.obis_codes.voltage_l2))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def voltage_l3(self) -> float:
"""Instantaneous voltage in phase L3 [V]."""
self.send(tools.make_cmd_msg(self.obis_codes.voltage_l3))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def voltage(self) -> Voltage:
"""Instantaneous voltages of all phases [V]."""
return Voltage(self.voltage_l1, self.voltage_l2, self.voltage_l3)
@property
def current_l1(self) -> float:
"""Instantaneous current in phase L1 [A]."""
self.send(tools.make_cmd_msg(self.obis_codes.current_l1))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def current_l2(self) -> float:
"""Instantaneous current in phase L2 [A]."""
self.send(tools.make_cmd_msg(self.obis_codes.current_l2))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def current_l3(self) -> float:
"""Instantaneous current in phase L3 [A]."""
self.send(tools.make_cmd_msg(self.obis_codes.current_l3))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def current(self) -> Current:
"""Instantaneous currents of all phases [A]."""
return Current(self.current_l1, self.current_l2, self.current_l3)
    def __get_power_factor(self, obis: str) -> str:
        # The leading response digit selects a load-type marker ("C" capacitive,
        # "L" inductive, "?" unknown); the remainder is the magnitude.
        y = ("C", "L", "?")
self.send(tools.make_cmd_msg(obis))
resp = tools.parse_data_msg(self.recv(19)).data[0]
return y[int(resp[0])] + str(float(resp[1:]))
@property
def power_factor_l1(self) -> str:
"""Power factor in phase L1."""
return self.__get_power_factor(self.obis_codes.power_factor_l1)
@property
def power_factor_l2(self) -> str:
"""Power factor in phase L2."""
        return self.__get_power_factor(self.obis_codes.power_factor_l2)
@property
def power_factor_l3(self) -> str:
"""Power factor in phase L3."""
return self.__get_power_factor(self.obis_codes.power_factor_l3)
@property
def power_factor(self) -> PowerFactor:
"""Power factors of all phases."""
return PowerFactor(self.power_factor_l1, self.power_factor_l2, self.power_factor_l3)
@property
def active_power_l1(self) -> float:
"""Active instantaneous power in phase L1 [W]."""
self.send(tools.make_cmd_msg(self.obis_codes.active_power_l1))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def active_power_l2(self) -> float:
"""Active instantaneous power in phase L2 [W]."""
        self.send(tools.make_cmd_msg(self.obis_codes.active_power_l2))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def active_power_l3(self) -> float:
"""Active instantaneous power in phase L3 [W]."""
        self.send(tools.make_cmd_msg(self.obis_codes.active_power_l3))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def active_power_sum(self) -> float:
"""Sum of active instantaneous power of all phases [W]."""
self.send(tools.make_cmd_msg(self.obis_codes.active_power_sum))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def active_power(self) -> ActivePower:
"""Active instantaneous power of all phases and total [W]."""
return ActivePower(l1=self.active_power_l1, l2=self.active_power_l2,
l3=self.active_power_l3, total=self.active_power_sum)
class NevaMT3R(NevaMT3):
"""Class for meters Neva MT3XX supporting reactive energy."""
#
# def __init__(self, interface: str, address: str = "", password: str = "",
# initial_baudrate: int = 0):
# super().__init__(interface, address, password, initial_baudrate)
# self.obis_codes.positive_reactive_power_l1 = "17.07.01*FF"
# self.obis_codes.negative_reactive_power_l1 = "18.07.01*FF"
# self.obis_codes.positive_reactive_power_l2 = "2B.07.01*FF"
# self.obis_codes.negative_reactive_power_l2 = "2C.07.01*FF"
# self.obis_codes.positive_reactive_power_l3 = "3F.07.01*FF"
# self.obis_codes.negative_reactive_power_l3 = "40.07.01*FF"
# self.obis_codes.positive_reactive_power_sum = "03.07.01*FF"
# self.obis_codes.negative_reactive_power_sum = "04.07.01*FF"
@property
def positive_reactive_power_l1(self) -> float:
"""Positive reactive power of phase L1."""
self.send(tools.make_cmd_msg(self.obis_codes.positive_reactive_power_l1))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def negative_reactive_power_l1(self) -> float:
"""Negative reactive power of phase L1."""
self.send(tools.make_cmd_msg(self.obis_codes.negative_reactive_power_l1))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def positive_reactive_power_l2(self) -> float:
"""Positive reactive power of phase L2."""
self.send(tools.make_cmd_msg(self.obis_codes.positive_reactive_power_l2))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def negative_reactive_power_l2(self) -> float:
"""Negative reactive power of phase L2."""
self.send(tools.make_cmd_msg(self.obis_codes.negative_reactive_power_l2))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def positive_reactive_power_l3(self) -> float:
"""Positive reactive power of phase L3."""
self.send(tools.make_cmd_msg(self.obis_codes.positive_reactive_power_l3))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def negative_reactive_power_l3(self) -> float:
"""Negative reactive power of phase L3."""
self.send(tools.make_cmd_msg(self.obis_codes.negative_reactive_power_l3))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def positive_reactive_power_sum(self) -> float:
"""Sum of all positive reactive powers."""
self.send(tools.make_cmd_msg(self.obis_codes.positive_reactive_power_sum))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def negative_reactive_power_sum(self) -> float:
"""Sum of all negative reactive powers."""
self.send(tools.make_cmd_msg(self.obis_codes.negative_reactive_power_sum))
return float(tools.parse_data_msg(self.recv(20)).data[0])
@property
def reactive_power(self) -> ReactivePower:
"""All reactive powers (positive and negative)."""
return ReactivePower(self.positive_reactive_power_l1, self.negative_reactive_power_l1,
self.positive_reactive_power_l2, self.negative_reactive_power_l2,
self.positive_reactive_power_l3, self.negative_reactive_power_l3,
self.positive_reactive_power_sum, self.negative_reactive_power_sum)
class NevaMT324AOS(NevaMT3):
"""Class for meters Neva MT324AOS."""
@property
def status(self) -> dict[str, bool]:
"""Current status of the meter."""
self.send(tools.make_cmd_msg(self.obis_codes.status))
response = tools.parse_data_msg(self.recv(17)).data[0]
status = ("bodyIsOpen", "terminalCoverIsRemoved", "loadIsConnected", "loadIsDisconnected",
"failedToChangeRelayStatus", "influenceOfMagneticField", "wrongWired",
"dataMemoryICWorkError", "paramMemoryWorkError", "powerICError",
"clockOrCalendarFailure", "batteryDischarge",
"triggerOfButtonOfProgrammingPermission", "dataMemoryFailure",
"paramMemoryFailure")
bits = f'{int(response, 16):0>16b}'
return dict((status[idx], bit == "1") for idx, bit in enumerate(bits[:7] + bits[8:]))
@property
def power_factor_l1(self) -> float:
"""Power factor in phase L1."""
self.send(tools.make_cmd_msg(self.obis_codes.power_factor_l1))
return float(tools.parse_data_msg(self.recv(18)).data[0])
@property
def power_factor_l2(self) -> float:
"""Power factor in phase L2."""
self.send(tools.make_cmd_msg(self.obis_codes.power_factor_l2))
return float(tools.parse_data_msg(self.recv(18)).data[0])
@property
def power_factor_l3(self) -> float:
"""Power factor in phase L3."""
self.send(tools.make_cmd_msg(self.obis_codes.power_factor_l3))
return float(tools.parse_data_msg(self.recv(18)).data[0])
class NevaMT324R(NevaMT3R):
"""Class for working with electricity meters Neva MT 324
supporting reactive energy.
"""
|
"""
Classic task, a tough nut to crack.
Given four lists A, B, C, D of integer values,
compute how many tuples (i, j, k, l) there are
such that A[i] + B[j] + C[k] + D[l] is zero.
We guarantee that A, B, C, D all have the same length N, where 0 ≤ N ≤ 1000.
"""
from collections import Counter
from itertools import product
from typing import List
def check_sum_of_four(
a_values: List[int], b_values: List[int], c_values: List[int], d_values: List[int]
) -> int:
"""
    Returns the number of zero-sum tuples (`a_entry`, `b_entry`, `c_entry`, `d_entry`)
    drawn from `a_values`, `b_values`, `c_values`, `d_values` respectively.
    Requirements:
    -------------
    `a_values`, `b_values`, `c_values`, `d_values` have the same length N,
    where 0 ≤ N ≤ 1000.
Algorithm:
----------
    + The naive O(N^4) search is reduced to O(N^2) by building two tables
      of pairwise sums, one per pair of arrays (a+b and c+d).
    + In each table, count how many pairs produce every distinct sum.
    + For each sum in the first table whose negation appears in the second,
      multiply the two counters (i.e. the number of combinations).
    + Finally, sum all these products (i.e. the total number of zero-sum tuples).
See [C++ realization](
https://stackoverflow.com/questions/40575323/sum-of-4-integers-in-4-arrays).
"""
ab_sum_counter = Counter(sum(pair) for pair in product(a_values, b_values))
cd_sum_counter = Counter(sum(pair) for pair in product(c_values, d_values))
return sum(
ab_sum_counter[ab_sum] * cd_sum_counter[-ab_sum]
for ab_sum in ab_sum_counter
if -ab_sum in cd_sum_counter
)
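if __name__ == "__main__":
    # A quick sanity check on the classic example (not part of the original
    # module): exactly two tuples sum to zero, (1, -2, -1, 2) and (2, -1, -1, 0).
    assert check_sum_of_four([1, 2], [-2, -1], [-1, 2], [0, 2]) == 2
    # Degenerate case allowed by the guarantee 0 <= N: no tuples at all.
    assert check_sum_of_four([], [], [], []) == 0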
|
from __future__ import absolute_import, division, print_function
from crys3d.hklview.frames import *
from crys3d.hklview import view_2d
import wx
import os
class twin_settings_window (settings_window) :
def add_value_widgets (self, sizer) :
sizer.SetRows(4)
sizer.Add(wx.StaticText(self.panel, -1, "Value 1:"), 0,
wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
self.value_info_1 = wx.TextCtrl(self.panel, -1, size=(80,-1),
style=wx.TE_READONLY)
sizer.Add(self.value_info_1, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
sizer.Add(wx.StaticText(self.panel, -1, "Value 2:"), 0,
wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
self.value_info_2 = wx.TextCtrl(self.panel, -1, size=(80,-1),
style=wx.TE_READONLY)
sizer.Add(self.value_info_2, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
def update_reflection_info (self, hkl, d_min, value_1, value_2) :
print(hkl, value_1, value_2)
if (hkl is None) :
self.hkl_info.SetValue("")
self.d_min_info.SetValue("")
self.value_info.SetValue("")
else :
self.hkl_info.SetValue("%d, %d, %d" % hkl)
d_min_str = format_value("%.3g", d_min)
self.d_min_info.SetValue(d_min_str)
value_str_1 = format_value("%.3g", value_1, replace_none_with="---")
self.value_info_1.SetValue(value_str_1)
value_str_2 = format_value("%.3g", value_2, replace_none_with="---")
self.value_info_2.SetValue(value_str_2)
class twin_viewer_panel (wx.Panel) :
def __init__ (self, *args, **kwds) :
wx.Panel.__init__(self, *args, **kwds)
self.settings = self.GetParent().settings
szr = wx.BoxSizer(wx.HORIZONTAL)
self.SetSizer(szr)
self.view_1 = view_2d.hklview_2d(self, -1, size=(480,480))
self.view_2 = view_2d.hklview_2d(self, -1, size=(480,480))
self.view_1.SetMinSize((480,480))
self.view_2.SetMinSize((480,480))
szr.Add(self.view_1, 1, wx.EXPAND)
szr.Add(self.view_2, 1, wx.EXPAND)
self.SetMinSize((960,480))
szr.Fit(self)
def add_view_specific_functions (self) :
pass
def _propagate_action (self, name, *args, **kwds) :
for viewer in [self.view_1, self.view_2] :
getattr(viewer, name)(*args, **kwds)
def clear_labels (self) :
self._propagate_action("clear_labels")
def set_miller_arrays (self, array1, array2) :
self.view_1.set_miller_array(array1)
self.view_2.set_miller_array(array2)
self.Refresh()
def update_settings (self, *args, **kwds) :
self.view_1.update_settings(*args, **kwds)
self.view_2.update_settings(*args, **kwds)
self.Refresh()
def update_clicked (self, index) :
hkl_1, d_min_1, value_1 = self.view_1.scene.get_reflection_info(index)
hkl_2, d_min_2, value_2 = self.view_2.scene.get_reflection_info(index)
assert (hkl_1 == hkl_2)
self.GetParent().update_reflection_info(
hkl=hkl_1,
d_min=d_min_1,
value_1=value_1,
value_2=value_2)
def save_screen_shot (self, file_name, extensions=None) :
base, ext = os.path.splitext(file_name)
file_1 = base + "_1" + ext
file_2 = base + "_2" + ext
self.view_1.save_screen_shot(file_1)
self.view_2.save_screen_shot(file_2)
class ComparisonFrame (HKLViewFrame) :
def __init__ (self, *args, **kwds) :
HKLViewFrame.__init__(self, *args, **kwds)
self._array1 = None
self._array2 = None
def create_viewer_panel (self) :
self.viewer = twin_viewer_panel(self)
self.viewer.SetMinSize((960, 480))
def update_reflection_info (self, *args, **kwds) :
self.settings_panel.update_reflection_info(*args, **kwds)
def create_settings_panel (self) :
self.settings.expand_to_p1 = True
self.settings.expand_anomalous = True
self.settings.slice_mode = True
#self.settings.black_background = False
self.settings_panel = twin_settings_window(self, style=wx.RAISED_BORDER)
def update_settings_for_merged (self) :
self.settings.expand_to_p1 = True
self.settings.expand_anomalous = True
def update_settings (self, *args, **kwds) :
if (None in [self._array1, self._array2]) :
return False
self.viewer.update_settings(*args, **kwds)
def SetupMenus (self) :
self.menubar = wx.MenuBar(-1)
self.file_menu = wx.Menu()
self.menubar.Append(self.file_menu, "File")
item = wx.MenuItem(self.file_menu, -1, "Load data...\tCtrl-O")
self.Bind(wx.EVT_MENU, self.OnLoadFile, item)
self.file_menu.AppendItem(item)
def OnLoadFile (self, evt) :
file_name_1 = wx.FileSelector("Reflections file 1",
wildcard="Reflection files (*.mtz, *.sca, *.hkl)|*.mtz;*.sca;*.hkl",
default_path="",
flags=wx.FD_OPEN)
file_name_2 = wx.FileSelector("Reflections file 2",
wildcard="Reflection files (*.mtz, *.sca, *.hkl)|*.mtz;*.sca;*.hkl",
default_path="",
flags=wx.FD_OPEN)
self.load_files(file_name_1, file_name_2)
def load_files (self, file_name_1, file_name_2) :
array1 = self.load_reflections_file(file_name_1, set_array=False,
data_only=True)
array2 = self.load_reflections_file(file_name_2, set_array=False,
data_only=True)
symm1 = array1.crystal_symmetry()
symm2 = array2.crystal_symmetry()
if (symm1 is None) :
raise Sorry(("No crystal symmetry found in %s! Please convert to a "+
"more appropriate format.") % file_name_1)
if (symm2 is None) :
raise Sorry(("No crystal symmetry found in %s! Please convert to a "+
"more appropriate format.") % file_name_2)
if (symm1.unit_cell() is None) :
symm1 = symm1.customized_copy(unit_cell=symm2.unit_cell())
array1 = array1.customized_copy(crystal_symmetry=symm1)
if (symm2.unit_cell() is None) :
symm2 = symm2.customized_copy(unit_cell=symm1.unit_cell())
array2 = array2.customized_copy(crystal_symmetry=symm2)
if (not array1.is_similar_symmetry(array2)) :
from cctbx import crystal
space_group_1 = array1.space_group_info()
space_group_2 = array2.space_group_info()
if (str(space_group_1) != str(space_group_2)) :
# TODO need to figure out if these are really incompatible!
confirm = wx.MessageBox(("The space groups for the two datasets are "+
"different (%s versus %s). The space group from the first dataset "+
"will be used for both.") % (space_group_1, space_group_2),
style=wx.OK)
unit_cell_1 = array1.unit_cell()
unit_cell_2 = array2.unit_cell()
if (not unit_cell_1.is_similar_to(unit_cell_2)) :
uc_str_1 = "%g %g %g %g %g %g" % unit_cell_1.parameters()
uc_str_2 = "%g %g %g %g %g %g" % unit_cell_2.parameters()
confirm = wx.MessageBox(("The unit cells for the two datasets are "+
"different (%s versus %s). The unit cell from the first dataset "+
"will be used to calculated the resolution of clicked reflections.")%
(uc_str_1, uc_str_2),
style=wx.OK)
symm = crystal.symmetry(
space_group_info=space_group_1,
unit_cell=unit_cell_1)
array2 = array2.customized_copy(crystal_symmetry=symm)
if (array1.anomalous_flag() != array2.anomalous_flag()) :
wx.MessageBox("Warning: only one array contains anomalous data; to "+
"allow comparison, Bijvoet mates will be generated for the "+
"non-anomalous array.")
if (not array1.anomalous_flag()) :
array1 = array1.generate_bijvoet_mates()
else :
array2 = array2.generate_bijvoet_mates()
array1 = array1.common_set(other=array2)
array2 = array2.common_set(other=array1)
is_intensities = [ array1.is_xray_intensity_array(),
array2.is_xray_intensity_array() ]
if (len(set(is_intensities)) == 2) :
convert = wx.MessageBox("You appear to be comparing intensities with "+
"another type of data. Do you want to convert the intensities to "+
"amplitudes? If you leave them as intensities the program will "+
"still run, but the scale of the data may be much different.",
style=wx.YES_NO)
if (convert == wx.YES) :
if (array1.is_xray_intensity_array()) :
array1 = array1.f_sq_as_f()
else :
array2 = array2.f_sq_as_f()
self._array1 = array1
self._array2 = array2
self.settings_panel.d_min_ctrl.SetValue(array1.d_min())
self.settings_panel.d_min_ctrl.SetRange(array1.d_min(), 20.0)
self.settings_panel.set_index_span(array1.index_span())
self.settings_panel.update_space_group_choices(array1)
self.viewer.set_miller_arrays(array1, array2)
def add_view_specific_functions (self) :
pass
def delete_miller_index (self, hkl) :
self._array1 = self._array1.delete_index(hkl)
self._array2 = self._array2.delete_index(hkl)
self.viewer.set_miller_arrays(self._array1, self._array2)
|
import os
import re
import numpy as np
def line2float(line):
    """Parse a comma-separated line of numbers into a list of floats.

    Only digits and '.' are kept within each field; any other character is
    ignored, matching the original character-scanning behaviour.
    """
    data = ['']
    for ch in line:
        if ('0' <= ch <= '9') or ch == '.':
            data[-1] += ch
        elif ch == ',':
            data.append('')
    return [float(x) for x in data]
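# Minimal usage sketch (hypothetical input): comma-separated numeric fields.
if __name__ == '__main__':
    print(line2float('1.5,2.25,30'))  # -> [1.5, 2.25, 30.0]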
|
"""Manage outbound ON command to a device."""
from ...topics import ON_FAST
from .. import direct_ack_handler
from .direct_command import DirectCommandHandlerBase
class OnFastCommand(DirectCommandHandlerBase):
"""Manage an outbound ON command to a device."""
def __init__(self, address, group):
"""Init the OnFastCommand class."""
super().__init__(topic=ON_FAST, address=address, group=group)
# pylint: disable=arguments-differ
    def send(self, on_level=0xFF):
        """Send the ON FAST command."""
        super().send(on_level=on_level)
# pylint: disable=arguments-differ
async def async_send(self, on_level=0xFF):
"""Send the ON FAST command async."""
return await super().async_send(on_level=on_level, group=self._group)
@direct_ack_handler
def handle_direct_ack(self, cmd1, cmd2, target, user_data, hops_left):
"""Handle the ON FAST response direct ACK."""
self._call_subscribers(on_level=cmd2 if cmd2 else 0xFF)
super().handle_direct_ack(cmd1, cmd2, target, user_data, hops_left)
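# Usage sketch (hypothetical address/group values; the surrounding package is
# assumed to provide the modem plumbing and asyncio event loop):
#   cmd = OnFastCommand(address='1a2b3c', group=1)
#   await cmd.async_send()  # sends ON FAST at full brightness (0xFF)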
|
# Vocabulary of known English words.
#
# The vocabulary is mostly based on the nltk Brown corpus, but we add some words
# and exclude some words. These will likely need to be tweaked semi-frequently
# to add support for unrecognized sense descriptions.
#
# Copyright (c) 2020-2021 Tatu Ylonen. See file LICENSE and https://ylonen.org
import nltk
from nltk.corpus import brown
from .form_descriptions import known_firsts, known_species # w/ our additions
# Download Brown corpus if not already downloaded
nltk.download("brown", quiet=True)
# English words added to the default set from Brown corpus. Multi-word
# expressions separated by spaces can also be added but must match the whole
# text (they can be used when we don't want to add the components).
additional_words = set([
"'",
"ʹ",
".",
";",
":",
"!",
"‘",
"’",
'"',
'“',
'”',
'"',
',',
"…",
'...',
'“.”',
'—',
'€',
'1st',
'2nd',
'3rd',
'4th',
'5th',
'6th',
'7th',
'8th',
'9th',
'10th',
'100th',
'AIDS',
'AM',
'ATP',
'Ada Semantic Interface Specification',
'Afghanistan',
'Al Jazeera',
'Albulidae',
'Apple',
'Arabic kaf',
'Arabic waw',
'Aristophanean',
'ASCII',
'BBC',
'BDSM',
'BMW',
'BS',
'Bardet-Biedl syndrome',
'Beetle',
'Bekenstein-Hawking entropy',
'Blu-ray',
'Blu-ray Disc',
'Bohai Sea',
'Caniformia',
'Canoidea',
'Caprobrotus',
'Chaetodontinae',
'Common',
'Compatibility Decomposition',
'Coriandum',
'Cryptodiran',
'Czech',
'Dirac',
'Dr',
'Epiprocta',
'Esau',
'Eucharist',
'Euclidean',
'Exmoor',
'Feliformia',
'Feloidea',
'GUI',
'GameCube',
'Global Positioning System',
'Guantanamo',
'Gurmukhi digits',
'HCHO',
'HMMWV',
'HTTP',
'Handedness',
'Hearthstone',
'Hollandic',
'Horae',
'Hue Whiteness Blackness',
'I Ching hexagrams',
'IPA',
'ISO',
'Indo',
'Inoperable',
'Internet',
'Judeo',
'LGBT',
'Lagerstomia',
'Latinized',
'Linux',
'Lunar Module',
'Lyman continuum photon',
'Mac',
'Mach',
'Markarian',
'Masturbation',
'Maulisa',
"McDonald's",
'Mercenaria',
'Merseyside',
'Metric',
'Monetaria',
'Mr',
'Mr Spock',
'Mrs',
'Ms',
'Mugillidae',
'Multiples',
'NCO',
'Nepali',
'New',
'Nintendo',
'Noh',
'Numbers',
'Nun',
'Onchorhynchus',
'Orgasm',
'OS',
'Palmaiola',
'Pentecost',
'Phoenician',
'Plebidonax',
'PM',
'Pornography',
'Prof',
'Roma',
'Romani',
'Russian krai',
'Russophile',
'SARS',
'SI',
'Sandwich',
'Saskatchewan',
'Shahmukhi digits',
'Silent Information Regulator',
'Sony',
'Southern',
'Spanish-speaking',
'THz',
'Tamil digits',
'Telugu digits',
'Turkic',
'Twitter',
'UAV',
'USB',
'USD',
'USSF',
'Unicode',
'Uranus',
'Urdu digits',
'Valais',
'Volkswagen',
'X-Files',
'WC',
'WW2',
'Wallis',
'Web',
'Wi-Fi',
'Windows',
'World',
'XML Paper Specification',
'abbreviation',
'abdicate',
'abdication',
'abhor',
'abhorrence',
'abnormality',
'abiotic',
'aboriginals',
'aborted',
'abouts',
'abrasive',
'abridging',
'abscess',
'absorbent',
'abstinent',
'abuser',
'acanthesthesia',
'accusatorial',
'acetous',
'acetylcarnitine',
'acetylsalicylic',
'acidic',
'acne',
'acorn',
'acquiescent',
'acrimonious',
'acrimony',
'acromegaly',
'activist',
'acyclic',
'acyl',
'addict',
'addend',
'adicity',
'admonish',
'adornment',
'adpositions',
'adulterer',
'adulterous',
'aeroplane',
'affectedly',
'affixes',
'affordable',
'afterthought',
'agnathia',
'agoraphobia',
'agression',
'aground',
'airbag',
'airtight',
'ait',
'albumen',
'alchemist',
'aldehyde',
'aldohexose',
'alga',
'alimentary',
'aliphatic',
'allele',
'allergen',
'allergological',
'alleyway',
'allotrope',
'allude',
'almond',
'alms',
'alphabets',
'alpine',
'ambergris',
'ammeter',
'amoeba',
'amorously',
'amphetamine',
'amphibian',
'amphibole',
'amputate',
'anachronistic',
'anaemia',
'anaemic',
'anal',
'angiosperms',
'angiotensin',
'angled',
'angler',
'angleworm',
'anglicism',
'angstrom',
'anilingus',
'annealing',
'annexation',
'anno',
'annoyingly',
'annuity',
'annul',
'anoint',
'ante',
'antechamber',
'anteroposterior',
'anthill',
'anti-doping',
'anti-streptolysin',
'anticlimax',
'anticline',
'anticlockwise',
'antipyretic',
'antisense',
'antonym',
'antonymous',
'anus',
'anxiogenic',
'aortic',
'apatite',
'aphaeretic',
'aphorisms',
'apollonian',
'apologue',
'apostrophe',
'applique',
'appendage',
'appendectomy',
'appendicitis',
'appentice',
'appetising',
'apprentice',
'approvable',
'aquarium',
'aquatic',
'arachnid',
'archer',
'argipalla',
'arity',
'armour',
'armoured',
'aromantic',
'arse',
'arsenolite',
'artifact',
'artwork',
'asbestiform',
'aspirate',
'asscheek',
'assuaging',
'astrological',
'atrium',
'audiovisual',
'averring',
'avoirdupois',
'babble',
'backup',
'bagpiper',
'ballcourt',
'ballgame',
'ballpoint',
'bamboo',
'banality',
'banknote',
'barb',
'barefaced',
'barrister',
'barter',
'basset',
'bathhouse',
'batty',
'bead',
'beak',
'begrudging',
'belittle',
'belladona',
'benefice',
'benzoyl',
'bequeath',
'berbicara',
'bereave',
'bereaved',
'bestiality',
'bianwen',
'bidirectional',
'bigwig',
'bilberry',
'birthmark',
'blabs',
'blackbird',
'bladder',
'blastula',
'blockhead',
'bloodworts',
'blotches',
'bluefin',
'blurring',
'bob',
'bobbin',
'bodyfat',
'bogaraveo',
'bollard',
'bonsai',
'bobsledding',
'bookmaker',
'bootleg',
'boozy',
'botcher',
'bottomed',
'boyfriend',
'bra',
'braid',
'braking',
'breakdancer',
'breastplate',
'breathalyzer',
'bribery',
'brier',
'brimless',
'brimming',
'bristletail',
'broadsword',
'browse',
'browser',
'brutish',
'bung',
'burbot',
'burti',
'byte',
'caesura',
'caipira',
'calcareous',
'calculator',
'camouflaging',
'canal',
'canard',
'candensis',
'canid',
'cannabis',
'canoer',
'canoeist',
'canton',
'capercaillie',
'caprice',
'capriciously',
'caption',
'carbonate',
'carbonated',
'carex',
'carnivoran',
'carnivore',
'carnivorous',
'carpus',
'cartilaginous',
'cartload',
'carucates',
'cashier',
'cassette',
'cassia',
'cassowary',
'castellan',
'castes',
'castrated',
'cataract',
'catastrophist',
'cation',
'cauldron',
'causer',
'caustic',
'cedar',
'celluloid',
'censoring',
'centralised',
'cerebropathy',
'ceremonious',
'cervical',
'cetacean',
'chainsaw',
'chaste',
'chastely',
'chav',
'cheeky',
'cheerless',
'cheetahs',
'cheque',
'chessman',
'chesspiece',
'chewable',
'chlorofluorocarbon',
'chopsticks',
'chrysantemum',
'churl',
'cinnabar',
'cinnamon',
'circumcised',
'circumvent',
'citronella',
'clade',
'clamp',
'clapper',
'classifier',
'cleanliness',
'cleave',
'clef',
'clitoral',
'clitoris',
'cloister',
'coatroom',
'cobbled',
'cockfighting',
'coddle',
'codlings',
'codomain',
'coenzyme',
'cogwheel',
'cohabit',
'coinage',
'collectivisation',
'collide',
'colour',
'colourless',
'columbium',
'combinatorial',
'commandery',
'commemoration',
'common linnet et al',
'compasses',
'complainer',
'comprehensible',
'conceit',
'concha',
'concubine',
'condiment',
'condom',
'conductance',
'confection',
'conformable',
'conforming',
'congeal',
'congealable',
'congee',
'conical',
'conjuring',
'connector',
'consession',
'console',
'constable',
'constellation',
'contaminant',
'contemn',
'contort',
'contractions',
'coolie',
'copula',
'copular',
'copulate',
'copulation',
'cornel',
'cornucopiodes',
'corvid',
'cosmogony',
'costermonger',
'councillor',
'counsellor',
'countably',
'counterintuitive',
'countrified',
'courier',
'cowpat',
'cowshed',
'crabby',
'cracker',
'cranberry',
'crayon',
'creatine',
'creatinine',
'creditor',
'cremation',
'creole',
'crewed',
'cribbage',
'cricketer',
'cringe',
'criticise',
'croissant',
'croquet',
'crossbar',
'crossbow',
'crossword',
'crosswords',
'crumb',
'crustacean',
'crustaceans',
'crybaby',
'cuckoldry',
'cuckoo',
'cucumber',
'cuirass',
'cultivar',
'culvert',
'cum',
'cursive',
'curvaceous',
'custard',
'cutie',
'cuttlefish',
'cutlery',
'cybernetics',
'cycling',
'cyclone',
'cypro',
'cytopharynx',
'dab',
'daimyo',
'daresay',
'darken',
'dart',
'dawdle',
'daydream',
'deaconship',
'debased',
'debit',
'decaffeinated',
'decapod',
'deceitfulness',
'decipher',
'deciphered',
'decoction',
'defamatory',
'defame',
'defecation',
'defile',
'definiteness',
'degenerate',
'dehusking',
'deifying',
'deity',
'dejected',
'deleted',
'deltoidal',
'dementia',
'demo',
'demolish',
'demonym',
'denim',
'denture',
'deponent',
'depressed',
'derisorily',
'designator',
'desorption',
'despicable',
'detent',
'dexterous',
'diacritics',
'diaeresis',
'diaper',
'dictionaries',
'digressing',
'digust',
'dike',
'dimness',
'diplomatique',
'dipterous',
'disadvantageous',
'disallow',
'disavow',
'discoloured',
'disconnect',
'disconnection',
'discrepant',
'disembark',
'dishonour',
'dispensable',
'dispirited',
'displeasing',
'disputatively',
'disrespectful',
'diss',
'dissipatedisyllabicity',
'distaff',
'disulfide',
'doer',
'dogfight',
'dogfish',
'domesticated',
'doorhandle',
'doorpost',
'dorsal',
'dotard',
'doughnut',
'download',
'downmarket',
'doyen',
'dreadlock',
'dreadlocks',
'dredge',
'duckling',
'dude',
'dull-witted',
'dunce',
'dupe',
'duplicating',
'duplicity',
'dye',
'dyes',
'dyestuff',
'eater',
'eavesdrop',
'echinoderms',
'eclectic',
'ecosystem',
'ecstacy',
'ectoderm',
'effervescing',
'egregious',
'eigenvector',
'ejaculate',
'ejaculation',
'electromechanical',
'electroplate',
'elephantiasis',
'em dash',
'emaciation',
'email',
'emoticon',
'encasing',
'encephalomyelitis',
'enclitic',
'enclose',
'enforcer',
'engrave',
'engross',
'enliven',
'enquire',
'entangle',
'entangled',
'entice',
'entitlement',
'entrails',
'entrenchment',
'enumerate',
'enumerating',
'envelops',
'epichoric',
'epilepsy',
'epistle',
'equinox',
'esophagus',
'espresso',
'estrange',
'etc',
'etching',
'ethane',
'ethnicity',
'ethology',
'ethylene',
'euro',
'euthanize',
'evergreen',
'exaction',
'exam',
'exclesior',
'excommunication',
'excrement',
'excrete',
'excretement',
'exhale',
'exhort',
'exine',
'explainable',
'expletive',
'extortion',
'extravagantly',
'extraverted',
'eyelet',
'factious',
'faeces',
'faggot',
'fairground',
'falsely',
'fandom',
'fanfiction',
'fart',
'farthing',
'fastener',
'feces',
'feigns',
'feline',
'felines',
'fellatio',
'fellator',
'feminin',
'fend',
'feng',
'feng shui',
'fengshui',
'feral',
'fester',
'fetter',
'fewness',
'fiancé',
'fiancée',
'fibre',
'figuratively',
'filches',
'filching',
'fillet',
'fillets',
'filterer',
'filtration',
'finalise',
'firearm',
'firebreak',
'firefighter',
'fireside',
'firmware',
'fishnet',
'fishy',
'fissure',
'flatbed',
'flattish',
'flavour',
'flea',
'flightless',
'foehn',
'fondle',
'footprint',
'footrest',
'fop',
'forcefully',
'ford',
'foreshow',
'fossil',
'fraternal',
'fratricide',
'fraudulent',
'fraudulently',
'fredag',
'freemasonic',
'freestyle',
'frequentative',
'freshwater',
'fridge',
'frigate',
'frisson',
'fritter',
'frontflip',
'frontotemporal',
'frugal',
'fulfilment',
'fumigating',
'functionality',
'fundoshi',
'furry',
'furthest',
'gadoid',
'gameplay',
'gamling',
'gastropod',
'gatepost',
'gelatinous',
'gemstone',
'genderqueer',
'genealogy',
'generative',
'generic',
'generically',
'genericized',
'genital',
'genitalia',
'genitals',
'genitourinary',
'genus',
'geometrid',
'getter',
'ghostwriter',
'giga-',
'giraffe',
'girder',
'girlfriend',
'ginseng',
'gizzard',
'glans',
'glassworks',
'glowworm',
'glutton',
'glycoside',
'goalkeeper',
'goalpost',
'gobble',
'goby-like',
'god-given',
'goddesses',
'gonad',
'goodwill',
'gorged',
'gouge',
'graceless',
'grafting',
'grandchild',
'gratuity',
'gravedigger',
'grebe',
'grid',
'grouch',
'groupers',
'grouse',
'guarantor',
'guilder',
'guillotine',
'guitarfish',
'guillemets',
'habitation',
'habitational',
'hagberry',
'hairstyle',
'hamster',
'handball',
'harbinger',
'harmonize',
'harvester',
'harvesters',
'hashish',
'hassock',
'hatefully',
'hawksbill',
'hawthorn',
'hayfield',
'hazarded',
'headlight',
'headlong',
'heaths',
'hemp',
'heraldic',
'heraldry',
'herbal',
'heterosexual',
'hi',
'hieroglyphs',
'hilted',
'hip-hop',
'hircinous',
'hives',
'hoarfrost',
'hoariness',
'hoe',
'holiness',
'holly',
'homeless',
'homie',
'homosexuality',
'honorific',
'hornet',
'horny',
'horseshoe',
'horticultural',
'hostel',
'houseboat',
'howin',
'hulled',
'humiliate',
'humour',
'hump',
'husked',
'hydroxylase',
'hyperactivity',
'hyperlink',
'hypersensitivity',
'hypersonic',
'hyphen',
'ichthyological',
'icon',
'icositetrahedron',
'ignoble',
'ikebana',
'illicitly',
'illiteracy',
'imaginable',
'immaturely',
'immerse',
'immune',
'impermeable',
'impiously',
'impregnate',
'imprison',
'impure',
'in-law',
'inappropriately',
'incredulousness',
'incriminate',
'indefinably',
'indentation',
'indistinguishably',
'ineptitude',
'infatuated',
'inflectional',
'informer',
'infraclass',
'infrakingdom',
'infraorder',
'infraphylum',
'ingesting',
'inhabitant',
'inhabiting',
'inhale',
'injure',
'inlaying',
'innapropriate',
'inoffensive',
'inoperable',
'inoperative',
'inscribe',
'insinuate',
'inspan',
'instrumentalist',
'intenseness',
'intoxication',
'intoxification',
'inventiveness',
'irascible',
'irritate',
'islamic',
'islet',
'isotope',
'jack',
'javelin',
'jellyfish',
'jerkily',
'jokingly',
'junket',
'kaf',
'kangaroo',
'kanji',
'katydid',
'kayak',
'kestrel',
'ketamine',
'kidskin',
'killjoy',
'kilo-',
'kilt',
'kinase',
'kingfisher',
'kitsch',
'kiwi',
'knighthood',
'kookaburra',
'kowtow',
'kroepoek',
'kung fu',
'labial',
'labour',
'lair',
'lamprey',
'lampshade',
'landmass',
'landmasses',
'laptop',
'larch',
'larva',
'lascivious',
'latte',
'lattice',
'laughable',
'leafless',
'lecherous',
'leech',
'leek',
'leftover',
'legless',
'lemming',
'leniusculus',
'leotard',
'lesbian',
'lettuce',
'lexeme',
'lichen',
'lifespan',
'ligature',
'lighthouse',
'lily',
'litre',
'little sis',
'lizard',
'loanword',
'loggerhead',
'loiter',
'longline',
'loofah',
'lottery',
'lowercase',
'ludifica',
'luxuriant',
'lye',
'madder',
'mafia',
'magnanimous',
'magnetite',
'magnorder',
'manageable',
'mangoes',
'manna',
'manoeuvre',
'manroot',
'maqaf',
'marmot',
'marsh',
'marshy',
'marsupial',
'masturbate',
'masturbates',
'masturbating',
'masturbation',
'masturbator',
'materialise',
'matra',
'mayfly',
'mead',
'meagre',
'mediates',
'mediator',
'mega-',
'megalitre',
'melanin',
'meningitis',
'menorah',
'menstrual',
'mercenaria',
'mercenary',
'meridiem',
'mesmerism',
'metalworks',
'metamphetamine',
'methamphetamine',
'methane',
'metric',
'microcomputer',
'microprocessor',
'midbrain',
'milkman',
'millet',
'millstone',
'minifig',
'minifigure',
'minting',
'minuscules',
'mire',
'misbehave',
'miscarriage',
'miserly',
'mislead',
'misspelling',
'misspelt',
'mite',
'mitral stenosis',
'modem',
'module',
'modulus',
'mollusc',
'mollusk',
'mongrel',
'monogram',
'monopolizing',
'monosemy',
'monosilane',
'monotheistic',
'moonshine',
'moralization',
'morel',
'motorcycle',
'motorsport',
'motorsports',
'moult',
'mourner',
'mouselike',
'mouthpart',
'mow',
'muddle',
'mugwort',
'mulberry',
'multiplier',
'muntjac',
'mutation',
'myalgic',
'mythical',
'nags',
'nape',
'narrate',
'naturopathic',
'naughtily',
'nave',
'neighbour',
'nerd',
'nescio',
'networking',
'neume',
'neurotransmitter',
'newsflash',
'nictinic',
'nightjar',
'nimble',
'ninjutsu',
'niobium',
'nipple',
'nitric',
'nitrite',
'noh',
'noice',
'nomen',
'non-Roma',
'nonchalance',
'nonessential',
'nonfatal',
'nonstandard',
'nontrivial',
'nonzero',
'noodles',
'normality',
'nosocomial',
'notionally',
'nucleon',
'numeral',
'numeric',
'nuqta',
'oar',
'oars',
'obese',
'oblast',
'obligatory',
'obnoxiously',
'obtuse',
'octahedral',
'octave',
'odour',
'oligonucleotide',
'om',
'omnivorous',
'onerous',
'online',
'oppress',
'ore',
'organinc',
'organisation',
'oscillate',
'osier',
'osmanthus',
'ostmanthus',
'otolaryngology',
'ouch',
'outergarment',
'outtake',
'ouzel',
'overseeing',
'overshoe',
'overstate',
'overstep',
'overused',
'ovum',
'oxgang',
'paddle',
'paenungulates',
'palatalized',
'palmistry',
'paltry',
'pancake',
'pancakes',
'pantherine',
'papules',
'paralysed',
'paraphrasis',
'parenthetical',
'parere',
'parietal',
'paronomasia',
'participle',
'parvorder',
'pasta',
'pastern',
'patchy',
'paternal',
'patty',
'pawl',
'pawpaw',
'pedant',
'pediment',
'peevish',
'peloton',
'pelt',
'penetrable',
'penguin',
'penile',
'penis',
'penitent',
'pentatonic',
'perceivable',
'perceptiveness',
'perfluorooctanoic',
'perineum',
'perjurer',
'peroxidase',
'perspire',
'pervert',
'pessimist',
'petal',
'petrel',
'petrol',
'pewter',
'phenylalanine',
'phobia',
'phoneme',
'photocopier',
'photocopy',
'photosynthetic',
'phthisic',
'phthisical',
'phylogenetics',
'phylum',
'pickpocket',
'piddle',
'piecework',
'pierce',
'pigmentation',
'pilfered',
'pinecone',
'pinewood',
'pistil',
'pixelization',
'placable',
'placeholder',
'placenta',
'plantlike',
'playlist',
'pleasurable',
'plectrum',
'plinth',
'ploughgate',
'ploughgates',
'plunderer',
'plural',
'pointy',
'pokeweed',
'pollute',
'polycyclic',
'polyglot',
'polygon',
'polyhedra',
'polyhedron',
'polyiamond',
'polytheistic',
'polytope',
'polyurethane',
'pomelo',
'pommel',
'pons',
'ponyfish',
'popcorn',
'portend',
'positiveness',
'possibly',
'posteroanterior',
'postposition',
'postpositional',
'potable',
'prawn',
'precipitous',
'predatory',
'predicative',
'prefix',
'premeditated',
'preservative',
'preternatural',
'primrose',
'prismatic',
'proclitic',
'procreate',
'profanities',
'prolapse',
'promiscuous',
'pronated',
'prong',
'pronunciation',
'proofreading',
'prosthetic',
'protector',
'prothrombin',
'protists',
'proto',
'protracting',
'provident',
'provider',
'provocativeness',
'provoking',
'psychometrics',
'psychopathological',
'pubic',
'pudding',
'puffin',
'purloin',
'purr',
'pushchair',
'pushy',
'pyrotechnic',
'quad',
'quadrilateral',
'quahog',
'quantifying',
'quark',
'queue',
'quiche',
'quietude',
'quilt',
'quiver',
'radiotherapy',
'ramie',
'rapids',
'raptors',
'rashly',
'raven',
'ravenously',
'ravine',
'reactive',
'readied',
'realtime',
'redskin',
'redstart',
'reed',
'reentry',
'reeve',
'refinedly',
'refiner',
'reflexive',
'reflexively',
'refutation',
'regardful',
'regnant',
'regressive',
'reindeer',
'reintegrationist',
'reinvigorated',
'relenting',
'relinquishment',
'remiss',
'renounce',
'reordered',
'repairer',
'reprimand',
'reproductory',
'reptile',
'republican',
'reset',
'restroom',
'retract',
'retread',
'reunification',
'reusable',
'reveler',
'revengefully',
'rhetorical',
'rhinarium',
'rhombus',
'rhotic',
'rhubarb',
'ribavirin',
'riffraff',
'ripen',
'riverbed',
'roasting',
'rockhopper',
'roe',
'roman',
'romanisation',
'romanization',
'rook',
'roundel',
'rout',
'rudiments',
'rugby',
'rumination',
'rummage',
'saman',
'samurai',
'sandbank',
'satirize',
'saucer',
'sautéed',
'saveloy',
'savoury',
'sawfly',
'sawhorse',
'scabby',
'scabs',
'scaleless',
'scampi',
'scarecrow',
'schoolbag',
'scoff',
'scoffs',
'scold',
'scraper',
'screwdriver',
'scribal',
'scroll',
'scrotum',
'scuba',
'scurf',
'scythe',
'seabird',
'seaduck',
'seagull',
'seaplane',
'seaport',
'seemly',
'seer',
'selfishly',
'selfsame',
'semen',
'semiconductor',
'semimetal',
'semipermeable',
'senso',
'sentimental',
'separator',
'sepulchring',
'sequentially',
'shamelessly',
'shamisen',
'shaojiu',
'shark',
'sheepfold',
'shifter',
'shindig',
'shitting',
'shoal',
'shoemaker',
'shoemaking',
'shoeshine',
'shuffleboard',
'shuttlecock',
'sibling',
'siblings',
'sickbed',
'sideband',
'sidespin',
'silkworm',
'silt',
'silverfish',
'skateboard',
'skein',
'skerry',
'skier',
'sled',
'sleeved',
'sleeveless',
'sloth',
'slut',
'slutty',
'smegma',
'sob',
'sodomite',
'software',
'solfège',
'solicitation',
'sorcerer',
'sorceress',
'sororal',
'spaceflight',
'spacetime',
'spadix',
'spar',
'sparingly',
'sparrow',
'spasmodic',
'specesi',
'speciality',
'spellings',
'sperm',
'spiderweb',
'spirally',
'spiro',
'spiteful',
'spitefully',
'splint',
'spool',
'spore',
'spotnape',
    'spp',  # Commonly used abbreviation "spp." for multiple species in species names
'sprinkles',
'sprite',
'spritsail',
'spruiks',
'squander',
'squeegee',
'squid',
'squint',
'stabbing',
'stalk',
'stamen',
'standalone',
'starthistle',
'steadfast',
'steadfastness',
'stealthy',
'stenosis',
'sth',
'sthg',
'stich',
'sticker',
'stinginess',
'stinks',
'stockaded',
'stomachache',
'stonechat',
'storey',
'stork',
'stowaway',
'straightness',
'stricto',
'strident',
'stupefy',
'subalgebra',
'subbranch',
'subclass',
'subfamily',
'subgenre',
'subgenus',
'subgroup',
'subkingdom',
'sublimely',
'submatrix',
'submerge',
'suborder',
'subphylum',
'subscriber',
'subsesquiplicate',
'subset',
'subsets',
'subsonic',
'substance',
'subtribe',
'succinctness',
'sudoku',
'sulk',
'sumo',
'sundial',
'sunflower',
'sunglasses',
'sunshade',
'sunshower',
'superannuated',
'supercharger',
'superclass',
'supercluster',
'superdivision',
'superdivisions',
'superfamily',
'superkingdom',
'superorder',
'superphylum',
'supersede',
'superunit',
'surpassingly',
'sustainer',
'sutra',
'swag',
'swearword',
'sweetener',
'sweetening',
'swimmer',
'swimwear',
'swindle',
'swindler',
'swoon',
'swordfish',
'symbiotic',
'synaeresis',
'syncope',
'syperphylum',
'systematics',
'tableware',
'tadpole',
'tailcoat',
'tallness',
'tampon',
'tanker',
'tare',
'tartrazine',
'tastelessly',
'tattle',
'tattletale',
'tattoo',
'taxon',
'taxonomic',
'taxonomy',
'tearful',
'telecom',
'telecommunication',
'teller',
'tera-',
'tern',
'terrene',
'teshuva',
'tesseract',
'testicles',
'tetrafluoromethane',
'tetrafluoromonosilane',
'tetragrams',
'tetrahedron',
'thorax',
'thrombocytopenic',
'thrombotic',
'thunderstorm',
'tibia',
'tiddlywinks',
'tieute',
'tithe',
'toady',
'tofore',
'tomography',
'toothed',
'topological',
'topology',
'torturer',
'touchable',
'towpath',
'trainee',
'tram',
'trans',
'transfinite',
'transliteration',
'transonic',
'treachery',
'tremulous',
'trendy',
'trepidation',
'trickery',
'triterpenoid',
'trove',
'trowelling',
'truncations',
'tsardom',
'tuber',
'tugboat',
'tuna',
'turmeric',
'turner',
'turnip',
'tutelary',
'twig',
'twine',
'two-up',
'typeset',
'typographer',
'tyre',
'unanswerable',
'unassuming',
'uncaring',
'unchallenging',
'unchaste',
'uncircumcised',
'uncivilised',
'uncivilized',
'uncomplicated',
'unconventionally',
'uncooked',
'uncouth',
'uncut',
'undecided',
'undergarment',
'underpants',
'understudy',
'undulate',
'undulation',
'unevenly',
'unfashionable',
'unfasten',
'unfavourable',
'unfrequented',
'ungulate',
'unholy',
'uninformed',
'unintelligent',
'unlikable',
'unmoving',
'unpeeled',
'unprocessed',
'unproven',
'unraveling',
'unravelled',
'unravelling',
'unrestrained',
'unroll',
'unscrupulously',
'unsolicited',
'unsorted',
'unsound',
'unspecialized',
'unspecific',
'untamed',
'untried',
'ununtrium',
'unveiling',
'unwell',
'unworried',
'uppercase',
'urchin',
'urinate',
'urination',
'usance',
'utensil',
'uterus',
'vacating',
'vacillate',
'vandalize',
'vane',
'vapour',
'var.',
'variants',
'verbose',
'verlan',
'verso',
'vertebra',
'vesicle',
'vespers',
'vibrance',
'vibrate',
'videotaped',
'vim',
'viol',
'viper',
'visor',
'vitae',
'voiceless',
'voluptuary',
'vomit',
'voracious',
'vulva',
'wading',
'wafer',
'walkway',
'wank',
'wanker',
'wantonly',
'washerwoman',
'watcher',
'watchfulness',
'watchman',
'waterbirds',
'watercraft',
'waterlilies',
'waw',
'weaverbird',
'webpage',
'weevil',
'wend',
'wether',
'whale',
'whales',
'whirlpool',
'whitefish',
'whitethorn',
'whorl',
'wildcard',
'wildcat',
'wildfire',
'wimp',
'windlass',
'windpipe',
'windward',
'winemaking',
'winterberry',
'wisent',
'womanlike',
'woody',
'workmate',
'workplace',
'worldliness',
'worshipers',
'worshipper',
'wow',
'wrasse',
'wrench',
'wrestler',
'wrinkly',
'yam',
'yardland',
'yarmulke',
'youthfulness',
'yuan',
'zealotry',
'zoospores',
'zygosperm',
])
# These words will never be treated as English words (overriding other
# considerations, not just membership in the set)
not_english_words = set([
# This is a blacklist - these will not be treated as English words
# even though they are in brown.words(). Adding a word on this list
# generally makes it likely to be treated as a romanization.
"Ye",
"al",
"boo",
"em",
"ma",
"de",
"Mihapjungguk",
"Mi",
"ANU",
"Franc",
"Frans",
"Germania",
"Germani",
"Kina",
])
# Construct a set of (most) English words. Multi-word expressions where we
# do not want to include the components can also be put here space-separated.
english_words = (set(brown.words()) |
known_firsts |
# XXX the second words of species names add too much garbage
# now that we accept "english" more loosely.
# set(x for name in known_species for x in name.split()) |
additional_words) - not_english_words
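# Minimal usage sketch (the module must be imported as part of its package
# because of the relative import above; the module path is hypothetical):
#   from wiktextract.english_words import english_words
#   'acorn' in english_words    # True  (added above)
#   'kung fu' in english_words  # True  (multi-word entries must match whole text)
#   'Kina' in english_words     # False (the blacklist overrides everything)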
|
#!/usr/bin/env python
# coding:utf-8
import random
import os
from datetime import datetime
from PIL import Image, ImageFilter, ImageDraw, ImageFont
# Random uppercase letter (A-Z)
def rndChar():
    return chr(random.randint(65, 90))
# Random bright color (for the background noise)
def rndColor():
    return (random.randint(64, 255), random.randint(64, 255), random.randint(64, 255))
# Random dark color (for the letters)
def rndColor2():
    return (random.randint(32, 127), random.randint(32, 127), random.randint(32, 127))
class PinChar(object):
    """Create a PIN/captcha picture with four random letters (A-Z)."""
    def __init__(self):
        self.width = 60 * 4
        self.height = 60
        # The image and its drawing context must exist before anything is drawn.
        self.image = Image.new('RGB', (self.width, self.height), (255, 255, 255))
        self.draw = ImageDraw.Draw(self.image)
        self.__background()
        self.__addchar()
        self.__blur()
        self.name = self.cname()
    def __background(self):
        """Fill the background with random-colored noise pixels."""
        for x in range(self.width):
            for y in range(self.height):
                self.draw.point((x, y), fill=rndColor())
    def __addchar(self):
        """Draw four random letters onto the image."""
        self.charlist = []
        font = ImageFont.truetype('Arial.ttf', 36)
        for t in range(4):
            c = rndChar()
            self.charlist.append(c)
            self.draw.text((60 * t + 10, 10), c, font=font, fill=rndColor2())
    def __blur(self):
        """Blur the image to make automated recognition harder."""
        self.image = self.image.filter(ImageFilter.BLUR)
    def cname(self, salt=''):
        """Generate a file name from the current UTC time."""
        t = datetime.utcnow()
        return str(t) + salt
    def regenerate(self):
        """Create a fresh image, redraw it, and save it."""
        self.image = Image.new('RGB', (self.width, self.height), (255, 255, 255))
        self.draw = ImageDraw.Draw(self.image)
        self.__background()
        self.__addchar()
        self.__blur()
        self.name = self.cname()
        self.save()
    def save(self, path='.', f='jpeg'):
        """Save the image as <self.name> inside the given directory."""
        self.image.save(os.path.join(path, self.name), f)
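# Usage sketch (assumes an 'Arial.ttf' font file is resolvable by PIL):
if __name__ == '__main__':
    pin = PinChar()
    print(''.join(pin.charlist))  # the four letters drawn into the image
    pin.save()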
|
#################################################################################
# Copyright (c) 2018-2021, Texas Instruments Incorporated - http://www.ti.com
# All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#################################################################################
from .print_utils import *
from .function_utils import *
from .data_utils import *
from .load_weights import *
from .tensor_utils import *
from .logger import *
from .hist_utils import *
from .params_base import *
from .misc_utils import *
from .config_dict import *
from .attr_dict import *
from .weights_utils import *
from .image_utils import *
from .module_utils import *
from .count_flops import forward_count_flops
from .bn_utils import *
from .range_utils import *
from .quant_utils import *
from .amp import *
from .path_utils import *
from .import_utils import *
try: from .tensor_utils_internal import *
except ImportError: pass
try: from .export_utils_internal import *
except ImportError: pass
# change has_range_estimator to True here to use a more accurate range estimator
has_range_estimator = False #True
if has_range_estimator:
    try: from .range_estimator_internal import *
    except ImportError: has_range_estimator = False
#
|
# -*- encoding: utf-8 -*-
import os
import datetime
def parse_email_text(lines):
reading_cc_email_addresses = False
reading_to_email_addresses = False
timestamp = None
sender_email_address = ''
to_email_addresses = ''
cc_email_addresses = ''
for line in lines:
line = line.strip()
if 'Date: ' in line:
timestamp = line[6 : -12]
continue
if 'From: ' in line:
sender_email_address += line[6 : ]
continue
if 'To: ' in line:
to_email_addresses += line[4 : ]
reading_to_email_addresses = True
continue
if reading_to_email_addresses == True and 'Subject: ' not in line:
to_email_addresses += line
continue
if 'Subject: ' in line and reading_to_email_addresses == True:
reading_to_email_addresses = False
continue
if 'Cc: ' in line:
cc_email_addresses += line[4 : ]
reading_cc_email_addresses = True
continue
if reading_cc_email_addresses == True and 'Mime-Version' not in line:
cc_email_addresses += line
continue
if 'Mime-Version: ' in line:
reading_cc_email_addresses = False
break # done reading necessary data. no need to parse through the extra lines
return [timestamp, sender_email_address, to_email_addresses, cc_email_addresses]
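# Example (hypothetical header lines in the Enron maildir format assumed above):
#   Date: Wed, 13 Dec 2000 08:04:00 -0800 (PST)
#   From: alice@example.com
#   To: bob@example.com, carol@example.com
#   Subject: status
#   Cc: dave@example.com
#   Mime-Version: 1.0
# parse_email_text would return:
#   ['Wed, 13 Dec 2000 08:04:00', 'alice@example.com',
#    'bob@example.com, carol@example.com', 'dave@example.com']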
def load_data(user_email):
incoming_folders = ['inbox', 'notes_inbox']
outgoing_folders = ['sent', 'sent_items']
base_path = 'dataset/'
# a list to store all unique contacts (email-addresses)
# from incoming and outgoing emails.
# The list index of each contact is considered as the
# contact id.
all_contacts = []
num_contacts = 0
    # A dictionary to store all groups the user interacted with.
    # In addition, it also stores the timestamps of each outgoing
    # and incoming interaction between the user and each group.
    # Each group is a four (4) entry dictionary:
    # 'g_contacts' is a list that contains one or more contact ids.
    # 'Iout' contains a list of timestamps of outgoing interactions.
    # 'Iin' contains a list of timestamps of incoming interactions.
    # 'weight' is the weight value specifying the strength of the connection
    # between the user and the group. At this initial phase, the default value is -1.0.
    # The key of each group is given as 'g_<groupindex>'.
    # Example: {
    #   'g_1': {'g_contacts': [4, 7, 90], 'Iout': [23423, 231123, ...], 'Iin': [23423, 231123, 235211], 'weight': -1.0},
    #   'g_2': {'g_contacts': [74], 'Iout': [23423, 231123, 235211, ...], 'Iin': [23423, ...], 'weight': -1.0}
    # }
groups = {}
# outgoing mails.
# 2209_, 5158_ multiple contacts
# Date, To, Subject, CC:
for out_folder in outgoing_folders:
path = base_path + out_folder + '/'
email_files = os.listdir(path)
for email_file in email_files:
lines = None
with open(path + email_file, 'r') as f:
lines = f.readlines()
ret = parse_email_text(lines)
timestamp = ret[0]
to_email_addresses = ret[2]
cc_email_addresses = ret[3]
# convert date to POSIX/UNIX time stamp (in seconds since January 1, 1970, 00:00:00 (UTC))
timestamp = datetime.datetime.strptime(timestamp, '%a, %d %b %Y %H:%M:%S').timestamp()
# concatenate to_email_addresses and cc_email_addresses
group_contacts = ', '.join([to_email_addresses, cc_email_addresses])
# break concatenated receiver emails using split
group_contacts = [email.strip() for email in group_contacts.split(',')] # list of email addresses
            group_contacts = [email for email in group_contacts if email != '']  # remove any '' in the list
# add receiver emails to contact list if they are not already part of the list
for email in group_contacts:
if email not in all_contacts:
all_contacts.append(email)
# convert group contacts from email address to corresponding index (integer)
group_contacts = [all_contacts.index(email) for email in group_contacts] # list of index representative of email addresses
group_contacts = set(group_contacts)
# check whether group_contacts already exist in the group list
group_exist = False
for g_idx, g_info in groups.items():
if len(group_contacts.symmetric_difference(set(g_info['g_contacts']))) == 0:
group_exist = True
break
if group_exist:
groups[g_idx]['Iout'].append(timestamp)
else:
num_groups = len(groups)
new_idx = 'g_' + str(num_groups+1)
groups[new_idx] = dict({'g_contacts': group_contacts, 'Iout': [timestamp], 'Iin':[], 'weight':-1.0})
# incoming mails.
# ex: 26_
# Date, To, Subject, CC:
for in_folder in incoming_folders:
path = base_path + in_folder + '/'
email_files = os.listdir(path)
for email_file in email_files:
lines = None
with open(path + email_file, 'r') as f:
lines = f.readlines()
ret = parse_email_text(lines)
timestamp = ret[0]
sender_email_address = ret[1]
to_email_addresses = ret[2]
cc_email_addresses = ret[3]
# convert date to POSIX/UNIX time stamp (in seconds since January 1, 1970, 00:00:00 (UTC))
timestamp = datetime.datetime.strptime(timestamp, '%a, %d %b %Y %H:%M:%S').timestamp()
# concatenate to_emails and cc_emails
group_contacts = ', '.join([sender_email_address, to_email_addresses, cc_email_addresses])
# break concatenated receiver emails using split
group_contacts = [email.strip() for email in group_contacts.split(',')] # list of email addresses
            group_contacts = [email for email in group_contacts if email != '']  # remove any '' in the list
            # Since this is an incoming mail, the user's email address is part of the
            # group contacts (i.e. it appears among either the `cc` or the `to` email
            # addresses), so remove it: when the user receives an email, all other
            # recipient contacts, including the sender, together form a group
            # connected to the user.
#assert user_email in group_contacts, 'an error occurred processing file ' + path + email_file
if user_email in group_contacts:
group_contacts.remove(user_email)
# add receiver emails to contact list if they are not already part of the list
for email in group_contacts:
if email not in all_contacts:
all_contacts.append(email)
# convert group contacts from email address to corresponding index (integer)
group_contacts = [all_contacts.index(email) for email in group_contacts] # list of index representative of email addresses
group_contacts = set(group_contacts)
# check whether group_contacts already exist in the group list
group_exist = False
for g_idx, g_info in groups.items():
if len(group_contacts.symmetric_difference(set(g_info['g_contacts']))) == 0:
group_exist = True
break
if group_exist:
groups[g_idx]['Iin'].append(timestamp)
else:
num_groups = len(groups)
new_idx = 'g_' + str(num_groups+1)
groups[new_idx] = dict({'g_contacts': group_contacts, 'Iout': [], 'Iin':[timestamp], 'weight':-1.0})
return [groups, all_contacts]
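# Usage sketch (hypothetical user address; assumes the 'dataset/' folders above exist):
if __name__ == '__main__':
    groups, contacts = load_data('user@example.com')
    print(len(contacts), 'contacts,', len(groups), 'groups')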
|
from typing import List
from collections import deque
maze = [
[1,1,1,1],
[1,0,0,1],
[1,1,0,1],
[1,0,1,1],
]
dx = [1, 0, -1, 0]
dy = [0, 1, 0, -1]
direction_char: List[str] = ['D', 'R', 'U', 'L']  # index matches dx/dy: toward a smaller row index is 'U'p, toward a larger one is 'D'own
def solve(board: List[List[int]]):
row = len(board)
col = len(board[0])
if row == 0 or col == 0:
return
for j in range(col):
search_BFS(board, 0, j)
search_BFS(board, row - 1, j)
for i in range(row):
search_BFS(board, i, 0)
search_BFS(board, i, col - 1)
# recover the board
for i in range(row):
for j in range(col):
if board[i][j] == -1:
board[i][j] = 0
else:
board[i][j] = 1
for r in board:
print(r)
def search_BFS(board, x: int, y: int):
    if board[x][y] != 0:  # wall (1) or already visited (-1)
        return
    row = len(board)
    col = len(board[0])
    q1 = deque()
    q1.append((x, y))
    board[x][y] = -1  # mark as visited
    while q1:
        topX, topY = q1.popleft()
        for i in range(4):
            newX = topX + dx[i]  # do not mutate topX/topY across directions
            newY = topY + dy[i]
            if 0 <= newX < row and 0 <= newY < col and board[newX][newY] == 0:
                board[newX][newY] = -1
                q1.append((newX, newY))
def search_DFS(board, x: int, y: int):
    if board[x][y] != 0:
        return
    board[x][y] = -1
    row = len(board)
    col = len(board[0])
    if x > 0:
        search_DFS(board, x - 1, y)
    if x < row - 1:
        search_DFS(board, x + 1, y)
    if y > 0:
        search_DFS(board, x, y - 1)
    if y < col - 1:
        search_DFS(board, x, y + 1)
if __name__ == '__main__':
solve(maze)
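# Expected output for the maze above: only the 0 at (3, 1) touches the border,
# so it stays 0 while the enclosed 0s flip to 1:
#   [1, 1, 1, 1]
#   [1, 1, 1, 1]
#   [1, 1, 1, 1]
#   [1, 0, 1, 1]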
|
import typing
import torch
class ToTensor(object):
def __init__(self,
keys: typing.Iterable = None,
dtypes: typing.Iterable[tuple] = None):
self.keys = keys
self.dtypes = dtypes
def __call__(self, **data) -> dict:
_keys = self._resolve_keys(data)
torch_data = {}
for key, item in data.items():
if key in _keys:
torch_data[key] = torch.from_numpy(item)
else:
torch_data[key] = item
if self.dtypes is not None:
for _dtype in self.dtypes:
torch_data[_dtype[0]] = torch_data[_dtype[0]].to(
dtype=_dtype[1])
return torch_data
def _resolve_keys(self, data) -> typing.Iterable:
if self.keys is None:
_keys = data.keys()
else:
_keys = self.keys
return _keys
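# Usage sketch (hypothetical keys; converts only 'image', leaves 'label' as-is):
if __name__ == '__main__':
    import numpy as np
    to_tensor = ToTensor(keys=('image',), dtypes=[('image', torch.float32)])
    out = to_tensor(image=np.zeros((2, 3)), label=1)
    print(out['image'].dtype, out['label'])  # torch.float32 1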
|
import requests
url = "https://s3.amazonaws.com/download.onnx/models/opset_9/inception_v1.tar.gz"
response = requests.get(url)
with open('output', 'wb') as file:
file.write(response.content)
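# For large archives, a streamed variant (sketch, not executed here) avoids
# holding the whole response in memory:
#   with requests.get(url, stream=True) as response:
#       response.raise_for_status()
#       with open('output', 'wb') as file:
#           for chunk in response.iter_content(chunk_size=8192):
#               file.write(chunk)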
|
CENTRALIZED = False
EXAMPLE_PAIR = "ZRX-WETH"
USE_ETHEREUM_WALLET = True
FEE_TYPE = "FlatFee"
FEE_TOKEN = "ETH"
DEFAULT_FEES = [0, 0.00001]
|
# CELL 0
# Import necessary libraries
import matplotlib.pyplot as plt
from sklearn.datasets import load_wine
from sklearn.naive_bayes import GaussianNB
from sklearn import metrics
# CELL 1
# Load the wine dataset and split the data
dataset = load_wine()
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(dataset.data, dataset.target, test_size = 0.20, random_state = 94)
# CELL 2
# GaussianNB
gnb = GaussianNB()
# Train the model
gnb.fit(X_train, Y_train)
# Predict on the held-out test set
Y_predicted = gnb.predict(X_test)
print(f"Accuracy:- {metrics.accuracy_score(Y_test, Y_predicted)}")
|
# CamJam EduKit 3 - Robotics
# Worksheet 3 - Motor Test Code
import RPi.GPIO as GPIO # Import the GPIO Library
import time # Import the Time library
# Set the GPIO modes
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
# Set the GPIO Pin mode
GPIO.setup(7, GPIO.OUT)
GPIO.setup(8, GPIO.OUT)
GPIO.setup(9, GPIO.OUT)
GPIO.setup(10, GPIO.OUT)
# Turn all motors off
GPIO.output(7, 0)
GPIO.output(8, 0)
GPIO.output(9, 0)
GPIO.output(10, 0)
# Turn the right motor forwards
GPIO.output(9, 0)
GPIO.output(10, 1)
# Turn the left motor forwards
GPIO.output(7, 0)
GPIO.output(8, 1)
# Wait for 1 second
time.sleep(1)
# Reset the GPIO pins (turns off motors too)
GPIO.cleanup()
|
print('Olá, Mundo!')  # 'Hello, World!'
nome = str(input('Qual é o seu nome? '))  # 'What is your name?'
idade = int(input('Qual é a sua idade? '))  # 'What is your age?'
print(f'Bem vindo {nome}, fico feliz em saber que tem {idade} anos.')  # 'Welcome {nome}, glad to know you are {idade} years old.'
|
import random
from pygame import Vector2
from blasteroids.server.game_objects import Asteroid
class AsteroidFactory:
def __init__(self, config, id_generator):
self.id_generator = id_generator
self.min_speed = config.asteroid.min_speed
self.max_speed = config.asteroid.max_speed
self.damage = {}
self.health = {}
self.collision_radius = {}
for i in range(3):
self.damage[i + 1] = int(pow(3, i) * config.asteroid.base_damage)
self.health[i + 1] = int(pow(3, i) * config.asteroid.base_health)
self.collision_radius[i + 1] = int((i + 1) * config.asteroid.base_radius)
def create(self, level, position):
return Asteroid(
self.id_generator.get_next_id(),
position,
Vector2(0, 1).rotate(random.random() * 360.0),
            Vector2(0, 1).rotate(random.random() * 360.0) * random.uniform(self.min_speed, self.max_speed),  # speed drawn between min and max (the original min() capped it at min_speed)
self.collision_radius[level],
self.damage[level],
self.health[level],
level,
)
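# Usage sketch (hypothetical config and id_generator stand-ins):
#   factory = AsteroidFactory(config, id_generator)
#   asteroid = factory.create(level=3, position=Vector2(100.0, 100.0))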
|
from sstcam_sandbox.d190730_pedestal import all_files
import argparse
from subprocess import call
from os.path import exists
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--dry', dest='dry', action="store_true")
args = parser.parse_args()
dry = args.dry
for ped_file in all_files:
r0_path = ped_file.r0
ped_path = ped_file.tcal
shell_path = ped_path.replace(".tcal", ".sh")
generate_ped = f"generate_ped -i {r0_path} -o {ped_path} -t\n"
if not exists(r0_path):
raise FileNotFoundError(f"Missing R0 file: {r0_path}")
with open(shell_path, 'w') as file:
file.write("source $HOME/.bash_profile\n")
file.write("source activate cta\n")
file.write("export NUMBA_NUM_THREADS=6\n")
file.write(generate_ped)
file.write(f"if [ -f {ped_path} ]; then\n")
file.write(f"\trm -f {shell_path}\n")
file.write("fi\n")
call("chmod +x {}".format(shell_path), shell=True)
cmd = "qsub -cwd -V -P short {}".format(shell_path)
print(cmd)
if not dry:
call(cmd, shell=True)
if __name__ == '__main__':
main()
|
import FWCore.ParameterSet.Config as cms
hcaldigisAnalyzer = cms.EDAnalyzer("HcalDigisValidation",
outputFile = cms.untracked.string(''),
digiTag = cms.InputTag("hcalDigis"),
QIE10digiTag= cms.InputTag("hcalDigis"),
QIE11digiTag= cms.InputTag("hcalDigis"),
mode = cms.untracked.string('multi'),
hcalselector= cms.untracked.string('all'),
mc = cms.untracked.string('yes'),
simHits = cms.untracked.InputTag("g4SimHits","HcalHits"),
emulTPs = cms.InputTag("emulDigis"),
dataTPs = cms.InputTag("simHcalTriggerPrimitiveDigis"),
TestNumber = cms.bool(False),
hep17 = cms.bool(False)
)
from Configuration.Eras.Modifier_fastSim_cff import fastSim
if fastSim.isChosen():
hcaldigisAnalyzer.simHits = cms.untracked.InputTag("famosSimHits","HcalHits")
from Configuration.Eras.Modifier_run2_HCAL_2017_cff import run2_HCAL_2017
run2_HCAL_2017.toModify(hcaldigisAnalyzer,
TestNumber = cms.bool(True)
)
from Configuration.Eras.Modifier_run2_HEPlan1_2017_cff import run2_HEPlan1_2017
run2_HEPlan1_2017.toModify(hcaldigisAnalyzer,
hep17 = cms.bool(True)
)
from Configuration.Eras.Modifier_phase2_hcal_cff import phase2_hcal
phase2_hcal.toModify(hcaldigisAnalyzer,
dataTPs = cms.InputTag(""),
digiTag = cms.InputTag("simHcalDigis"),
QIE10digiTag = cms.InputTag("simHcalDigis","HFQIE10DigiCollection"),
QIE11digiTag = cms.InputTag("simHcalDigis","HBHEQIE11DigiCollection"),
)
|
import os
from openbiolink.graph_creation import graphCreationConfig as glob
from openbiolink.graph_creation.metadata_edge.edgeRegularMetadata import EdgeRegularMetadata
from openbiolink.graph_creation.metadata_infile import InMetaEdgeStringInhibition
from openbiolink.graph_creation.metadata_infile.mapping.inMetaMapString import InMetaMapString
from openbiolink.graph_creation.types.qualityType import QualityType
class EdgeMetaGeneInhibitionGene(EdgeRegularMetadata):
NAME = 'Edge - Gene_inhibition_Gene'
LQ_CUTOFF = 0
MQ_CUTOFF = 400
HQ_CUTOFF = 700
EDGE_INMETA_CLASS = InMetaEdgeStringInhibition
MAP1_META_CLASS = InMetaMapString
def __init__(self, quality : QualityType= None):
edges_file_path = os.path.join(glob.IN_FILE_PATH, self.EDGE_INMETA_CLASS.CSV_NAME)
mapping_file1 = os.path.join(glob.IN_FILE_PATH, self.MAP1_META_CLASS.CSV_NAME)
super().__init__(is_directional=True,
edges_file_path=edges_file_path,
colindex1=self.EDGE_INMETA_CLASS.NODE1_COL, colindex2=self.EDGE_INMETA_CLASS.NODE2_COL,
edgeType=self.EDGE_INMETA_CLASS.EDGE_TYPE,
node1_type=self.EDGE_INMETA_CLASS.NODE1_TYPE, node2_type=self.EDGE_INMETA_CLASS.NODE2_TYPE,
colindex_qscore=self.EDGE_INMETA_CLASS.QSCORE_COL, quality=quality,
mapping1_file=mapping_file1, map1_sourceindex=self.MAP1_META_CLASS.SOURCE_COL, map1_targetindex=self.MAP1_META_CLASS.TARGET_COL,
mapping2_file=mapping_file1, map2_sourceindex=self.MAP1_META_CLASS.SOURCE_COL, map2_targetindex=self.MAP1_META_CLASS.TARGET_COL)
|
from __future__ import print_function
import random
import numpy as np
import torch
import torch.utils.data as data
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from PIL import Image
_CIFAR_DATASET_DIR = './datasets/CIFAR'
_CIFAR_MEAN_PIXEL = [0.5071, 0.4867, 0.4408] # [x/255.0 for x in [125.3, 123.0, 113.9]]
_CIFAR_STD_PIXEL = [0.2675, 0.2565, 0.2761] # [x/255.0 for x in [63.0, 62.1, 66.7]]
class CIFARbase(data.Dataset):
def __init__(
self,
data_dir=_CIFAR_DATASET_DIR,
split='train',
transform_train=None,
transform_test=None,
version='CIFAR100'):
assert split in ('train', 'val')
self.split = split
self.name = version + '_' + split
self.transform_test = transform_test
self.transform_train = transform_train
        if self.split != 'train':
self.transform = self.transform_test
else:
self.transform = self.transform_train
print(self.transform)
self.data = datasets.__dict__[version](
data_dir,
train=(self.split=='train'),
download=True,
transform=self.transform)
self.labels = self.data.targets
self.images = self.data.data
def __getitem__(self, index):
img, label = self.images[index], self.labels[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = Image.fromarray(img)
if self.transform is not None:
img = self.transform(img)
return img, label
def __len__(self):
return len(self.images)
class CIFAR100(CIFARbase):
def __init__(
self,
data_dir=_CIFAR_DATASET_DIR,
split='train',
do_not_use_random_transf=False):
normalize = transforms.Normalize(mean=_CIFAR_MEAN_PIXEL, std=_CIFAR_STD_PIXEL)
transform_test = transforms.Compose([
transforms.ToTensor(),
normalize])
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4, padding_mode='reflect'),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize])
if do_not_use_random_transf:
transform_train = transform_test
CIFARbase.__init__(
self,
data_dir=data_dir,
split=split,
transform_train=transform_train,
transform_test=transform_test,
version='CIFAR100')
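# Usage sketch (downloads CIFAR-100 into _CIFAR_DATASET_DIR on first run):
if __name__ == '__main__':
    trainset = CIFAR100(split='train')
    loader = data.DataLoader(trainset, batch_size=64, shuffle=True)
    images, labels = next(iter(loader))
    print(images.shape, labels.shape)  # torch.Size([64, 3, 32, 32]) torch.Size([64])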
|
"""Test pbtranscript.collapsing.Branch."""
import unittest
import os.path as op
import cPickle
import filecmp
import numpy as np
from pbcore.io.GffIO import Gff3Record
from pbtranscript.Utils import rmpath, mkdir
from pbtranscript.io import ContigSetReaderWrapper, iter_gmap_sam, GroupWriter, CollapseGffWriter
from pbtranscript.collapsing import Branch, ContiVec, transfrag_to_contig, \
exons_match_sam_record, compare_exon_matrix, get_fl_from_id, collapse_sam_records
from test_setpath import DATA_DIR, OUT_DIR, SIV_DATA_DIR, SIV_STD_DIR
_OUT_DIR_ = op.join(OUT_DIR, "test_branch")
SORTED_GMAP_SAM = op.join(SIV_DATA_DIR, 'test_branch', 'sorted-gmap-output.sam')
READS_DS = op.join(SIV_DATA_DIR, 'test_collapsing', 'gmap-input.fastq.contigset.xml')
def _get_sam_groups(ignored_ids_writer=None):
"""Returns grouped sam records read from SORTED_GMAP_SAM and READS_DS."""
query_len_dict = ContigSetReaderWrapper.name_to_len_dict(READS_DS)
groups = [g for g in iter_gmap_sam(sam_filename=SORTED_GMAP_SAM,
query_len_dict=query_len_dict,
min_aln_coverage=0.99, min_aln_identity=0.85,
ignored_ids_writer=ignored_ids_writer)]
return groups
def _get_contiVec_and_offset():
"""Returns contiVec and offset of groups[0]["+"]."""
contivec_pickle_fn = op.join(SIV_DATA_DIR, 'test_branch', 'contiVec.pickle')
a = cPickle.load(open(contivec_pickle_fn, 'rb'))
return a['contiVec'], a['offset']
def _get_exons():
"""Returns exons of groups[0]["+"]."""
contiVec, offset = _get_contiVec_and_offset()
exons = contiVec.to_exons(offset=offset)
return exons
class TEST_Branch(unittest.TestCase):
"""Test functions of pbtranscript.collapsing.Branch."""
def setUp(self):
"""Define input and output file."""
rmpath(_OUT_DIR_)
mkdir(_OUT_DIR_)
def test_transfrag_to_contig(self):
"""Test transfrag_to_contig, which takes a group of overlapping sam
records as input and return (contiVec, offset, chrom, strand),
where contiVec is a nparray vector of contig"""
expected_contiVec, expected_offset = _get_contiVec_and_offset()
groups = _get_sam_groups()
contiVec, offset, chrom, strand = \
transfrag_to_contig(groups[0]["+"], skip_5_exon_alt=True)
self.assertEqual(offset, 10710)
self.assertEqual(chrom, "SIRV1")
self.assertEqual(strand, "+")
self.assertEqual(contiVec, expected_contiVec)
self.assertEqual(expected_offset, 10710)
def test_ContiVec_to_exons(self):
"""Test ContiVec.to_exons"""
exons = _get_exons() # contains 10 intervals
p = []
exons.traverse(p.append)
expected_tree_0 = [(10710, 10712, 0), (10712, 10713, 1),
(10713, 10715, 2), (10715, 10791, 3),
(10791, 10791, 4), (10882, 11057, 5),
(11057, 11057, 6), (11434, 11638, 7),
(11638, 11640, 8), (11640, 11641, 9)]
self.assertEqual([(node.start, node.end, node.interval.value) for node in p],
expected_tree_0)
def test_exons_match_sam_record(self):
"""Test exons_match_sam_record, which takes a GMAP sam reocord and an exon tree
(type IntervalUniqueTree, created by contiVec.to_exons) as input and
returns a list of nodes this GMAP sam record corresponds to."""
exons = _get_exons() # contains 10 intervals
records = _get_sam_groups()[0]["+"] # contains 10 sam records
self.assertEqual(len(records), 10)
stuffs = [exons_match_sam_record(record=record, exons=exons) for record in records]
# The very first sam record contains exons 0, 1, 2, 3, 5, 7, 8, 9, but not 4, 6.
self.assertEqual([node.value for node in stuffs[0]], [0, 1, 2, 3, 5, 7, 8, 9])
        # The second sam record contains exons 1, 2, 3, 5, 7, 8, 9, but not 0, 4, 6.
self.assertEqual([node.value for node in stuffs[1]], [1, 2, 3, 5, 7, 8, 9])
# The third sam record contains exons 1, 2, 3, 5, 7, not 0, 4, 6, 8, 9
self.assertEqual([node.value for node in stuffs[2]], [1, 2, 3, 5, 7])
self.assertEqual([node.value for node in stuffs[3]], [1, 2, 3, 5, 7, 8])
self.assertEqual([node.value for node in stuffs[4]], [1, 2, 3, 5, 7, 8, 9])
self.assertEqual([node.value for node in stuffs[5]], [1, 2, 3, 5, 7, 8, 9])
self.assertEqual([node.value for node in stuffs[6]], [1, 2, 3, 5, 7])
self.assertEqual([node.value for node in stuffs[7]], [2, 3, 5, 7, 8, 9])
self.assertEqual([node.value for node in stuffs[8]], [2, 3, 5, 7, 8])
self.assertEqual([node.value for node in stuffs[9]], [3, 5, 7, 8, 9])
def test_compare_exon_matrix(self):
"""
test compare_exon_matrix, which takes two exon matrix (m1 and m2) as input and
returns True if m1 and m2 can be merged and False otherwise.
An exon matrix m is a 2-d array where m[0, i] is 1 if the 1-th exon will be used.
"""
exons = _get_exons() # contains 10 intervals
records = _get_sam_groups()[0]["+"] # contains 10 sam records
stuffs = [exons_match_sam_record(record=record, exons=exons) for record in records]
p = []
exons.traverse(p.append)
node_d = dict((x.interval.value, x) for x in p) # exon index --> exon node
exon_all_indices = range(0, len(p))
ms = [np.asarray([[1 if exon_index in [node.value for node in stuffs[record_index]] else 0
for exon_index in exon_all_indices]])
for record_index in range(0, len(records))]
self.assertTrue(np.all(ms[0] == np.asarray([[1, 1, 1, 1, 0, 1, 0, 1, 1, 1]])))
self.assertTrue(np.all(ms[1] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 1, 1]])))
self.assertTrue(np.all(ms[2] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 0, 0]])))
self.assertTrue(np.all(ms[3] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 1, 0]])))
self.assertTrue(np.all(ms[4] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 1, 1]])))
self.assertTrue(np.all(ms[5] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 1, 1]])))
self.assertTrue(np.all(ms[6] == np.asarray([[0, 1, 1, 1, 0, 1, 0, 1, 0, 0]])))
self.assertTrue(np.all(ms[7] == np.asarray([[0, 0, 1, 1, 0, 1, 0, 1, 1, 1]])))
self.assertTrue(np.all(ms[8] == np.asarray([[0, 0, 1, 1, 0, 1, 0, 1, 1, 0]])))
self.assertTrue(np.all(ms[9] == np.asarray([[0, 0, 0, 1, 0, 1, 0, 1, 1, 1]])))
for i in xrange(0, 10):
for j in xrange(i, 10):
self.assertTrue(compare_exon_matrix(ms[i], ms[j], strand='+', node_d=node_d)[0])
mx = np.asarray([[1, 0, 0, 1, 0, 1, 0, 1, 1, 1]])
# modified exon matrix
self.assertFalse(compare_exon_matrix(ms[0], mx, strand='+', node_d=node_d)[0])
def test_iterative_merge_transcripts(self):
"""No test yet"""
pass # skip
def test_get_fl_from_id(self):
"""Test get_fl_from_id(list_of_ids)"""
ids = ["i2_HQ_sampleb92221|c242/f13p0/2019", "i2_HQ_sampleb92221|c24/f3p0/2019",
"i2_HQ_sampleb92221|c103/f3p0/2371", "i2_HQ_sampleb92221|c107/f3p0/2368",
"i2_HQ_sampleb92221|c1215/f2p0/2374", "i2_HQ_sampleb92221|c122/f4p0/2368"]
expected_fl = 28
self.assertEqual(get_fl_from_id(ids), expected_fl)
def test_collapse_sam_records(self):
"""Test collapse_sam_records, which takes in a list of grouped sam records. and
write collapsed gff records to good_gff_writer|bad_gff_writer. A collapsed
gff record is 'good' if there are >= cov_threshold supportive sam records
belonging to its group; otherwise, 'bad'.
"""
test_name = "test_collapse_sam_records"
good_gff_fn = op.join(_OUT_DIR_, test_name + ".good.gff.unfuzzy")
bad_gff_fn = op.join(_OUT_DIR_, test_name + ".bad.gff.unfuzzy")
group_fn = op.join(_OUT_DIR_, test_name + ".group.txt.unfuzzy")
rmpath(good_gff_fn)
rmpath(bad_gff_fn)
rmpath(group_fn)
records = _get_sam_groups()[0]["+"] # contains 10 sam records
with CollapseGffWriter(good_gff_fn) as good_gff_writer, \
CollapseGffWriter(bad_gff_fn) as bad_gff_writer, \
GroupWriter(group_fn) as group_writer:
collapse_sam_records(records=records, cuff_index=0, cov_threshold=2,
allow_extra_5exon=False, skip_5_exon_alt=True,
good_gff_writer=good_gff_writer,
bad_gff_writer=bad_gff_writer,
group_writer=group_writer)
def str_to_gffrecord(line):
fields = line.strip().split('\t')
attributes = []
for attr_tuple in fields[8].split(';'):
if len(attr_tuple.strip()) == 0:
continue
else:
fs = attr_tuple.strip().split(' ')
if len(fs) == 2:
attributes.append((fs[0], fs[1].replace('"', '')))
return Gff3Record(seqid=fields[0], start=fields[3], end=fields[4],
type=fields[2], attributes=attributes)
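        # A collapsed GFF line looks like the following (illustrative, tab-separated;
        # the seqid and source columns here are made up):
        # chr1  PacBio  transcript  10711  11641  .  +  .  gene_id "PB.0"; transcript_id "PB.0.1";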
bad_gff_records = [str_to_gffrecord(line) for line in open(bad_gff_fn, 'r') if not line.startswith('##')]
self.assertEqual(len(bad_gff_records), 0)
good_gff_records = [str_to_gffrecord(line) for line in open(good_gff_fn, 'r') if not line.startswith('##')]
self.assertEqual(len(good_gff_records), 4)
self.assertEqual([(int(r.start), int(r.end), r.type, r.attributes['gene_id'], r.attributes['transcript_id']) for r in good_gff_records],
[(10711, 11641, 'transcript', "PB.0", "PB.0.1"),
(10711, 10791, 'exon', "PB.0", "PB.0.1"),
(10883, 11057, 'exon', "PB.0", "PB.0.1"),
(11435, 11641, 'exon', "PB.0", "PB.0.1"),
])
def test_Branch(self):
"""
Test Branch and Branch.run.
Note that fuzzy junctions are not merged.
"""
test_name = "test_branch"
good_gff_fn = op.join(_OUT_DIR_, test_name + ".good.gff.unfuzzy")
bad_gff_fn = op.join(_OUT_DIR_, test_name + ".bad.gff.unfuzzy")
group_fn = op.join(_OUT_DIR_, test_name + ".group.txt.unfuzzy")
rmpath(good_gff_fn)
rmpath(bad_gff_fn)
rmpath(group_fn)
b = Branch(isoform_filename=READS_DS, sam_filename=SORTED_GMAP_SAM,
cov_threshold=2, min_aln_coverage=0.99, min_aln_identity=0.95)
b.run(allow_extra_5exon=True, skip_5_exon_alt=False,
ignored_ids_fn=None,
good_gff_fn=good_gff_fn,
bad_gff_fn=bad_gff_fn,
group_fn=group_fn)
self.assertTrue(op.exists(good_gff_fn))
self.assertTrue(op.exists(bad_gff_fn))
self.assertTrue(op.exists(group_fn))
std_good_gff_fn = op.join(SIV_STD_DIR, "test_branch", test_name + ".good.gff.unfuzzy")
std_bad_gff_fn = op.join(SIV_STD_DIR, "test_branch", test_name + ".bad.gff.unfuzzy")
std_group_fn = op.join(SIV_STD_DIR, "test_branch", test_name + ".group.txt.unfuzzy")
print "Comparing %s and %s" % (good_gff_fn, std_good_gff_fn)
self.assertTrue(filecmp.cmp(good_gff_fn, std_good_gff_fn))
self.assertTrue(filecmp.cmp(bad_gff_fn, std_bad_gff_fn))
self.assertTrue(filecmp.cmp(group_fn, std_group_fn))
|
"""
Utility to download the CIFAR-10 dataset
Author: Ioannis Kourouklides, www.kourouklides.com
License:
https://github.com/kourouklides/artificial_neural_networks/blob/master/LICENSE
"""
# %%
# IMPORTS
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# standard library imports
import os
# %%
def download_cifar_10(new_dir=None):
    """
    Download the CIFAR-10 dataset into new_dir, which defaults to the current
    working directory resolved at call time (a mutable default evaluated at
    import time would pin the directory too early).
    """
    # %%
    # IMPORTS
    if new_dir is None:
        new_dir = os.getcwd()
    os.chdir(new_dir)
# code repository sub-package imports
from artificial_neural_networks.utils.data_utils import download_dataset
# %%
file_url = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
file_name = 'cifar-10-python.tar.gz'
file_path = download_dataset(file_url, file_name)
# %%
return file_path
# %%
if __name__ == '__main__':
dataset_path = download_cifar_10('../../')
|
from celery import Celery
from contentcuration.utils.celery.tasks import CeleryTask
class CeleryApp(Celery):
task_cls = CeleryTask
result_cls = 'contentcuration.utils.celery.tasks:CeleryAsyncResult'
_result_cls = None
def on_init(self):
"""
        Use the on_init callback to set our own result class; Celery doesn't yet have
        an easier way to customize this class specifically.
"""
self._result_cls = self.subclass_with_self(self.result_cls)
@property
def AsyncResult(self):
return self._result_cls
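
# Hypothetical usage sketch (names here are assumptions, not part of this module):
#
#     app = CeleryApp('contentcuration')
#     result = app.AsyncResult(task_id)   # a CeleryAsyncResult bound to this app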
|
"""
Use this file to write your solution for the Summer Code Jam 2020 Qualifier.
Important notes for submission:
- Do not change the names of the two classes included below. The test suite we
  will use to test your submission relies on the existence of these two classes.
- You can leave the `ArticleField` class as-is if you do not wish to tackle the
advanced requirements.
- Do not include "debug"-code in your submission. This means that you should
remove all debug prints and other debug statements before you submit your
solution.
"""
import datetime
import typing
import re
class ArticleField:
"""The `ArticleField` class for the Advanced Requirements."""
def __init__(self, field_type: typing.Type[typing.Any]):
pass
class Article:
"""The `Article` class you need to write for the qualifier."""
def __init__(self, title: str, author: str, publication_date: datetime.datetime, content: str):
self.title = title
self.author = author
        self.publication_date = publication_date
self.content = content
def __repr__(self):
        return (f'<Article title={self.title!r} author={self.author!r} '
                f'publication_date={self.publication_date.isoformat()!r}>')
def __len__(self):
return len(self.content)
    def short_introduction(self, n_characters):
        last_breakpoint_index = 0
        # Guard against content shorter than n_characters to avoid an IndexError.
        for index in range(min(n_characters, len(self.content))):
            if self.content[index] == ' ' or self.content[index] == '\n':
                last_breakpoint_index = index
        return self.content[0:last_breakpoint_index]
def most_common_words(self, n_words):
        letters_only = re.sub(r'[^a-zA-Z ]', ' ', self.content.lower())
        words_list = letters_only.split(' ')
frequency_dict = {}
        for word in words_list:
            frequency_dict[word] = frequency_dict.get(word, 0) + 1
        frequency_dict = dict(sorted(frequency_dict.items(), key=lambda x: x[1], reverse=True))
temp_dict = {}
counter = 0
        for key, value in frequency_dict.items():
            if counter < n_words and key != '':
temp_dict[key] = value
counter += 1
return temp_dict
# return frequency_dict
########
# fairytale = Article(
# title="The emperor's new clothes",
# author="Hans Christian Andersen",
# content="'But he has nothing at all on!' at last cried out all the people. The Emperor was vexed, for he knew that the people were right.",
# publication_date=datetime.datetime(1837, 4, 7, 12, 15, 0),
# )
# # print(fairytale.title)
# # print(len(fairytale))
# # print(fairytale.short_introduction(60))
# print(fairytale.content + '\n')
# print(fairytale.most_common_words(3))
|
from __future__ import print_function
from .LEDLightSource import *
|
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from goodtables import validate
# Validate
def test_check_maximum_length_constraint(log):
source = [
['row', 'word'],
[2, 'a'],
[3, 'ab'],
[4, 'abc'],
[5, 'abcd'],
[6],
]
schema = {'fields': [
{'name': 'row', 'type': 'integer'},
{'name': 'word', 'type': 'string', 'constraints': {'maxLength': 2}}
]}
report = validate(source, schema=schema, checks=[
'maximum-length-constraint',
])
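    # log (a fixture assumed to come from the suite's conftest) flattens the report
    # into (table-number, row-number, column-number, check-code) tuples: rows 4
    # ('abc') and 5 ('abcd') exceed the maxLength of 2 on the 'word' column.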
assert log(report) == [
(1, 4, 2, 'maximum-length-constraint'),
(1, 5, 2, 'maximum-length-constraint'),
]
|
import py
from rpython.jit.metainterp.test import test_fficall
from rpython.jit.backend.x86.test.test_basic import Jit386Mixin
class TestFfiCall(Jit386Mixin, test_fficall.FfiCallTests):
# for the individual tests see
# ====> ../../../metainterp/test/test_fficall.py
pass
|
a = int(input())
b = int(input())
c = int(input())
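# Print the largest of the three numbers first, then the remaining two in
# ascending order (inferred from the branch structure below).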
if a >= b and a >= c:
print(a)
if b < c:
print(b)
print(c)
else:
print(c)
print(b)
elif b >= a and b >= c:
print(b)
if a < c:
print(a)
print(c)
else:
print(c)
print(a)
else:
print(c)
if a < b:
print(a)
print(b)
else:
print(b)
print(a)
|
"""Align target text to reference translation.
"""
import argparse
WINDOW_SIZE = 30
MAX_THRESHOLD = 0.9
MIN_THRESHOLD = 0.4
VOCAB = 'glove.840B.300d'
PROGRESS = False
DEVICE = 'cpu'
def get_parser():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--target', '-t', required=True,
help='The target text file to align.')
parser.add_argument(
'--reference', '-r', required=True,
help='The reference translation to align to.')
parser.add_argument(
'--output', '-o', required=True,
help='The output file to write the aligned target text.')
parser.add_argument(
'--window_size', '-w', type=int, default=WINDOW_SIZE,
help='The number of reference sentences to compare per target.')
parser.add_argument(
'--max_threshold', type=float, default=MAX_THRESHOLD,
help='The ABLEU threshold to assume best matching sentences.')
parser.add_argument(
'--min_threshold', type=float, default=MIN_THRESHOLD,
help='The minimum ABLEU score for valid alignment.')
parser.add_argument(
'--vocab', '-v', default=VOCAB,
help='The pretrained alias from `torchtext.vocab` to use.')
parser.add_argument(
'--cache_dir',
help='The directory to save vocabulary cache.')
parser.add_argument(
'--progress', '-p', action='store_true', default=PROGRESS,
help='Show progress bar.')
parser.add_argument(
'--device', '-d', default=DEVICE,
help='The `torch.device` value to use in calculations.')
return parser
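

if __name__ == '__main__':
    # Minimal usage sketch (an assumption; the original file only defines the
    # parser): resolve the CLI options and echo them for inspection.
    args = get_parser().parse_args()
    print(vars(args))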
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from flask import Response, current_app, request
from itsdangerous import BadSignature, URLSafeSerializer
from airflow.api_connexion import security
from airflow.api_connexion.exceptions import NotFound
from airflow.api_connexion.schemas.dag_source_schema import dag_source_schema
from airflow.models.dagcode import DagCode
from airflow.security import permissions
log = logging.getLogger(__name__)
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE)])
def get_dag_source(file_token: str):
"""Get source code using file token"""
secret_key = current_app.config["SECRET_KEY"]
auth_s = URLSafeSerializer(secret_key)
try:
path = auth_s.loads(file_token)
dag_source = DagCode.code(path)
except (BadSignature, FileNotFoundError):
raise NotFound("Dag source not found")
return_type = request.accept_mimetypes.best_match(['text/plain', 'application/json'])
if return_type == 'text/plain':
return Response(dag_source, headers={'Content-Type': return_type})
if return_type == 'application/json':
content = dag_source_schema.dumps(dict(content=dag_source))
return Response(content, headers={'Content-Type': return_type})
return Response("Not Allowed Accept Header", status=406)
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utilities for dealing with file modes.
import stat
def filetype(mode):
"""
Returns "dir" or "file" according to what type path is.
@param mode: file mode from "stat" command.
"""
if stat.S_ISLNK(mode):
return "link"
elif stat.S_ISDIR(mode):
return "dir"
elif stat.S_ISREG(mode):
return "file"
else:
return "unknown"
def rwxtype(mode):
""" Returns l/d/-/? for use in "rwx" style strings. """
if stat.S_ISLNK(mode):
return "l"
elif stat.S_ISDIR(mode):
return "d"
elif stat.S_ISREG(mode):
return "-"
else:
return "?"
BITS = (stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR,
stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP,
stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH,
stat.S_ISVTX)
def expand_mode(mode):
  """Returns a list of booleans, one per permission bit in BITS."""
  return [bool(mode & b) for b in BITS]
def compress_mode(tup):
mode = 0
for b, n in zip(tup, BITS):
if b:
mode += n
return mode
def rwx(mode):
"""
Returns "rwx"-style string like that ls would give you.
I couldn't find much extant code along these lines;
this is similar in spirit to the google-able "pathinfo.py".
"""
bools = expand_mode(mode)
s = list("rwxrwxrwxt")
for (i, v) in enumerate(bools[:-1]):
if not v:
s[i] = "-"
# Sticky bit should either be 't' or no char.
if not bools[-1]:
s = s[:-1]
return rwxtype(mode) + "".join(s)
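

if __name__ == '__main__':
  # Quick sanity sketch (not part of the original module): a regular file with
  # mode 0755 renders as "-rwxr-xr-x", and expand/compress round-trip cleanly.
  demo_mode = stat.S_IFREG | 0o755
  assert rwx(demo_mode) == "-rwxr-xr-x"
  assert compress_mode(expand_mode(demo_mode)) == 0o755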
|
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='efficiency',
version='0.4',
packages=find_packages(exclude=['tests*']),
license='MIT',
description='A package for efficient programming',
long_description=long_description,
long_description_content_type='text/markdown',
install_requires=['spacy'],
url='https://github.com/zhijing-jin/efficiency',
author='Z',
author_email='zhijing.jin@connect.hku.hk'
)
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
def hash_index(v, group):
"""
Hash values to store hierarchical index
:param v: index value
:param group: variables from which index was derived
:return: str
v = [1, 2]
    group = ['q1', 'q2']
return 'q1::1__q2::2'
"""
if not isinstance(v, (list, tuple)):
_hash = list(zip(group, [v]))
else:
_hash = list(zip(group, v))
return '__'.join(list(map(lambda x: '%s::%s' % (x[0], x[1]), _hash)))
def unhash_index(_hash):
"""
    Decode hashed value to tuple
    :param _hash: str, hash_index result
    :return: tuple of tuples
    _hash = 'q1::1__q2::2'
    return (('q1', '1'), ('q2', '2'))
"""
try:
return tuple(map(lambda x: tuple(x.split('::')), _hash.split('__')))
    except Exception:
print(_hash)
def equlize_size(lst):
    """
    Equalize size of incoming iterables by padding with ('', '') pairs
    :param lst: list of 2-dimensional iterables
    :return: tuple of tuples
    """
    max_size = max(map(len, lst))
ret = []
for v in lst:
ret.append(tuple(v) + (('', ''),) * (max_size - len(v)))
return tuple(ret)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-05 02:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0157_auto_20180701_1026'),
]
operations = [
migrations.AlterField(
model_name='backlogaccessory',
name='accessory_name',
field=models.CharField(default='', max_length=120),
),
migrations.AlterField(
model_name='statementaccessory',
name='accessory_name',
field=models.CharField(default='', max_length=120),
),
]
|
from collections import OrderedDict
import numpy as np
from robosuite_extra.env_base import SawyerEnv
from robosuite.models.arenas import TableArena
from robosuite.models.objects import BoxObject
from robosuite.utils.mjcf_utils import array_to_string
from robosuite_extra.utils import transform_utils as T
from robosuite_extra.controllers import SawyerEEFVelocityController
from robosuite_extra.reach_env import ReachTask
import copy
from collections import deque
class SawyerReach(SawyerEnv):
"""
    This class corresponds to the reaching task for the Sawyer robot arm. The gripper needs to arrive 2cm above
a specified goal on the table.
"""
def __init__(
self,
gripper_type="PushingGripper",
parameters_to_randomise=None,
randomise_initial_conditions=True,
table_full_size=(0.8, 1.6, 0.719),
use_camera_obs=False,
use_object_obs=True,
reward_shaping=True,
use_indicator_object=False,
has_renderer=False,
has_offscreen_renderer=True,
render_collision_mesh=False,
render_visual_mesh=True,
control_freq=10,
horizon=50,
ignore_done=False,
camera_name="frontview",
camera_height=256,
camera_width=256,
camera_depth=False,
pid=True,
success_radius=0.01
):
"""
Args:
gripper_type (str): type of gripper, used to instantiate
gripper models from gripper factory.
parameters_to_randomise [string,] : List of keys for parameters to randomise, None means all the available parameters are randomised
randomise_initial_conditions [bool,]: Whether or not to randomise the starting configuration of the task.
table_full_size (3-tuple): x, y, and z dimensions of the table.
use_camera_obs (bool): if True, every observation includes a
rendered image.
use_object_obs (bool): if True, include object (cube) information in
the observation.
reward_shaping (bool): if True, use dense rewards.
use_indicator_object (bool): if True, sets up an indicator object that
is useful for debugging.
has_renderer (bool): If true, render the simulation state in
a viewer instead of headless mode.
has_offscreen_renderer (bool): True if using off-screen rendering.
render_collision_mesh (bool): True if rendering collision meshes
in camera. False otherwise.
render_visual_mesh (bool): True if rendering visual meshes
in camera. False otherwise.
control_freq (float): how many control signals to receive
in every second. This sets the amount of simulation time
that passes between every action input.
horizon (int): Every episode lasts for exactly @horizon timesteps.
ignore_done (bool): True if never terminating the environment (ignore @horizon).
camera_name (str): name of camera to be rendered. Must be
set if @use_camera_obs is True.
camera_height (int): height of camera frame.
camera_width (int): width of camera frame.
camera_depth (bool): True if rendering RGB-D, and RGB otherwise.
            pid (bool): Whether to use a velocity PID controller or the MuJoCo proportional velocity controller.
            success_radius (float): how close to the goal is considered a success.
"""
self.initialised = False
# settings for table
self.table_full_size = table_full_size
# whether to use ground-truth object states
self.use_object_obs = use_object_obs
# reward configuration
self.reward_shaping = reward_shaping
if (self.reward_shaping):
self.reward_range = [-np.inf, horizon * (0.1)]
else:
self.reward_range = [0, 1]
# Whether to use dynamics domain randomisation
self.parameters_to_randomise = parameters_to_randomise
self.randomise_initial_conditions = randomise_initial_conditions
self.dynamics_parameters = OrderedDict()
self.default_dynamics_parameters = OrderedDict()
self.parameter_sampling_ranges = OrderedDict()
self.factors_for_param_randomisation = OrderedDict()
self.success_radius = success_radius
#Param for storing a specific goal starting position
self.specific_goal_position = None
super().__init__(
gripper_type=gripper_type,
gripper_visualization=False,
use_indicator_object=use_indicator_object,
has_renderer=has_renderer,
has_offscreen_renderer=has_offscreen_renderer,
render_collision_mesh=render_collision_mesh,
render_visual_mesh=render_visual_mesh,
control_freq=control_freq,
horizon=horizon,
ignore_done=ignore_done,
use_camera_obs=use_camera_obs,
camera_name=camera_name,
camera_height=camera_height,
camera_width=camera_width,
camera_depth=camera_depth,
pid=pid,
)
self._set_default_dynamics_parameters(pid)
self._set_default_parameter_sampling_ranges()
self._set_dynamics_parameters(self.default_dynamics_parameters)
self._set_factors_for_param_randomisation(self.default_dynamics_parameters)
# Check that the parameters to randomise are within the allowed parameters
if (self.parameters_to_randomise is not None):
self._check_allowed_parameters(self.parameters_to_randomise)
# IK solver for placing the arm at desired locations during reset
self.IK_solver = SawyerEEFVelocityController()
self.init_control_timestep = self.control_timestep
self.init_qpos = self.mujoco_robot.init_qpos
gripper_tip_xpos = self.sim.data.get_site_xpos("grip_site").copy()
gripper_to_eef_in_world = self.sim.data.get_body_xpos("right_hand") - gripper_tip_xpos
self.gripper_size = np.linalg.norm(gripper_to_eef_in_world)
# Storing parameters for temporary switching
self.cached_parameters_to_randomise = None
self.cached_dynamics_parameters = None
self.initialised = True
self.reset()
def _set_dynamics_parameters(self, parameters):
self.dynamics_parameters = copy.deepcopy(parameters)
def _default_damping_params(self):
# return np.array([0.01566, 1.171, 0.4906, 0.1573, 1.293, 0.08688, 0.1942]) # -real world calibration
# return np.array([0.8824,2.3357,1.1729, 0.0 , 0.5894, 0.0 ,0.0082]) #- command calibration
return np.array([8.19520686e-01, 1.25425414e+00, 1.04222253e+00,
0.00000000e+00, 1.43146116e+00, 1.26807887e-01, 1.53680244e-01,]) #- command calibration 2
def _default_armature_params(self):
return np.array([0.00000000e+00, 0.00000000e+00, 2.70022664e-02, 5.35581203e-02,
3.31204140e-01, 2.59623415e-01, 2.81964631e-01,])
def _default_joint_friction_params(self):
return np.array([4.14390483e-03,
9.30938506e-02, 2.68656509e-02, 0.00000000e+00, 0.00000000e+00,
4.24867204e-04, 8.62040317e-04])
def _set_default_dynamics_parameters(self, use_pid):
"""
        Sets the default environment parameters.
"""
self.default_dynamics_parameters['joint_forces'] = np.zeros((7,))
self.default_dynamics_parameters['acceleration_forces'] = np.zeros((7,))
self.default_dynamics_parameters['eef_forces'] = np.zeros((6,))
self.default_dynamics_parameters['eef_timedelay'] = np.asarray(0)
self.default_dynamics_parameters['timestep_parameter'] = np.asarray(0.0)
self.default_dynamics_parameters['pid_iteration_time'] = np.asarray(0.)
self.default_dynamics_parameters['mujoco_timestep'] = np.asarray(0.002)
self.default_dynamics_parameters['action_additive_noise'] = np.asarray(0.0)
self.default_dynamics_parameters['action_multiplicative_noise'] = np.asarray(0.0)
self.default_dynamics_parameters['action_systematic_noise'] = np.asarray(0.0)
self.default_dynamics_parameters['eef_obs_position_noise'] = np.asarray(0.0)
self.default_dynamics_parameters['eef_obs_velocity_noise'] = np.asarray(0.0)
link_masses = np.zeros((7,))
for link_name, idx, body_node, mass_node, joint_node in self._robot_link_nodes_generator():
if (mass_node is not None):
dynamics_parameter_value = float(mass_node.get("mass"))
link_masses[idx] = dynamics_parameter_value
self.default_dynamics_parameters['link_masses'] = link_masses
self.default_dynamics_parameters['joint_dampings'] = self._default_damping_params()
self.default_dynamics_parameters['armatures'] = self._default_armature_params()
self.default_dynamics_parameters['joint_frictions'] = self._default_joint_friction_params()
if (use_pid):
gains = self.mujoco_robot.velocity_pid_gains
kps = np.array([gains['right_j{}'.format(actuator)]['p'] for actuator in range(7)])
kis = np.array([gains['right_j{}'.format(actuator)]['i'] for actuator in range(7)])
kds = np.array([gains['right_j{}'.format(actuator)]['d'] for actuator in range(7)])
#
self.default_dynamics_parameters['kps'] = kps
self.default_dynamics_parameters['kis'] = kis
self.default_dynamics_parameters['kds'] = kds
else:
kvs = np.zeros((7,))
for target_joint, jnt_idx, node in self._velocity_actuator_nodes_generator():
gains_value = float(node.get("kv"))
kvs[jnt_idx] = gains_value
self.default_dynamics_parameters['kvs'] = kvs
def _set_default_parameter_sampling_ranges(self):
"""
Returns the parameter ranges to draw samples from in the domain randomisation.
"""
parameter_ranges = {
'joint_forces': np.array([[0.,0.,0.,0.,0.,0.,0.],[1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5]]),# [2., 1.5, 1.5, 1.5 ,0.75 ,0.5,0.3]
'acceleration_forces': np.array([[0.,0.,0.,0.,0.,0.,0.], [0.12,0.12,0.12,0.12,0.12,0.12,0.12]]),#
'eef_forces': np.array([[0.,0.,0.,0.,0.,0.], [0.06 ,0.06,0.06,0.01,0.01,0.01,]]), #
'eef_timedelay': np.array([0, 1]),
'timestep_parameter': np.array([0.0, 0.01]),
'pid_iteration_time': np.array([0., 0.04]), #-1 and 0 are not allowed values
'mujoco_timestep': np.array([0.001,0.002]),
'action_additive_noise': np.array([0.01, 0.1]),
'action_multiplicative_noise': np.array([0.005,0.02]),
'action_systematic_noise': np.array([-0.05, 0.05]),
'eef_obs_position_noise': np.array([0.0005, 0.001]),
'eef_obs_velocity_noise': np.array([0.0005, 0.001]),
'link_masses': np.array([0.98, 1.02]),
'joint_dampings': np.array([0.5, 2.]),
'armatures': np.array([0.66, 1.5]),
'joint_frictions': np.array([0.66, 1.5]),
}
if (self.pid):
parameter_ranges['kps'] = np.array([0.66, 1.5])
parameter_ranges['kis'] = np.array([0.66, 1.5])
parameter_ranges['kds'] = np.array([0.66, 1.5])
else:
parameter_ranges['kvs'] = [0.5, 2]
self.parameter_sampling_ranges = parameter_ranges
def _set_factors_for_param_randomisation(self, parameters):
factors = copy.deepcopy(parameters)
factors['joint_forces'] = np.ones((7,))
factors['acceleration_forces'] = np.ones((7,))
factors['eef_forces'] = np.ones((1,))
factors['eef_timedelay'] = 1.0
factors['timestep_parameter'] = 1.0
factors['pid_iteration_time'] = 1.0
factors['mujoco_timestep'] = 1.0
factors['action_additive_noise'] = 1.0
factors['action_multiplicative_noise'] = 1.0
factors['action_systematic_noise'] = 1.0
factors['eef_obs_position_noise'] = 1.0
factors['eef_obs_velocity_noise'] = 1.0
self.factors_for_param_randomisation = factors
def _velocity_actuator_nodes_generator(self):
"""
Caching the xml nodes for the velocity actuators for use when setting the parameters
"""
for node in self.model.root.findall(".//velocity[@kv]"):
target_joint = node.get("joint")
jnt_idx = int(target_joint[-1])
yield target_joint, jnt_idx, node
def _robot_link_nodes_generator(self):
"""
        Caching the xml nodes for the robot links for use when setting the parameters
"""
for link_idx, link_name in enumerate(self.mujoco_robot.links):
body_node = self.mujoco_robot.root.find(".//body[@name='{}']".format(link_name))
mass_node = body_node.find("./inertial[@mass]")
joint_node = body_node.find("./joint")
yield link_name, link_idx, body_node, mass_node, joint_node
def _check_allowed_parameters(self, parameters):
allowed_parameters = self.get_parameter_keys()
for param in parameters:
assert param in allowed_parameters, '{} not allowed. Only allowed parameters are {}'.format(param,
allowed_parameters)
def _select_appropriate_distribution(self, key):
'''
Which distribution to use to sample the different dynamics parameters.
:param key: The parameter to consider.
'''
if (
key == 'joint_forces'
or key == 'acceleration_forces'
or key == 'eef_forces'
or key == 'timestep_parameter'
or key == 'pid_iteration_time'
or key == 'mujoco_timestep'
or key == 'action_additive_noise'
or key == 'action_multiplicative_noise'
or key == 'action_systematic_noise'
or key == 'eef_obs_position_noise'
or key == 'eef_obs_velocity_noise'
or key == 'link_masses'
):
return self.np_random.uniform
elif (
key == 'eef_timedelay'
):
return self._ranged_random_choice
else:
return self._loguniform
def _loguniform(self, low=1e-10, high=1., size=None):
return np.asarray(np.exp(self.np_random.uniform(np.log(low), np.log(high), size)))
def _ranged_random_choice(self,low, high, size=1):
vals = np.arange(low,high+1)
return self.np_random.choice(vals, size)
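    # e.g. self._loguniform(0.66, 1.5, size=7) draws 7 multiplicative factors whose
    # logs are uniform on [log(0.66), log(1.5)] (the armature/friction/gain factor
    # ranges above), while self._ranged_random_choice(0, 1) picks 0 or 1 uniformly.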
def _parameter_for_randomisation_generator(self, parameters=None):
'''
Generates (key,value) pairs of sampled dynamics parameters.
:param parameters: The parameters to be sampled for randomisation, if None, all the allowed parameters are sampled.
'''
parameter_ranges = self.parameter_sampling_ranges
if (parameters is None):
parameters = self.get_parameter_keys()
for key in parameters:
parameter_range = parameter_ranges[key]
if (parameter_range.shape[0] == 1):
yield key, np.asarray(parameter_range[0])
elif (parameter_range.shape[0] == 2):
distribution = self._select_appropriate_distribution(key)
size = self.default_dynamics_parameters[key].shape
yield key, np.asarray(
self.factors_for_param_randomisation[key] * distribution(*parameter_ranges[key], size=size))
else:
                raise RuntimeError('Parameter randomisation range needs to be of shape {1,2}xN')
def _load_model(self):
"""
Loads an xml model, puts it in self.model. This sets up the mujoco xml for the scene.
"""
super()._load_model()
self.mujoco_robot.set_base_xpos([0, 0, 0])
### Domain Randomisation ###
if (self.initialised):
for key, val in self._parameter_for_randomisation_generator(parameters=self.parameters_to_randomise):
self.dynamics_parameters[key] = val
## Queues for adding time delays
self.eef_pos_queue = deque(maxlen=int(self.dynamics_parameters['eef_timedelay'] + 1))
self.eef_vel_queue = deque(maxlen=int(self.dynamics_parameters['eef_timedelay'] + 1))
if (self.pid is not None):
self.pid.sample_time = self.dynamics_parameters['pid_iteration_time']
### Create the Task ###
## Load the Arena ##
self.mujoco_arena = TableArena(
table_full_size=self.table_full_size, table_friction=(0.1, 0.001, 0.001)
)
if self.use_indicator_object:
self.mujoco_arena.add_pos_indicator()
# The sawyer robot has a pedestal, we want to align it with the table
self.mujoco_arena.set_origin([0.16 + self.table_full_size[0] / 2, 0, 0])
goal = BoxObject(size=[0.023 / 2, 0.023 / 2, 0.001 / 2],
rgba=[0, 1, 0, 1], )
## Put everything together into the task ##
self.model = ReachTask(self.mujoco_arena, self.mujoco_robot, goal)
### Set the goal position ###
# Gripper position at neutral
gripper_pos_neutral = [0.44969246, 0.16029991, 1.00875409]
if(self.specific_goal_position is not None):
init_pos = np.array([gripper_pos_neutral[0] +self.specific_goal_position[0],
gripper_pos_neutral[1] + self.specific_goal_position[1],
self.model.table_top_offset[2]])
init_pos = array_to_string(init_pos)
elif (self.randomise_initial_conditions):
# Place goal in a 20x20cm square directly below the eef neutral position.
noise = self.np_random.uniform(-1, 1, 3) * np.array([0.20, 0.20, 0.0])
offset = np.array([gripper_pos_neutral[0], gripper_pos_neutral[1], self.model.table_top_offset[2]])
init_pos = array_to_string(noise + offset)
else:
init_pos = np.concatenate([gripper_pos_neutral[:2], [self.model.table_top_offset[2]]])
init_pos = array_to_string(init_pos)
self.model.xml_goal.set("pos", init_pos)
### Set the xml parameters to the values given by the dynamics_parameters attribute ###
if (self.initialised):
self._apply_xml_dynamics_parameters()
def _apply_xml_dynamics_parameters(self):
"""
Applying the values contained in dynamics_parameters to the xml elements of the model. If a pid is used this
also applied the pid gains contained in the dynamics parameters.
"""
opt_node = self.model.root.find('option')
opt_node.set("timestep", str(self.dynamics_parameters['mujoco_timestep']))
for link_name, idx, body_node, mass_node, joint_node in self._robot_link_nodes_generator():
if (mass_node is not None):
mass_node.set("mass", str(self.dynamics_parameters['link_masses'][idx]))
if (joint_node is not None):
joint_node.set("damping", str(self.dynamics_parameters['joint_dampings'][idx]))
joint_node.set("armature", str(self.dynamics_parameters['armatures'][idx]))
joint_node.set("frictionloss", str(self.dynamics_parameters['joint_frictions'][idx]))
if (self.pid):
self.pid.tunings = (self.dynamics_parameters['kps'],
self.dynamics_parameters['kis'],
self.dynamics_parameters['kds'],
)
else:
for target_joint, jnt_idx, node in self._velocity_actuator_nodes_generator():
node.set("kv", str(self.dynamics_parameters['kvs'][jnt_idx]))
def set_parameter_sampling_ranges(self, sampling_ranges):
'''
Set a new sampling range for the dynamics parameters.
:param sampling_ranges: (Dict) Dictionary of the sampling ranges for the different parameters of the form
(param_name, range) where param_name is a valid param name string and range is a numpy array of dimensionality
{1,2}xN where N is the dimension of the given parameter
'''
for candidate_name, candidate_value in sampling_ranges.items():
assert candidate_name in self.parameter_sampling_ranges, 'Valid parameters are {}'.format(self.parameter_sampling_ranges.keys())
assert candidate_value.shape[0] == 1 or candidate_value.shape[0]==2, 'First dimension of the sampling parameter needs to have value 1 or 2'
assert len(candidate_value.shape) == len(self.parameter_sampling_ranges[candidate_name].shape), '{} has the wrong number of dimensions'.format(candidate_name)
if(len(self.parameter_sampling_ranges[candidate_name].shape) >1):
assert self.parameter_sampling_ranges[candidate_name].shape[1] == candidate_value.shape[1], '{} has the wrong shape'.format(candidate_name)
self.parameter_sampling_ranges[candidate_name] = candidate_value
def get_parameter_sampling_ranges(self):
return copy.deepcopy(self.parameter_sampling_ranges)
def get_parameter_keys(self):
return self.default_dynamics_parameters.keys()
def get_total_parameter_dimension(self):
total_size = 0.
for key, val in self.default_dynamics_parameters.items():
total_size += val.size
return total_size
def get_internal_state(self):
return np.concatenate([self._joint_positions, self._joint_velocities]).tolist()
def get_internal_state_dimension(self):
internal_state = self.get_internal_state()
return len(internal_state)
def change_parameters_to_randomise(self, parameters):
self._check_allowed_parameters(parameters)
self._set_dynamics_parameters(self.default_dynamics_parameters)
self.parameters_to_randomise = parameters
def get_randomised_parameters(self):
if (self.parameters_to_randomise is not None):
return self.parameters_to_randomise
else:
return self.get_parameter_keys()
def get_randomised_parameter_dimensions(self):
""" Return the number of dimensions of the ranomised parameters"""
randomised_parameter_names = self.get_randomised_parameters()
total_dimension = 0
for param in randomised_parameter_names:
param_shape = self.default_dynamics_parameters[param].shape
if(param_shape ==()):
total_dimension += 1
else:
total_dimension += param_shape[0]
return total_dimension
def get_dynamics_parameters(self):
"""
Returns the values of the current dynamics parameters.
"""
return copy.deepcopy(self.dynamics_parameters)
def get_default_dynamics_parameters(self):
"""
Returns the default values of the dynamics parameters.
"""
return copy.deepcopy(self.default_dynamics_parameters)
def get_factors_for_randomisation(self):
"""
Returns the factor used for domain randomisation.
"""
return copy.deepcopy(self.factors_for_param_randomisation)
def set_dynamics_parameters(self, dynamics_parameter_dict):
"""
Setting the dynamics parameters of the environment to specific values. These are going to be used the next
        time the environment is reset, and will be overridden if domain randomisation is on.
:param dynamics_parameter_dict: Dictionary with the values of the parameters to set.
"""
for key, value in dynamics_parameter_dict.items():
assert key in self.dynamics_parameters, 'Setting a parameter that does not exist'
self.dynamics_parameters[key] = value
def randomisation_off(self,):
'''
Disable the parameter randomisation temporarily and cache the current set of parameters and
        which parameters are being randomised. This can be useful for evaluation.
'''
current_params_to_randomise = self.get_randomised_parameters()
current_params = self.get_dynamics_parameters()
self.cached_parameters_to_randomise = current_params_to_randomise
self.cached_dynamics_parameters = current_params
self.parameters_to_randomise = []
return current_params, current_params_to_randomise
def randomisation_on(self):
'''
        Restores the randomisation settings as they were before the call to randomisation_off.
'''
if(self.cached_dynamics_parameters is None):
print("Randomisation was not switched off before switching it back on.")
return
self.parameters_to_randomise = self.cached_parameters_to_randomise
self.set_dynamics_parameters(self.cached_dynamics_parameters)
self.cached_parameters_to_randomise = None
self.cached_dynamics_parameters = None
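    # Hypothetical evaluation pattern (an assumption, not part of this class):
    #   env.randomisation_off()   # pin the current dynamics while evaluating
    #   ...run evaluation episodes...
    #   env.randomisation_on()    # restore the cached randomisation settings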
def sample_parameter_randomisation(self, parameters=None):
''' Samples a dictionary of dynamics parameters values using the randomisation process currently set in the environment
parameters ([string,]) : List of parameters to sample a randomisation from. If None, all the allowed parameters are sampled.
'''
if (not self.initialised):
            print('Function has undefined behaviour if the environment is not fully initialised, returning with no effect')
return
parameters_sample = {}
for key, val in self._parameter_for_randomisation_generator(parameters):
assert key in self.get_parameter_keys(), '{} not allowed. Choose from {}'.format(key,
self.get_parameter_keys())
parameters_sample[key] = val
return parameters_sample
def _set_goal_neutral_offset(self, goal_x, goal_y):
self.specific_goal_position = np.array([goal_x, goal_y])
def _get_reference(self):
"""
Sets up references to important components. A reference is typically an
index or a list of indices that point to the corresponding elements
in a flatten array, which is how MuJoCo stores physical simulation data.
"""
super()._get_reference()
# goal ids
self.goal_body_id = self.sim.model.body_name2id("goal")
self.goal_site_id = self.sim.model.site_name2id("goal")
# Gripper ids
self.l_finger_geom_ids = [
self.sim.model.geom_name2id(x) for x in self.gripper.left_finger_geoms
]
self.r_finger_geom_ids = [
self.sim.model.geom_name2id(x) for x in self.gripper.right_finger_geoms
]
def _reset_internal(self):
"""
Resets simulation internal configurations.
"""
super()._reset_internal()
self.sim.forward()
self.init_right_hand_quat = self._right_hand_quat
self.init_right_hand_orn = self._right_hand_orn
self.init_right_hand_pos = self._right_hand_pos
eef_rot_in_world = self.sim.data.get_body_xmat("right_hand").reshape((3, 3))
self.world_rot_in_eef = copy.deepcopy(eef_rot_in_world.T)
if (self.randomise_initial_conditions and self.initialised):
# Start the gripper in a 10x10 cm area around the neutral position
eef_pos = self.sim.data.get_body_xpos('right_hand')
noise = self.np_random.uniform(-1, 1, 3) * np.array([0.10, 0.10, 0.0])
init_pos = eef_pos + noise
init_pose = T.make_pose(init_pos, np.array([[0.0, 1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, -1.0]]))
# Start the IK search from the rest qpos
ref_q = self.mujoco_robot.init_qpos
# Express init_pose in the base frame of the robot
init_pose_in_base = self.pose_in_base(init_pose)
# Do the IK
joint_angles = self.IK_solver.compute_joint_angles_for_endpoint_pose(init_pose_in_base, ref_q)
# Set the robot joint angles
self.set_robot_joint_positions(joint_angles)
self.sim.forward()
def reward(self, action=None):
"""
Reward function for the task.
        The dense reward has four components (matching the computation below):
        Joint limits: a penalty for each joint within 0.1 rad of its limits
        Reaching: in [-inf, 0], the negative distance between the gripper and the goal
        Table contact and success: a penalty for hitting the table, plus a small bonus on success
        The sparse reward only receives a {0,1} upon reaching the goal
Args:
action (np array): The action taken in that timestep
Returns:
            reward (float or dict): the reward if sparse rewards are used, otherwise a dictionary
            with the total reward and the subcomponents of the dense reward.
"""
reward = 0.
# sparse completion reward
if not self.reward_shaping and self._check_success():
reward = 1.0
# use a dense reward
if self.reward_shaping:
# max joint angles reward
joint_limits = self._joint_ranges
current_joint_pos = self._joint_positions
hitting_limits_reward = - np.sum([(x < joint_limits[i, 0] + 0.1 or x > joint_limits[i, 1] - 0.1) for i, x in
enumerate(current_joint_pos)])
reward += hitting_limits_reward
# reaching reward
goal_pos = self.sim.data.site_xpos[self.goal_site_id]
goal_pos_actual = goal_pos + np.array([0., 0., 0.025 + self.gripper_size])
eef_pos = self.sim.data.get_body_xpos("right_hand")
dist = np.linalg.norm(eef_pos - goal_pos_actual)
reaching_reward = -dist
reward += reaching_reward
# Hitting the table reward
hitting_the_table_reward = 0.0
hitting_the_table = self._check_contact_with("table_collision")
if (hitting_the_table):
hitting_the_table_reward -= 1.0
reward += hitting_the_table_reward
# Success Reward
success = self._check_success()
if (success):
reward += 0.1
# Return all three types of rewards
reward = {"reward": reward, "reaching_distance": reaching_reward,
"hitting_limits_reward": hitting_limits_reward,
"hitting_the_table_reward": hitting_the_table_reward,
"unstable":False}
return reward
def _check_success(self):
"""
Returns True if task has been completed.
"""
eef_pos = self.sim.data.get_body_xpos("right_hand")
goal_pos = self.sim.data.site_xpos[self.goal_site_id]
goal_pos_actual = goal_pos + np.array([0., 0., 0.025 + self.gripper_size])
dist = np.linalg.norm(goal_pos_actual - eef_pos)
success_radius = self.success_radius
# object centre is within the goal radius
return dist < success_radius
def _pre_action(self, action):
""" Takes the action, randomised the control timestep, and adds some additional random noise to the action."""
# Change control timestep to simulate various random time delays
timestep_parameter = self.dynamics_parameters['timestep_parameter']
self.control_timestep = self.init_control_timestep + self.np_random.exponential(scale=timestep_parameter)
super()._pre_action(action)
# Adding forces
self.sim.data.qfrc_applied[
self._ref_joint_vel_indexes
] += self.dynamics_parameters['joint_forces'] * self.np_random.uniform(-1, 1, 7)
self.sim.data.xfrc_applied[
self._ref_gripper_body_indx
] = self.dynamics_parameters['eef_forces'] * self.np_random.uniform(-1, 1, 6)
# Adding force proportional to acceleration
self.sim.data.qfrc_applied[
self._ref_joint_vel_indexes
] += self.dynamics_parameters['acceleration_forces'] * self.sim.data.qacc[
self._ref_joint_vel_indexes
]
def _post_action(self, action):
"""
Add dense reward subcomponents to info
"""
reward, done, info = super()._post_action(action)
if self.reward_shaping:
info = reward
reward = reward["reward"]
info["success"] = self._check_success()
return reward, done, info
def _get_observation(self):
"""
Returns an OrderedDict containing observations [(name_string, np.array), ...].
Important keys:
robot-state: contains robot-centric information.
object-state: requires @self.use_object_obs to be True.
contains object-centric information.
image: requires @self.use_camera_obs to be True.
contains a rendered frame from the simulation.
depth: requires @self.use_camera_obs and @self.camera_depth to be True.
contains a rendered depth map from the simulation
"""
di = OrderedDict()
# camera observations
if self.use_camera_obs:
camera_obs = self.sim.render(
camera_name=self.camera_name,
width=self.camera_width,
height=self.camera_height,
depth=self.camera_depth,
)
if self.camera_depth:
di["image"], di["depth"] = camera_obs
else:
di["image"] = camera_obs
# low-level object information
if self.use_object_obs:
# Extract position and velocity of the eef
eef_pos_in_world = self.sim.data.get_body_xpos("right_hand")
eef_xvelp_in_world = self.sim.data.get_body_xvelp("right_hand")
# Apply time delays
eef_pos_in_world = self._apply_time_delay(eef_pos_in_world, self.eef_pos_queue)
eef_xvelp_in_world = self._apply_time_delay(eef_xvelp_in_world, self.eef_vel_queue)
# Add random noise to the observations
position_noise = self.dynamics_parameters['eef_obs_position_noise']
velocity_noise = self.dynamics_parameters['eef_obs_velocity_noise']
eef_pos_in_world = eef_pos_in_world + self.np_random.normal(loc=0., scale=position_noise)
eef_xvelp_in_world = eef_xvelp_in_world + self.np_random.normal(loc=0., scale=velocity_noise)
# Get the goal position in the world
goal_pos_in_world = np.array(self.sim.data.site_xpos[self.goal_site_id])
# Correct for the fact that in the real robot we record the eef position at the goal as the observation
goal_pos_in_world = goal_pos_in_world + np.array([0., 0., self.gripper_size])
# Get object to goal vectors in EEF frame
eef_to_goal_in_world = goal_pos_in_world - eef_pos_in_world
eef_to_goal_in_eef = self.world_rot_in_eef.dot(eef_to_goal_in_world)
eef_xvelp_in_eef = self.world_rot_in_eef.dot(eef_xvelp_in_world)
# Record observations into a dictionary
di["eef_pos_in_world"] = eef_pos_in_world
di["eef_vel_in_world"] = eef_xvelp_in_world
di["goal_pos_in_world"] = goal_pos_in_world
di["task-state"] = np.concatenate([eef_to_goal_in_eef, eef_xvelp_in_eef])
return di
    def _apply_time_delay(self, obs, queue):
        queue.appendleft(copy.deepcopy(obs))
if (len(queue) == queue.maxlen):
return queue.pop()
else:
return queue[-1]
def _check_contact(self):
"""
Returns True if gripper is in contact with an object.
"""
collision = False
for contact in self.sim.data.contact[: self.sim.data.ncon]:
if (
self.sim.model.geom_id2name(contact.geom1)
in self.gripper.contact_geoms()
or self.sim.model.geom_id2name(contact.geom2)
in self.gripper.contact_geoms()
):
collision = True
break
return collision
def _check_contact_with(self, object):
"""
Returns True if gripper is in contact with an object.
"""
collision = False
for contact in self.sim.data.contact[: self.sim.data.ncon]:
if (
(self.sim.model.geom_id2name(contact.geom1) in self.gripper.contact_geoms()
and contact.geom2 == self.sim.model.geom_name2id(object))
or (self.sim.model.geom_id2name(contact.geom2) in self.gripper.contact_geoms()
and contact.geom1 == self.sim.model.geom_name2id(object))
):
collision = True
break
return collision
def _gripper_visualization(self):
"""
Do any needed visualization here. Overrides superclass implementations.
"""
# color the gripper site appropriately based on distance to object
if self.gripper_visualization:
# get distance to object
object_site_id = self.sim.model.site_name2id(self.model.object_names[self.model.push_object_idx])
dist = np.sum(
np.square(
self.sim.data.site_xpos[object_site_id]
- self.sim.data.get_site_xpos("grip_site")
)
)
# set RGBA for the EEF site here
max_dist = 0.1
scaled = (1.0 - min(dist / max_dist, 1.)) ** 15
rgba = np.zeros(4)
rgba[0] = 1 - scaled
rgba[1] = scaled
rgba[3] = 0.5
self.sim.model.site_rgba[self.eef_site_id] = rgba
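

# Hypothetical smoke test (assumes a working robosuite/MuJoCo install and a 7-dof
# velocity action space, so it is left as a comment rather than executable code):
#
#     env = SawyerReach(has_renderer=False, horizon=10)
#     obs = env.reset()
#     for _ in range(10):
#         obs, reward, done, info = env.step(np.random.uniform(-1, 1, 7))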
|
from setuptools import setup, find_packages
import versioneer
setup(
name='atmcorr',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='Atmospheric Correction using 6S',
author='Jonas Solvsteen',
author_email='josl@dhigroup.com',
packages=find_packages(),
entry_points="""
[console_scripts]
atmcorr=atmcorr.scripts.atmcorr:cli
""",
install_requires=[
'dg_calibration',
'sensor_response_curves',
'satmeta'])
|
cur_dir = 1
pos_x = 0
pos_y = 0
def mv(dir, val):
global pos_x, pos_y
if dir == 0:
pos_y -= val
elif dir == 1:
pos_x += val
elif dir == 2:
pos_y += val
elif dir == 3:
pos_x -= val
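
# Example course: F10, N3, F7, R90, F11 starting facing East ends at
# (x, y) = (17, 8), i.e. a Manhattan distance of 25.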
while True:
try:
ln = input()
dir = ln[0]
val = int(ln[1:])
if dir == 'N':
mv(0, val)
elif dir == 'E':
mv(1, val)
elif dir == 'S':
mv(2, val)
elif dir == 'W':
mv(3, val)
elif dir == 'F':
mv(cur_dir, val)
elif dir == 'L':
cur_dir = (cur_dir + (360 - val) // 90) % 4
elif dir == 'R':
cur_dir = (cur_dir + val // 90) % 4
except EOFError:
break
print(abs(pos_x) + abs(pos_y))
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for xla handling of placeholder_with_default."""
from tensorflow.compiler.tests import xla_test
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class PlaceholderTest(xla_test.XLATestCase):
def test_placeholder_with_default_default(self):
with self.session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(4.0)
ph = array_ops.placeholder_with_default(v, shape=[])
out = ph * 2
sess.run(variables.variables_initializer([v]))
self.assertEqual(8.0, self.evaluate(out))
def test_placeholder_with_default_fed(self):
with self.session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(4.0)
ph = array_ops.placeholder_with_default(v, shape=[])
out = ph * 2
sess.run(variables.variables_initializer([v]))
self.assertEqual(2.0, sess.run(out, {ph: 1.0}))
if __name__ == '__main__':
googletest.main()
|
import pytest
from chalice import ConflictError, UnprocessableEntityError, ChaliceViewError
from fire_manager.application.firefighter.add_firefighter.firefighter_request_error_serializers import \
NewFirefighterErrorSerializers
from fire_manager.domain.firefighter.add_firefighter.add_firefighter_usecase import \
FirefighterAlreadyRegistered, MissingMandatoryFields
class TestNewFirefighterErrorSerializers:
def test_should_return_a_409_http_error_if_firefighter_already_exists(self):
exception = FirefighterAlreadyRegistered()
with pytest.raises(ConflictError) as exception_raised:
NewFirefighterErrorSerializers.to_http_error(exception)
assert exception_raised.value.STATUS_CODE == 409
assert str(
exception_raised.value) == "ConflictError: Firefighter already exists, can't add it again"
def test_should_return_a_422_http_error_if_any_parameter_is_missing(self):
exception = MissingMandatoryFields(MissingMandatoryFields.NAME_IS_MISSING)
with pytest.raises(UnprocessableEntityError) as exception_raised:
NewFirefighterErrorSerializers.to_http_error(exception)
assert exception_raised.value.STATUS_CODE == 422
assert str(
exception_raised.value) == 'UnprocessableEntityError: Necessary arguments missing: Field "name" is mandatory'
def test_should_return_a_500_http_error_if_any_other_error_is_raised(self):
exception = StopIteration()
with pytest.raises(ChaliceViewError) as exception_raised:
NewFirefighterErrorSerializers.to_http_error(exception)
assert exception_raised.value.STATUS_CODE == 500
assert str(
exception_raised.value) == 'ChaliceViewError: An unknown error had happen ¯\_(ツ)_/¯'
|
# input
print("What's your name?")
name = input("> ")
print("Hello there " + name)
print("Give me a number")
userNumber = int(input("> "))
print(userNumber * 3)
|
from systems.commands.index import Command
class Stop(Command('db.stop')):
def exec(self):
self.log_result = False
self.manager.stop_service(self, 'zimagi-postgres', self.remove)
self.success('Successfully stopped PostgreSQL database service')
|
# -*- coding:utf-8 -*-
"""
..
---------------------------------------------------------------------
___ __ __ __ ___
/ | \ | \ | \ / the automatic
\__ |__/ |__/ |___| \__ annotation and
\ | | | | \ analysis
___/ | | | | ___/ of speech
http://www.sppas.org/
Use of this software is governed by the GNU Public License, version 3.
SPPAS is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SPPAS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
This banner notice must not be removed.
---------------------------------------------------------------------
src.utils.tests.test_datatype.py
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:author: Brigitte Bigi
:organization: Laboratoire Parole et Langage, Aix-en-Provence, France
:contact: develop@sppas.org
:license: GPL, v3
:copyright: Copyright (C) 2011-2019 Brigitte Bigi
:summary: Test the utility datatype classes.
"""
import unittest
from ..datatype import bidict
# ---------------------------------------------------------------------------
class TestBiDict(unittest.TestCase):
"""Test bidirectional dictionary. """
def test_create(self):
d = bidict()
d = bidict({'a': 1})
def test_setitem(self):
"""Test setitem in a bidict. """
d = bidict()
d['a'] = 1
self.assertTrue('a' in d)
self.assertTrue(1 in d)
self.assertEqual(1, d['a'])
self.assertEqual(1, d.get('a'))
self.assertEqual('a', d[1])
b = bidict({'a': 1})
self.assertTrue('a' in b)
self.assertTrue(1 in b)
self.assertEqual(1, b['a'])
self.assertEqual('a', b[1])
with self.assertRaises(KeyError):
b[2]
def test_delitem(self):
"""Test delitem in a bidict. """
d = bidict({'a': 1})
d['a'] = 2
self.assertTrue('a' in d)
self.assertTrue(2 in d)
self.assertFalse(1 in d)
self.assertNotEqual(1, d['a'])
self.assertEqual(2, d['a'])
self.assertEqual('a', d[2])
del d[2]
self.assertFalse(2 in d)
self.assertFalse('a' in d)
|
from __future__ import print_function
import numpy as np
import os
from sensor_msgs.msg import LaserScan
from navrep.tools.data_extraction import archive_to_lidar_dataset
from navrep.tools.rings import generate_rings
from navrep.models.vae2d import ConvVAE, reset_graph
DEBUG_PLOTTING = True
# Parameters for training
batch_size = 1
NUM_EPOCH = 100
DATA_DIR = "record"
HOME = os.path.expanduser("~")
vae_model_path = os.path.expanduser("~/navrep/models/V/vae.json")
# create network
reset_graph()
vae = ConvVAE(batch_size=batch_size, is_training=False)
# load
vae.load_json(vae_model_path)
# create training dataset
dataset = archive_to_lidar_dataset("~/navrep/datasets/V/ian", limit=180)
if len(dataset) == 0:
raise ValueError("no scans found, exiting")
print(len(dataset), "scans in dataset.")
# split into batches:
total_length = len(dataset)
num_batches = int(np.floor(total_length / batch_size))
# rings converter
rings_def = generate_rings(64, 64)
dummy_msg = LaserScan()
dummy_msg.range_max = 100.0
dummy_msg.ranges = range(1080)
for idx in range(num_batches):
batch = dataset[idx * batch_size : (idx + 1) * batch_size]
scans = batch
rings = rings_def["lidar_to_rings"](scans).astype(float)
obs = rings / rings_def["rings_to_bool"]
# remove "too close" points
obs[:, :, 0, :] = 0.0
rings_pred = vae.encode_decode(obs) * rings_def["rings_to_bool"]
    if DEBUG_PLOTTING:
import matplotlib.pyplot as plt
plt.ion()
plt.figure("rings")
plt.clf()
fig, (ax1, ax2) = plt.subplots(
2, 1, num="rings",
)
ax1.imshow(np.reshape(rings, (64, 64)))
ax2.imshow(np.reshape(rings_pred, (64, 64)))
ax1.set_title(idx)
# update
plt.pause(0.01)
|
from cuser.middleware import CuserMiddleware
from django.db import models
from django.urls import reverse_lazy
from asset.utils import ASSET_TYPE_CHOICES, SHELL
from team.models import Profile
class Asset(models.Model):
name = models.CharField(unique=True, max_length=100)
serial_number = models.CharField(null=True, blank=True, max_length=100)
description = models.TextField(null=True, blank=True)
type = models.CharField(
max_length=6,
choices=ASSET_TYPE_CHOICES,
default=SHELL,
)
acquisition_date = models.DateField(null=True, blank=True, help_text='Date of purchase.')
acquisition_price = models.DecimalField(max_digits=10, decimal_places=2, null=True, blank=True)
retirement_date = models.DateField(null=True, blank=True, help_text='Date removed from service.')
retirement_reason = models.TextField(null=True, blank=True)
date_added = models.DateTimeField(auto_now_add=True)
date_updated = models.DateTimeField(auto_now=True)
created_by = models.ForeignKey(
Profile,
on_delete=models.PROTECT,
related_name='assets_created',
)
last_modified_by = models.ForeignKey(
Profile,
on_delete=models.PROTECT,
related_name='assets_last_modified',
)
class Meta:
ordering = ['-date_updated']
def __str__(self):
return '%s' % (self.name,)
def save(self, *args, **kwargs):
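        # NOTE: CuserMiddleware.get_user() may return None outside a
        # request/response cycle (e.g. in the shell or management commands);
        # this save() assumes it runs within an authenticated request.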
user = CuserMiddleware.get_user()
if not self.pk:
self.created_by = user.profile
self.last_modified_by = user.profile
super(Asset, self).save(*args, **kwargs)
@property
def absolute_url(self):
return reverse_lazy('asset:view', kwargs={'pk': self.pk})
|
#!/usr/bin/env python
import sys,os,re
consec = re.compile('([0-9,-]+) && ([0-9,-]+)')
consecv = re.compile('\(([0-9,-]+)\).+\(([0-9,-]+)\)')
from time import sleep
while 1:
handle = open(sys.argv[1],'r')
data = []
handle.seek(0)
for i in handle.readlines():
line = i.strip().split("\t")
data.append(line)
data.sort(key=lambda x:(100-float(x[-2]),float(x[-1])))
for v in data:
thisline = "\t".join(v)
score = float(v[-1])
if score < int(sys.argv[3]):
continue
        m = consec.search(thisline) or consecv.search(thisline)
        tmss = m.groups()
if sys.argv[2] == 'consec':
start = int(re.split(',|-',tmss[0])[-1])
end = int(re.split(',|-',tmss[1])[0])
if start + 1 == end:
print thisline
if sys.argv[2] != 'all' and sys.argv[2] !='consec':
if tmss[1] ==sys.argv[2]:
print thisline
if sys.argv[2] == 'all':
print thisline
print "#%i Results... "%len(data)
what = raw_input("Push Enter to refresh...")
os.system('clear')
|
# Settings for testing with included docker-compose and pytest
from .settings import * # noqa
# Subscribe from remote selenium container to docker-compose nginx container
NGINX_PUSH_STREAM_PUB_HOST = "localhost"
NGINX_PUSH_STREAM_PUB_PORT = "9080"
# Subscribe from local TravisCI machine to docker-compose nginx container
NGINX_PUSH_STREAM_SUB_HOST = "webserver"
NGINX_PUSH_STREAM_SUB_PORT = "80"
|
import asyncio
from pypykatz import logging
async def dcsync(url, username = None):
from aiosmb.commons.connection.url import SMBConnectionURL
from aiosmb.commons.interfaces.machine import SMBMachine
smburl = SMBConnectionURL(url)
connection = smburl.get_connection()
users = []
if username is not None:
users.append(username)
async with connection:
logging.debug('[DCSYNC] Connecting to server...')
_, err = await connection.login()
if err is not None:
raise err
logging.debug('[DCSYNC] Connected to server!')
logging.debug('[DCSYNC] Running...')
i = 0
async with SMBMachine(connection) as machine:
async for secret, err in machine.dcsync(target_users=users):
if err is not None:
raise err
i += 1
if i % 1000 == 0:
logging.debug('[DCSYNC] Running... %s' % i)
await asyncio.sleep(0)
yield secret
logging.debug('[DCSYNC] Finished!')
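
if __name__ == '__main__':
    # Minimal usage sketch (an assumption, not part of pypykatz); the SMB
    # connection URL below is a hypothetical placeholder and a reachable
    # domain controller is required for a real run.
    async def _demo():
        url = 'smb2+ntlm-password://TEST\\Administrator:Passw0rd!@10.10.10.2'
        async for secret in dcsync(url):
            print(str(secret))
    asyncio.run(_demo())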
|
# Copyright (c) 2018 China Telecom Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import resource
class Quota(resource.Resource):
resource_key = 'quota'
resources_key = 'quotas'
base_path = '/lbaas/quotas'
# capabilities
allow_fetch = True
allow_commit = True
allow_delete = True
allow_list = True
# Properties
#: The maximum amount of load balancers you can have. *Type: int*
load_balancers = resource.Body('load_balancer', type=int)
#: The maximum amount of listeners you can create. *Type: int*
listeners = resource.Body('listener', type=int)
#: The maximum amount of pools you can create. *Type: int*
pools = resource.Body('pool', type=int)
#: The maximum amount of health monitors you can create. *Type: int*
health_monitors = resource.Body('health_monitor', type=int)
#: The maximum amount of members you can create. *Type: int*
members = resource.Body('member', type=int)
#: The ID of the project this quota is associated with.
project_id = resource.Body('project_id', alternate_id=True)
def _prepare_request(self, requires_id=True,
base_path=None, prepend_key=False, **kwargs):
_request = super(Quota, self)._prepare_request(requires_id,
prepend_key,
base_path=base_path)
if self.resource_key in _request.body:
_body = _request.body[self.resource_key]
else:
_body = _request.body
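        # The load balancer quota API does not accept an 'id' attribute in the
        # request body, so strip it before the request is sent.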
if 'id' in _body:
del _body['id']
return _request
class QuotaDefault(Quota):
base_path = '/lbaas/quotas/defaults'
allow_retrieve = True
allow_commit = False
allow_delete = False
allow_list = False
|
from typing import Tuple
from TLOA.core.constants import MAX_SHIP_HEALTH
from kivy.event import EventDispatcher
from kivy.properties import BoundedNumericProperty
from kivy.uix.widget import Widget
class Entity(EventDispatcher):
id: str = ''
shape: Widget = None
def step(self, dt, game):
pass
def __repr__(self):
return f'{self.__class__.__name__}()'
class MovingEntity(Entity):
health = BoundedNumericProperty(MAX_SHIP_HEALTH, min=0, max=MAX_SHIP_HEALTH,
errorhandler=lambda x: 0 if x < 0 else MAX_SHIP_HEALTH)
velocity: Tuple[float, float]
def __init__(self, health, velocity, **kwargs):
super().__init__(**kwargs)
self.health = health
self.velocity = velocity
@property
def is_dead(self):
return self.health <= 0
def step(self, dt, game):
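        # advance the widget by one velocity step per frame (dt is unused here)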
x, y = self.shape.pos
dx, dy = self.velocity
self.shape.pos = (x + dx, y + dy)
|
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import logging
from oslo_config import cfg
from oslo_messaging._drivers import impl_zmq
from oslo_messaging._drivers.zmq_driver.broker import zmq_proxy
from oslo_messaging._drivers.zmq_driver.broker import zmq_queue_proxy
from oslo_messaging import server
CONF = cfg.CONF
CONF.register_opts(impl_zmq.zmq_opts)
CONF.register_opts(server._pool_opts)
CONF.rpc_zmq_native = True
USAGE = """ Usage: ./zmq-proxy.py [-h] [] ...
Usage example:
python oslo_messaging/_cmd/zmq-proxy.py"""
def main():
parser = argparse.ArgumentParser(
description='ZeroMQ proxy service',
usage=USAGE
)
parser.add_argument('--config-file', dest='config_file', type=str,
help='Path to configuration file')
parser.add_argument('-d', '--debug', dest='debug', type=bool,
default=False,
help="Turn on DEBUG logging level instead of INFO")
args = parser.parse_args()
if args.config_file:
cfg.CONF(["--config-file", args.config_file])
log_level = logging.INFO
if args.debug:
log_level = logging.DEBUG
logging.basicConfig(level=log_level,
format='%(asctime)s %(name)s '
'%(levelname)-8s %(message)s')
reactor = zmq_proxy.ZmqProxy(CONF, zmq_queue_proxy.UniversalQueueProxy)
try:
while True:
reactor.run()
except (KeyboardInterrupt, SystemExit):
reactor.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from frovedis.exrpc.server import FrovedisServer
from frovedis.mllib.tree import DecisionTreeClassifier
from frovedis.mllib.tree import DecisionTreeRegressor
import sys
import numpy as np
import pandas as pd
#Objective: Run without error
# initializing the Frovedis server
argvs = sys.argv
argc = len(argvs)
if (argc < 2):
print ('Please give frovedis_server calling command as the first argument \n(e.g. "mpirun -np 2 -x /opt/nec/nosupport/frovedis/ve/bin/frovedis_server")')
quit()
FrovedisServer.initialize(argvs[1])
mat = pd.DataFrame([[10, 0, 1, 0, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 0, 1, 0, 1],
[1, 0, 0, 1, 0, 1, 0]],dtype=np.float64)
lbl = np.array([0, 1, 1.0, 0],dtype=np.float64)
# fitting input matrix and label on DecisionTree Classifier object
dtc1 = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=5,
min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0,
max_features=None, random_state=None, max_leaf_nodes=1,
min_impurity_decrease=0.0,
class_weight=None, presort=False, verbose = 0)
dtc = dtc1.fit(mat,lbl)
dtc.debug_print()
# predicting on train model
print("predicting on DecisionTree classifier model: ")
dtcm = dtc.predict(mat)
print (dtcm)
print (dtc.predict_proba(mat))
print("Accuracy score for predicted DecisionTree Classifier model")
print (dtc.score(mat,lbl))
# fitting input matrix and label on DecisionTree Regressor object
dtr1 = DecisionTreeRegressor(criterion='mse', splitter='best',
max_depth=5, min_samples_split=2, min_samples_leaf=1,
min_weight_fraction_leaf=0.0, max_features=None, random_state=None,
max_leaf_nodes=1, min_impurity_decrease=0.0, min_impurity_split=None,
class_weight=None, presort=False, verbose = 0)
lbl1 = np.array([1.2,0.3,1.1,1.9])
dtr = dtr1.fit(mat,lbl1)
dtr.debug_print()
# predicting on train model
print("predicting on DecisionTree Regressor model: ")
dtrm = dtr.predict(mat)
print (dtrm)
print("Root mean square for predicted DecisionTree Regressor model")
print (dtr.score(mat,lbl1))
#clean-up
dtc.release()
dtr.release()
FrovedisServer.shut_down()
|
class Solution:
# @param {integer[]} nums
# @param {integer} k
# @return {integer[]}
def maxSlidingWindow(self, nums, k):
result = []
for i in range(len(nums) - k + 1):
result.append(max(nums[i:i + k]))
return result
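
# A hedged O(n) alternative (a sketch, not part of the original class): keep a
# deque of indices whose values decrease monotonically, so the front of the
# deque always holds the index of the current window maximum.
from collections import deque

def max_sliding_window_deque(nums, k):
    result, window = [], deque()
    for i, num in enumerate(nums):
        while window and nums[window[-1]] <= num:
            window.pop()            # smaller values can never be a future max
        window.append(i)
        if window[0] <= i - k:
            window.popleft()        # front index has slid out of the window
        if i >= k - 1:
            result.append(nums[window[0]])
    return result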
|
from aoc import AOC
aoc = AOC(year=2018, day=19)
data = aoc.load()
def addr(A, B, C, reg):
reg[C] = reg[A] + reg[B]
def addi(A, B, C, reg):
reg[C] = reg[A] + B
def mulr(A, B, C, reg):
reg[C] = reg[A] * reg[B]
def muli(A, B, C, reg):
reg[C] = reg[A] * B
def banr(A, B, C, reg):
reg[C] = reg[A] & reg[B]
def bani(A, B, C, reg):
reg[C] = reg[A] & B
def borr(A, B, C, reg):
reg[C] = reg[A] | reg[B]
def bori(A, B, C, reg):
reg[C] = reg[A] | B
def setr(A, _, C, reg):
reg[C] = reg[A]
def seti(A, _, C, reg):
reg[C] = A
def gtir(A, B, C, reg):
reg[C] = 1 if A > reg[B] else 0
def gtri(A, B, C, reg):
reg[C] = 1 if reg[A] > B else 0
def gtrr(A, B, C, reg):
reg[C] = 1 if reg[A] > reg[B] else 0
def eqir(A, B, C, reg):
reg[C] = 1 if A == reg[B] else 0
def eqri(A, B, C, reg):
reg[C] = 1 if reg[A] == B else 0
def eqrr(A, B, C, reg):
reg[C] = 1 if reg[A] == reg[B] else 0
op_map = {
"addr": addr,
"addi": addi,
"mulr": mulr,
"muli": muli,
"banr": banr,
"bani": bani,
"borr": borr,
"bori": bori,
"setr": setr,
"seti": seti,
"gtir": gtir,
"gtri": gtri,
"gtrr": gtrr,
"eqir": eqir,
"eqri": eqri,
"eqrr": eqrr,
}
registers = [0] * 6
ip_reg = 0
program = []
for line in data.lines():
if "#ip" in line:
ip_reg = int(line[4])
ip = registers[ip_reg]
else:
instruction = line.split(" ")
program.append([instruction[0]] + [int(i) for i in instruction[1:]])
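# Run the program until the instruction pointer leaves it: the ip value is
# written into the bound register before each instruction, and read back
# (plus one) afterwards.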
while ip in range(0, len(program)):
instruction = program[ip]
registers[ip_reg] = ip
op_map[instruction[0]](instruction[1], instruction[2], instruction[3], registers)
ip = registers[ip_reg] + 1
aoc.p1(registers[0])
## Part 2
# Solved manually
aoc.p2(25165632)
|
import numpy as np
from numpy.testing import assert_array_equal
from util import runparams
import swe.simulation as sn
class TestSimulation(object):
@classmethod
def setup_class(cls):
""" this is run once for each class before any tests """
pass
@classmethod
def teardown_class(cls):
""" this is run once for each class after all tests """
pass
def setup_method(self):
""" this is run before each test """
self.rp = runparams.RuntimeParameters()
self.rp.params["mesh.nx"] = 8
self.rp.params["mesh.ny"] = 8
self.rp.params["particles.do_particles"] = 0
self.rp.params["swe.grav"] = 1.0
self.sim = sn.Simulation("swe", "test", self.rp)
self.sim.initialize()
def teardown_method(self):
""" this is run after each test """
self.rp = None
self.sim = None
    def test_initialization(self):
h = self.sim.cc_data.get_var("height")
assert h.min() == 1.0 and h.max() == 1.0
def test_prim(self):
# U -> q
g = self.sim.cc_data.get_aux("g")
q = sn.cons_to_prim(self.sim.cc_data.data, g, self.sim.ivars, self.sim.cc_data.grid)
# q -> U
U = sn.prim_to_cons(q, g, self.sim.ivars, self.sim.cc_data.grid)
assert_array_equal(U, self.sim.cc_data.data)
def test_derives(self):
g = self.sim.cc_data.get_aux("g")
cs = self.sim.cc_data.get_var("soundspeed")
assert np.all(cs == np.sqrt(g))
|
import sys
from django.shortcuts import render
from sfdo_template_helpers.oauth2.salesforce.views import SalesforcePermissionsError
from config.settings.base import IP_RESTRICTED_MESSAGE
GENERIC_ERROR_MSG = "An internal error occurred while processing your request."
def custom_permission_denied_view(request, exception):
message = GENERIC_ERROR_MSG
if isinstance(exception, SalesforcePermissionsError):
message = str(exception)
return render(
request,
"index.html",
context={"JS_CONTEXT": {"error_message": message}},
status=403,
)
def custom_500_view(request):
message = GENERIC_ERROR_MSG
value = sys.exc_info()[1]
if "ip restricted" in value.args[0]:
message = IP_RESTRICTED_MESSAGE
return render(
request,
"index.html",
context={"JS_CONTEXT": {"error_message": message}},
status=500,
)
|
#!/usr/bin/env python
""" hostlists plugin to get hosts from a range """
# Copyright (c) 2010-2013 Yahoo! Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
from hostlists.plugin_base import HostlistsPlugin
class HostlistsPluginRange(HostlistsPlugin):
names = ['range']
def expand(self, value, name=None):
"""
Use Plugin to expand the value
"""
return self.expand_item(value)
def block_to_list(self, block):
""" Convert a range block into a numeric list
input "1-3,17,19-20"
output=[1,2,3,17,19,20]
"""
block += ','
result = []
val = val1 = ''
in_range = False
for letter in block:
if letter in [',', '-']:
if in_range:
val2 = val
val2_len = len(val2)
# result+=range(int(val1),int(val2)+1)
for value in range(int(val1), int(val2) + 1):
if val1.startswith('0'):
result.append(str(value).zfill(val2_len))
else:
result.append(str(value))
val = ''
val1 = None
in_range = False
else:
val1 = val
val1_len = len(val1)
val = ''
if letter == ',':
if val1 is not None:
result.append(val1.zfill(val1_len)) # pragma: no cover
else:
in_range = True
else:
val += letter
return result
def expand_item(self, item):
result = []
in_block = False
pre_block = ''
for count in range(0, len(item)):
letter = item[count]
if letter == '[':
in_block = True
block = ''
elif letter == ']' and in_block:
in_block = False
for value in self.block_to_list(block):
result.append('%s%s%s' % (pre_block, value, item[count + 1:]))
elif in_block:
block += letter
elif not in_block:
pre_block += letter
if len(result):
return result
else:
return [item] # pragma: no cover
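
if __name__ == '__main__':
    # Minimal usage sketch (assumes the plugin class can be instantiated with
    # no arguments; the host pattern below is purely illustrative).
    plugin = HostlistsPluginRange()
    print(plugin.block_to_list('1-3,17,19-20'))       # ['1', '2', '3', '17', '19', '20']
    print(plugin.expand_item('web[01-03].example.com'))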
|
#!/usr/local/bin/python
'''
Read a bed file (at least 3 columns) and compute the number of chromosomes and positions covered
BEGIN COPYRIGHT NOTICE
countBedPositions code -- (c) 2017 Dimitrios Kleftogiannis -- ICR -- www.icr.ac.uk
Copyright 2017 Dimitrios Kleftogiannis Licensed under the
Educational Community License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may
obtain a copy of the License at
https://opensource.org/licenses/ECL-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
Published reports of research using this code (or a modified version) should cite the
relevant article of this program.
The idea is adapted from the Sam2Tsv implementation in Java from http://lindenb.github.io/jvarkit/Sam2Tsv.html
Comments and bug reports are welcome.
Email to dimitrios.kleftogiannis@icr.ac.uk
I would also appreciate hearing about how you used this code and about any improvements you have made to it.
You are free to modify, extend or distribute this code, as long as this copyright notice is included whole and unchanged.
END COPYRIGHT NOTICE
UTILITY
This program takes as input a bed file with at least 3 columns and prints the number of positions covered. It can be helpful when analyzing panel
designs for deep sequencing experiments.
INPUT ARGUMENTS
1. bed file : a bed file
OUTPUT
The program outputs the number of intervals called amplicons, the number of chromosomes and the total number of positions.
DEPENDENCIES
This is a Python program, so you need a Python 2 interpreter installed on your computer.
The program has been developed and tested on a Mac OS computer with El Capitan version 10.11.5.
The Python interpreter used for development is Python 2.7.10 (default, Oct 23 2015, 19:19:21).
The program works on Unix-like systems but has not been tested on Windows operating systems.
The program has not been tested in Cygwin-like environments running under Windows operating systems.
The program depends on pysam libraries downloaded from http://pysam.readthedocs.io/en/latest/index.html
The program also depends on samtools, so please make sure that SAMtools is installed and configured properly in your system
You might need to add samtools to your PATH, so after you install SAMtools you may need a command like:
PATH=$PATH:/your/path/to/Samtools
RUNNING
An execution example is as follows:
python countBedPositions.py bedFile=panelDesign.bed
To obtain the toy data used during code development, please contact Dimitrios.
'''
#modules we need; there may be some extras that weren't used in the final version and were not removed
#Remember that this program is under development, so you may find blocks of code used for testing.
import sys
import os
import pysam
import re
from collections import defaultdict
from itertools import groupby
import datetime
import time
#prints information about program's execution
def printUsage():
print('To run this program please type the following:')
print('\tpython countBedPositions.py bedFile=file.bed\n')
print('Where:\n')
print('\tfile.bed is a bed file with at least 3 columns (chromosome TAB start TAB end)\n')
print('Please give the arguments in the indicated order similar to the provided example!\n')
#save the genomic positions of interest; remember this is not the panel design
def countPositions(bedFile):
#save the positions
#check if bed file exists
if os.path.exists(bedFile):
#the file exists
#open the file and read it line by line
fileIN=open(bedFile,'r')
#variables we need
countPos=0
countAmplicons=0
#store the chromosome names
aList=[]
#store the uniq genomic positions: This is helpful since some amplicons frequently overlap
posList=[]
for eachLine in fileIN:
line = eachLine.rstrip('\n')
tmp=line.split("\t")
chrom=tmp[0]
startPos=int(tmp[1])
endPos=int(tmp[2])
countAmplicons=countAmplicons+1
aList.append(chrom)
for i in range(startPos,endPos+1):
key=chrom+'_'+str(i)
posList.append(key)
countPos=countPos+1
countChrom=len(list(set(aList)))
myStr='Chromosomes:\t'+str(countChrom)+'\tGenomic_Positions:\t'+str(countPos)+'\tUniq_Genomic_Positions:\t'+str(len(list(set(posList))))+'\tAmplicons:\t'+str(countAmplicons)
return myStr
else:
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
print('[%s] ERROR from function: countPositions. The bed file does not exist!\n'%(st))
print('************************************************************************************************************************************\n')
sys.exit()
#main function of the program
def myMain():
#check the number of input arguments
if len(sys.argv)!=2:
print('************************************************************************************************************************************\n')
print('\t\t\t\t\tYour input arguments are not correct!\n')
print('\t\t\t\t\t\tCEC Bioinformatics Team\n')
print('\t\t\tCopyright 2017 ICR -- Dimitrios Kleftogiannis -- dimitrios.kleftogiannis@icr.ac.uk\n')
printUsage()
else:
print('************************************************************************************************************************************\n')
print('\t\t\t\t\tcountBedPositions.py: Count the genomic positions and the chromosomes in a bed file \n')
print('\t\t\t\t\t\t\tCEC Bioinformatics Team\n')
print('\t\t\t\tCopyright 2017 ICR -- Dimitrios Kleftogiannis -- dimitrios.kleftogiannis@icr.ac.uk\n')
#parse the first input arguments
#here if the user does not write the correct argument name it gets an error and the program stops
bedFile=sys.argv[1].split('bedFile=')
bedFile=bedFile[1]
print('Execution started with the following parameters:\n')
print('1. bedFile : \t\t\t\t%s' % bedFile)
            #count the positions in the bed file
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
print('\n[%s] Function countPositions: parsing the input file'%(st))
results=countPositions(bedFile)
print('\n%s'%(results))
print('************************************************************************************************************************************\n')
#this is where we start
if __name__=='__main__':
myMain()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys, os
testdir = os.path.dirname(__file__)
srcdir = "../clarity"
sys.path.insert(0, os.path.abspath(os.path.join(testdir, srcdir)))
import unittest
import clarity as log
from colored import fore, style
from typing import Dict
from unittest.mock import patch
class TestClarity(unittest.TestCase):
"""Unit tests for clarity logger."""
def _run_test(self, method: str, color: str, use_str: bool = True) -> None:
with patch("clarity.logging.{}".format(method)) as mocked_method:
message = "test {} message".format(method)
if use_str:
expected = color + message + style.RESET
getattr(log, method)(message)
else:
expected = color + "{{'msg': '{}'}}".format(message) + style.RESET
getattr(log, method)({"msg": message})
mocked_method.assert_called_with(expected)
def test_all_methods(self) -> None:
cases: Dict[str, str] = {
"exception": fore.ORANGE_1,
"fatal": fore.RED,
"error": fore.LIGHT_RED,
"warning": fore.YELLOW,
"info": fore.DARK_GRAY,
"debug": fore.STEEL_BLUE,
}
for level, color in cases.items():
self._run_test(level, color)
def test_non_str_messages(self) -> None:
cases: Dict[str, str] = {
"exception": fore.ORANGE_1,
"fatal": fore.RED,
"error": fore.LIGHT_RED,
"warning": fore.YELLOW,
"info": fore.DARK_GRAY,
"debug": fore.STEEL_BLUE,
}
for level, color in cases.items():
self._run_test(level, color, use_str=False)
def suite():
"""Test suite"""
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestClarity))
return suite
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
|
# Generated by Django 2.2.24 on 2022-01-08 14:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import phone_field.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('Neighbourhood_app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bio', models.TextField(blank=True, max_length=400)),
('name', models.CharField(blank=True, max_length=120)),
('profile_pic', models.ImageField(default='v1639327874/images/default_drurzc.jpg', upload_to='images/')),
('phone_number', phone_field.models.PhoneField(blank=True, max_length=15)),
('neighbourhood', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='members', to='Neighbourhood_app.NeighbourHood')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
]
|
# Time: O(logn), where logn is the length of result strings
# Space: O(1)
# Given two integers representing the numerator and denominator of a fraction,
# return the fraction in string format.
#
# If the fractional part is repeating, enclose the repeating part in parentheses.
#
# For example,
#
# Given numerator = 1, denominator = 2, return "0.5".
# Given numerator = 2, denominator = 1, return "2".
# Given numerator = 2, denominator = 3, return "0.(6)".
class Solution(object):
def fractionToDecimal(self, numerator, denominator):
"""
:type numerator: int
:type denominator: int
:rtype: str
"""
result = ""
if (numerator > 0 and denominator < 0) or (numerator < 0 and denominator > 0):
result = "-"
dvd, dvs = abs(numerator), abs(denominator)
result += str(dvd / dvs)
dvd %= dvs
if dvd > 0:
result += "."
lookup = {}
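            # map each remainder to the index in result where its digits begin;
            # seeing a remainder twice means the decimal expansion repeats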
while dvd and dvd not in lookup:
lookup[dvd] = len(result)
dvd *= 10
result += str(dvd / dvs)
dvd %= dvs
if dvd in lookup:
result = result[:lookup[dvd]] + "(" + result[lookup[dvd]:] + ")"
return result
if __name__ == "__main__":
print Solution().fractionToDecimal(1, 9)
print Solution().fractionToDecimal(-50, 8)
print Solution().fractionToDecimal(22, 2)
print Solution().fractionToDecimal(-22, -2)
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2018-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Infrastructure for intercepting requests."""
import enum
import dataclasses
from typing import Callable, List, Optional
from PyQt5.QtCore import QUrl
class ResourceType(enum.Enum):
"""Possible request types that can be received.
Currently corresponds to the QWebEngineUrlRequestInfo Enum:
https://doc.qt.io/qt-5/qwebengineurlrequestinfo.html#ResourceType-enum
"""
main_frame = 0
sub_frame = 1
stylesheet = 2
script = 3
image = 4
font_resource = 5
sub_resource = 6
object = 7
media = 8
worker = 9
shared_worker = 10
prefetch = 11
favicon = 12
xhr = 13
ping = 14
service_worker = 15
csp_report = 16
plugin_resource = 17
# 18 is "preload", deprecated in Chromium
preload_main_frame = 19
preload_sub_frame = 20
unknown = 255
class RedirectException(Exception):
"""Raised when the request was invalid, or a request was already made."""
@dataclasses.dataclass
class Request:
"""A request which can be intercepted/blocked."""
#: The URL of the page being shown.
first_party_url: Optional[QUrl]
#: The URL of the file being requested.
request_url: QUrl
is_blocked: bool = False
#: The resource type of the request. None if not supported on this backend.
resource_type: Optional[ResourceType] = None
def block(self) -> None:
"""Block this request."""
self.is_blocked = True
def redirect(self, url: QUrl, *, ignore_unsupported: bool = False) -> None:
"""Redirect this request.
Only some types of requests can be successfully redirected.
Improper use of this method can result in redirect loops.
        This method raises a RedirectException if the redirect was not possible.
Args:
url: The QUrl to try to redirect to.
ignore_unsupported: If set to True, request methods which can't be
redirected (such as POST) are silently ignored instead of throwing an
exception.
"""
# Will be overridden if the backend supports redirection
raise NotImplementedError
#: Type annotation for an interceptor function.
InterceptorType = Callable[[Request], None]
_interceptors: List[InterceptorType] = []
def register(interceptor: InterceptorType) -> None:
_interceptors.append(interceptor)
def run(info: Request) -> None:
for interceptor in _interceptors:
interceptor(info)
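
# An illustrative interceptor sketch (not shipped with qutebrowser): block
# every request whose host matches a hypothetical denylist entry.
def _example_interceptor(info: Request) -> None:
    if info.request_url.host() == 'ads.example.com':
        info.block()
# register(_example_interceptor)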
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import glob
# Setting different extension allow to use this script with different
# RAW file formats
RAW_EXT = '.NEF'
JPG_EXT = '.JPG'
here = os.getcwd()
# With glob we can use wildcards for pattern matching
all_NEF = glob.glob(os.path.join(here, "*" + RAW_EXT))
if not all_NEF:
print "\033[0;94m\tThere are no NEF files in this directory."
exit(0)
all_JPG = glob.glob(os.path.join(here, "*" + JPG_EXT))
# We create a list of tuple:
# (only the file name, the entire path)
NEF_filenames = [(x.split('/')[-1].split(RAW_EXT)[0], x) for x in all_NEF]
orphans = []
for nef in NEF_filenames:
# If the NEF filename doesn't have the JPG counterpart
# we add it to the orphans list
if not filter(lambda element: nef[0] in element, all_JPG):
orphans.append(nef)
if orphans:
print "\033[0;91m{} NEF files to delete:".format(len(orphans))
for orphan in orphans:
print "\t\033[0;33m{}".format(orphan)
reply = raw_input("\033[0;97mDo you wanna delete them automatically? (y/n) ") # noqa
    if reply == 'y':
print "\033[0;91mDeleting NEF files..."
for orphan in orphans:
print "\t\033[0;33mDeleting: {}".format(orphan[1])
os.remove(orphan[1])
    elif reply == 'n':
print " Ok, bye!"
else:
print "There are no NEF orphans in this directory. :)"
|
# coding=utf-8
import requests
import re, json
import math
def getStartingIndex(input_str):
m = re.finditer("http://", str(input_str))
ret = set()
for i in m:
# print(i.start())
ret.add(i.start())
return ret
def getSingleUrl(input_str, startIndex):
i = startIndex
    while input_str[i] != "'":
i=i+1
mystart = startIndex
myend = i
ret = input_str[mystart:myend]
return ret
def getUrlSet(my_string):
listOfIndex = getStartingIndex(my_string)
ret = set()
for i in listOfIndex:
some_url = getSingleUrl(my_string, i)
ret.add(some_url) # print(getSingleUrl(my_string, i))
return ret
def getJson(url):
url_json = str(url)+".json"
data = requests.get(url_json).json()
return data
def getEnglishValue(urlRes, jsonData):
ret = "NOT FOUND"
count = 0
a = jsonData[str(urlRes)]['http://www.w3.org/2000/01/rdf-schema#comment']
print(a)
for i in a:
if count == 10:
break
if i['lang'] == 'en':
ret = i['value']
break
else:
count=count+1
return ret
if __name__ == "__main__":
urlres1 = "http://dbpedia.org/data/Alice_and_Bob"
urlres2 = "http://dbpedia.org/resource/Brad_Pitt"
lines = urlres2.replace( "resource", "data")
lines = lines.replace( "\"", "")
myjsonAlice = getJson(urlres1)
myjsonJimmy = getJson(lines)
valueAlice = myjsonAlice['http://dbpedia.org/resource/Alice_and_Bob']['http://www.w3.org/2000/01/rdf-schema#comment'][0] # = getEnglishValue(myjsonAlice)
valueJimmy = getEnglishValue(urlres2, myjsonJimmy) # myjsonJimmy['http://dbpedia.org/resource/Brad_Pitt']['http://www.w3.org/2000/01/rdf-schema#comment'][4] # = getEnglishValue(myjsonAlice)
print(valueAlice)
print('-----------')
print(valueJimmy)
pass
|
from morse_tools import Morse_Tools
morse_try1 = Morse_Tools()
result1 = morse_try1.morse_encrypt("Python Is Fun")
print(result1)
result1_decrypted = morse_try1.morse_decrypt(result1)
print(result1_decrypted)
morse_try1.play_morse(result1)
#morse_try1.show_morse_library()
|
# -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2021 Pincer
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from pincer.utils.api_object import APIObject
from pincer.utils.constants import MISSING, APINullable
from pincer.utils.snowflake import Snowflake
class PremiumTypes(Enum):
"""
The type of Discord premium a user has.
"""
NONE = 0
NITRO_CLASSIC = 1
NITRO = 2
@dataclass
class User(APIObject):
"""
Represents a Discord user. This can be a bot account or a
human account.
:param avatar:
the user's avatar hash
:param discriminator:
the user's 4-digit discord-tag
:param flags:
the flags on a user's account
:param id:
the user's id
:param username:
the user's username, not unique across the platform
:param accent_color:
the user's banner color encoded as an integer representation of
hexadecimal color code
:param banner:
the user's banner, or null if unset
:param bot:
whether the user belongs to an OAuth2 application
:param email:
the user's email
:param locale:
the user's chosen language option
:param mfa_enabled:
whether the user has two factor enabled on their account
:param premium_type:
the type of Nitro subscription on a user's account
:param public_flags:
the public flags on a user's account
:param system:
whether the user is an Official Discord System user (part of the urgent
message system)
:param verified:
whether the email on this account has been verified
"""
avatar: Optional[str]
discriminator: str
flags: int
id: Snowflake
username: str
accent_color: APINullable[Optional[int]] = MISSING
banner: APINullable[Optional[str]] = MISSING
bot: APINullable[bool] = MISSING
email: APINullable[Optional[str]] = MISSING
locale: APINullable[str] = MISSING
mfa_enabled: APINullable[bool] = MISSING
premium_type: APINullable[int] = MISSING
public_flags: APINullable[int] = MISSING
system: APINullable[bool] = MISSING
verified: APINullable[bool] = MISSING
@property
def premium(self) -> PremiumTypes:
"""
        The user's premium type as a usable enum.
"""
return PremiumTypes(self.premium_type)
def __str__(self):
"""Return the discord tag when object gets used as a string."""
return self.username + '#' + self.discriminator
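
if __name__ == '__main__':
    # Minimal construction sketch (assumptions: Snowflake accepts an int and
    # APIObject requires no extra constructor arguments).
    user = User(avatar=None, discriminator='0001', flags=0,
                id=Snowflake(1), username='wumpus')
    print(user)  # -> wumpus#0001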
|