# Copyright (c) 2020 NVIDIA Corporation. All rights reserved.
# This work is licensed under the NVIDIA Source Code License - Non-commercial. Full
# text can be found in LICENSE.md
import torch
import torch.nn as nn
import torchvision.transforms as transforms
import time
import os
import sys
import math
import numpy as np
import cv2
import scipy
import threading
import cupy
import copy
from fcn.config import cfg
from fcn.multiscaleloss import multiscaleEPE, realEPE
from transforms3d.quaternions import mat2quat, quat2mat, qmult
from transforms3d.euler import mat2euler, euler2mat, euler2quat, quat2euler
from utils.show_flows import *
from utils.se3 import T_inv_transform, se3_mul, se3_inverse
from utils.zoom_in import zoom_images
from utils.pose_error import re, te
class Stream:
    # Raw CUDA stream pointer of PyTorch's current stream, captured at import
    # time (requires an initialized CUDA context).  Passed as `stream=` to the
    # CuPy kernel launches below so they run on the same stream as PyTorch ops.
    ptr = torch.cuda.current_stream().cuda_stream
class AverageMeter(object):
    """Tracks the latest value and running average of a scalar metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running mean."""
        self.val = val
        self.count = self.count + n
        self.sum = self.sum + val * n
        self.avg = self.sum / self.count

    def __repr__(self):
        # "current (average)" with three decimals
        return f'{self.val:.3f} ({self.avg:.3f})'
def compute_delta_poses(pose_src_blob, pose_tgt_blob, zoom_factor):
    """Compute per-object relative poses from source to target.

    Each row of the pose blobs is [image_id, cls, qw, qx, qy, qz, tx, ty, tz].
    The rotation delta is R_tgt * R_src^T; the translation delta comes from
    T_inv_transform and its x/y components are normalized by the per-object
    zoom factor.  Returns a new blob with columns 2:9 replaced by the deltas.
    """
    pose_delta_blob = pose_src_blob.copy()
    for idx in range(pose_src_blob.shape[0]):
        rot_src = quat2mat(pose_src_blob[idx, 2:6])
        rot_tgt = quat2mat(pose_tgt_blob[idx, 2:6])
        # relative rotation taking the source orientation to the target
        pose_delta_blob[idx, 2:6] = mat2quat(np.dot(rot_tgt, rot_src.transpose()))
        trans_delta = T_inv_transform(pose_src_blob[idx, 6:], pose_tgt_blob[idx, 6:])
        # normalize the in-plane translation by the zoom scale
        pose_delta_blob[idx, 6] = trans_delta[0] / zoom_factor[idx, 0]
        pose_delta_blob[idx, 7] = trans_delta[1] / zoom_factor[idx, 1]
        pose_delta_blob[idx, 8] = trans_delta[2]
    return pose_delta_blob
# CuPy moved its memoize helper from cupy.util to cupy._util; pick the one
# matching the interpreter/cupy combination once, then define cunnex a single
# time instead of duplicating the function in both branches.
if sys.version_info[0] < 3:
    _cupy_memoize = cupy.util.memoize
else:
    _cupy_memoize = cupy._util.memoize


@_cupy_memoize(for_each_device=True)
def cunnex(strFunction):
    """Compile the CUDA source stored in the module-level variable named
    `strFunction` and return the kernel of the same name (memoized per device).
    """
    return cupy.cuda.compile_with_cache(globals()[strFunction]).get_function(strFunction)
# CUDA kernel source compiled at runtime via cunnex('compute_flow').
# For every source pixel with positive depth, its 3D point is transformed by
# the 3x4 matrix RT, projected with the pinhole intrinsics (fx, fy, px, py),
# and — if the projection lands inside the image and its depth agrees with the
# target point cloud within 3mm — the pixel displacement is written into the
# 2-channel flow_map.  Pixels failing the checks keep zero flow.
compute_flow = '''
extern "C" __global__ void compute_flow(float* pc_tgt, float* pc_src, float* flow_map, float* RT,
float fx, float fy, float px, float py, int width, int height)
{
const int y = threadIdx.x + blockDim.x * blockIdx.x;
const int x = threadIdx.y + blockDim.y * blockIdx.y;
if (x < width && y < height)
{
flow_map[(y * width + x) * 2] = 0;
flow_map[(y * width + x) * 2 + 1] = 0;
float X = pc_src[(y * width + x) * 3];
float Y = pc_src[(y * width + x) * 3 + 1];
float Z = pc_src[(y * width + x) * 3 + 2];
if (Z > 0)
{
float vx = RT[0] * X + RT[1] * Y + RT[2] * Z + RT[3];
float vy = RT[4] * X + RT[5] * Y + RT[6] * Z + RT[7];
float vz = RT[8] * X + RT[9] * Y + RT[10] * Z + RT[11];
// projection
float w_proj = fx * (vx / vz) + px;
float h_proj = fy * (vy / vz) + py;
float z_proj = vz;
int w_proj_i = roundf(w_proj);
int h_proj_i = roundf(h_proj);
if (w_proj_i >= 0 && w_proj_i < width && h_proj_i >= 0 && h_proj_i < height)
{
float z_tgt = pc_tgt[(h_proj_i * width + w_proj_i) * 3 + 2];
if (fabs(z_proj - z_tgt) < 3E-3)
{
flow_map[(y * width + x) * 2] = w_proj - x;
flow_map[(y * width + x) * 2 + 1] = h_proj - y;
}
}
}
}
}
'''
def render_poses(intrinsic_matrix, label_blob, pose_tgt_blob, pose_src_blob, points_all, train_data):
    """Render target and source poses for every object and fill train_data.

    For each pose row ([image_id, cls, qw..qz, tx..tz]) this renders the target
    and source poses with cfg.renderer, writes the color/depth crops, computes
    a zoom-in affine transform centered on the projected object, and launches
    the compute_flow CUDA kernel to produce ground-truth optical flow between
    the rendered source and target.  All results are written in place into the
    preallocated GPU tensors in train_data.
    """
    height = cfg.TRAIN.SYN_HEIGHT
    width = cfg.TRAIN.SYN_WIDTH
    ratio = float(height) / float(width)
    fx = intrinsic_matrix[0, 0]
    fy = intrinsic_matrix[1, 1]
    px = intrinsic_matrix[0, 2]
    py = intrinsic_matrix[1, 2]
    zfar = 6.0
    znear = 0.01
    num = pose_tgt_blob.shape[0]
    pixel_mean = torch.tensor(cfg.PIXEL_MEANS / 255.0).cuda().float()
    # scratch render targets reused across objects (RGBA layout)
    image_tensor = torch.cuda.FloatTensor(height, width, 4).detach()
    seg_tensor = torch.cuda.FloatTensor(height, width, 4).detach()
    pcloud_tensor = torch.cuda.FloatTensor(height, width, 4).detach()
    # CUDA launch configuration for the compute_flow kernel
    threadsperblock = (32, 32, 1)
    blockspergrid_x = math.ceil(height / threadsperblock[0])
    blockspergrid_y = math.ceil(width / threadsperblock[1])
    blockspergrid = (int(blockspergrid_x), int(blockspergrid_y), 1)
    qt = np.zeros((7, ), dtype=np.float32)  # [tx, ty, tz, qw, qx, qy, qz]
    RT_tgt = np.zeros((3, 4), dtype=np.float32)
    RT_src = np.zeros((3, 4), dtype=np.float32)
    # set renderer
    cfg.renderer.set_light_pos([0, 0, 0])
    cfg.renderer.set_light_color([2, 2, 2])
    cfg.renderer.set_projection_matrix(width, height, fx, fy, px, py, znear, zfar)
    for i in range(num):
        image_id = int(pose_tgt_blob[i, 0])
        # during real-data testing the class id is used directly; otherwise it
        # indexes into the training class list
        if cfg.MODE == 'TEST' and cfg.TEST.SYNTHESIZE == False:
            cls_index = int(pose_tgt_blob[i, 1])
        else:
            cls_index = cfg.TRAIN.CLASSES[int(pose_tgt_blob[i, 1])]
        # render target pose
        qt[:3] = pose_tgt_blob[i, 6:]
        qt[3:] = pose_tgt_blob[i, 2:6]
        cfg.renderer.set_poses([qt])
        cfg.renderer.render([cls_index], image_tensor, seg_tensor, pc2_tensor=pcloud_tensor)
        # renderer output is vertically flipped w.r.t. image coordinates
        image_tensor = image_tensor.flip(0)
        pcloud_tensor = pcloud_tensor.flip(0)
        # mask the target point cloud with the class-channel label of this object
        mask = label_blob[image_id, int(pose_tgt_blob[i, 1]), :, :]
        mask_tensor = torch.from_numpy(np.tile(mask[:,:,np.newaxis], (1, 1, 3))).contiguous().cuda()
        # RGB -> BGR and subtract pixel mean
        im_tgt = image_tensor[:, :, (2, 1, 0)] - pixel_mean
        train_data['image_tgt_blob_color'][i].copy_(im_tgt.permute(2, 0, 1))
        if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
            train_data['image_tgt_blob_depth'][i].copy_(pcloud_tensor[:, :, :3].permute(2, 0, 1))
        train_data['pcloud_tgt_cuda'].copy_(torch.mul(pcloud_tensor[:, :, :3], mask_tensor))
        # 3D points and target RT
        x3d = np.ones((4, points_all.shape[1]), dtype=np.float32)
        x3d[0, :] = points_all[int(pose_tgt_blob[i, 1]),:,0]
        x3d[1, :] = points_all[int(pose_tgt_blob[i, 1]),:,1]
        x3d[2, :] = points_all[int(pose_tgt_blob[i, 1]),:,2]
        RT_tgt[:3, :3] = quat2mat(pose_tgt_blob[i, 2:6])
        RT_tgt[:, 3]= pose_tgt_blob[i, 6:]
        # render source pose
        qt[:3] = pose_src_blob[i, 6:]
        qt[3:] = pose_src_blob[i, 2:6]
        cfg.renderer.set_poses([qt])
        cfg.renderer.render([cls_index], image_tensor, seg_tensor, pc2_tensor=pcloud_tensor)
        image_tensor = image_tensor.flip(0)
        pcloud_tensor = pcloud_tensor.flip(0)
        im_src = image_tensor[:, :, (2, 1, 0)] - pixel_mean
        train_data['image_src_blob_color'][i].copy_(im_src.permute(2, 0, 1))
        train_data['pcloud_src_cuda'].copy_(pcloud_tensor[:, :, :3])
        if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
            train_data['image_src_blob_depth'][i].copy_(pcloud_tensor[:, :, :3].permute(2, 0, 1))
        # compute box: project the model points with the source pose
        RT_src[:3, :3] = quat2mat(pose_src_blob[i, 2:6])
        RT_src[:, 3]= pose_src_blob[i, 6:]
        x2d = np.matmul(intrinsic_matrix, np.matmul(RT_src, x3d))
        x2d[0, :] = np.divide(x2d[0, :], x2d[2, :])
        x2d[1, :] = np.divide(x2d[1, :], x2d[2, :])
        obj_imgn_start_x = np.min(x2d[0, :])
        obj_imgn_start_y = np.min(x2d[1, :])
        obj_imgn_end_x = np.max(x2d[0, :])
        obj_imgn_end_y = np.max(x2d[1, :])
        # zoom center: projection of the object origin
        obj_imgn_c = np.dot(intrinsic_matrix, pose_src_blob[i, 6:])
        zoom_c_x = obj_imgn_c[0] / obj_imgn_c[2]
        zoom_c_y = obj_imgn_c[1] / obj_imgn_c[2]
        x1 = max(int(obj_imgn_start_x), 0)
        y1 = max(int(obj_imgn_start_y), 0)
        x2 = min(int(obj_imgn_end_x), width-1)
        y2 = min(int(obj_imgn_end_y), height-1)
        # mask region: crop size covers the box extents with a 1.4x margin,
        # preserving the image aspect ratio
        left_dist = zoom_c_x - obj_imgn_start_x
        right_dist = obj_imgn_end_x - zoom_c_x
        up_dist = zoom_c_y - obj_imgn_start_y
        down_dist = obj_imgn_end_y - zoom_c_y
        crop_height = np.max([ratio * right_dist, ratio * left_dist, up_dist, down_dist]) * 2 * 1.4
        crop_width = crop_height / ratio
        # affine transformation for PyTorch (normalized [-1, 1] coordinates)
        x1 = (zoom_c_x - crop_width / 2) * 2 / width - 1;
        x2 = (zoom_c_x + crop_width / 2) * 2 / width - 1;
        y1 = (zoom_c_y - crop_height / 2) * 2 / height - 1;
        y2 = (zoom_c_y + crop_height / 2) * 2 / height - 1;
        pts1 = np.float32([[x1, y1], [x1, y2], [x2, y1]])
        pts2 = np.float32([[-1, -1], [-1, 1], [1, -1]])
        affine_matrix = torch.tensor(cv2.getAffineTransform(pts2, pts1))
        train_data['affine_matrices'][i].copy_(affine_matrix)
        train_data['zoom_factor'][i, 0] = affine_matrix[0, 0]
        train_data['zoom_factor'][i, 1] = affine_matrix[1, 1]
        train_data['zoom_factor'][i, 2] = affine_matrix[0, 2]
        train_data['zoom_factor'][i, 3] = affine_matrix[1, 2]
        # compute optical flow on color: relative pose src -> tgt, launched on
        # PyTorch's current CUDA stream (see Stream)
        pose = torch.tensor(se3_mul(RT_tgt, se3_inverse(RT_src))).cuda().float()
        with torch.cuda.device_of(train_data['pcloud_tgt_cuda']):
            cunnex('compute_flow')(
                grid=blockspergrid,
                block=threadsperblock,
                args=[train_data['pcloud_tgt_cuda'].data_ptr(),
                      train_data['pcloud_src_cuda'].data_ptr(),
                      train_data['flow_map_cuda'].data_ptr(),
                      pose.data_ptr(),
                      fx, fy, px, py, width, height],
                stream=Stream)
        train_data['flow_blob'][i].copy_(train_data['flow_map_cuda'].permute(2, 0, 1))
def render_one_poses(height, width, intrinsic_matrix, pose_blob, image_blob):
    """Render each pose in pose_blob individually into image_blob (BGR, 0-255).

    Args:
        height, width: output image size.
        intrinsic_matrix: 3x3 camera intrinsics.
        pose_blob: (N, 9) rows of [image_id, cls, qw..qz, tx..tz].
        image_blob: (N, H, W, 3) array written in place with rendered images.
    """
    fx = intrinsic_matrix[0, 0]
    fy = intrinsic_matrix[1, 1]
    px = intrinsic_matrix[0, 2]
    py = intrinsic_matrix[1, 2]
    zfar = 6.0
    znear = 0.01
    num = pose_blob.shape[0]
    qt = np.zeros((7, ), dtype=np.float32)  # [tx, ty, tz, qw, qx, qy, qz]
    image_tensor = torch.cuda.FloatTensor(height, width, 4)
    seg_tensor = torch.cuda.FloatTensor(height, width, 4)
    # set renderer
    cfg.renderer.set_projection_matrix(width, height, fx, fy, px, py, znear, zfar)
    # render images
    for i in range(num):
        # during real-data testing the class id is used directly; otherwise it
        # indexes into the training class list
        if cfg.MODE == 'TEST' and cfg.TEST.SYNTHESIZE == False:
            cls_index = int(pose_blob[i, 1])
        else:
            cls_index = cfg.TRAIN.CLASSES[int(pose_blob[i, 1])]
        # render target pose
        qt[:3] = pose_blob[i, 6:]
        qt[3:] = pose_blob[i, 2:6]
        cfg.renderer.set_poses([qt])
        # rendering (return value of render() was unused; dropped)
        cfg.renderer.render([cls_index], image_tensor, seg_tensor)
        # renderer output is vertically flipped w.r.t. image coordinates
        image_tensor = image_tensor.flip(0)
        seg_tensor = seg_tensor.flip(0)
        # RGB to BGR order, scaled to [0, 255]
        im = image_tensor.cpu().numpy()
        im = np.clip(im, 0, 1)
        im = im[:, :, (2, 1, 0)] * 255
        image_blob[i] = im
def render_image(dataset, im, poses):
    """Render all poses into one frame and overlay it onto the input image.

    Args:
        dataset: dataset object providing _intrinsic_matrix and
            process_label_image().
        im: (H, W, 3) BGR input image.
        poses: (N, 9) rows of [image_id, cls, qw..qz, tx..tz].

    Returns:
        (im_output, im_label, pcloud): blended visualization image (uint8),
        processed label image, and the rendered point cloud as (H*W, 3).
    """
    intrinsic_matrix = dataset._intrinsic_matrix
    height = im.shape[0]
    width = im.shape[1]
    fx = intrinsic_matrix[0, 0]
    fy = intrinsic_matrix[1, 1]
    px = intrinsic_matrix[0, 2]
    py = intrinsic_matrix[1, 2]
    zfar = 10.0
    znear = 0.01
    num = poses.shape[0]
    image_tensor = torch.cuda.FloatTensor(height, width, 4)
    seg_tensor = torch.cuda.FloatTensor(height, width, 4)
    pcloud_tensor = torch.cuda.FloatTensor(height, width, 4).detach()
    # set renderer
    cfg.renderer.set_light_pos([0, 0, 0])
    cfg.renderer.set_light_color([2, 2, 2])
    cfg.renderer.set_projection_matrix(width, height, fx, fy, px, py, znear, zfar)
    # collect class indices and poses, then render all objects in one pass
    cls_indexes = []
    poses_all = []
    for i in range(num):
        if cfg.MODE == 'TEST' and cfg.TEST.SYNTHESIZE == False:
            cls_index = int(poses[i, 1])
        else:
            cls_index = cfg.TRAIN.CLASSES[int(poses[i, 1])]
        cls_indexes.append(cls_index)
        qt = np.zeros((7, ), dtype=np.float32)  # [tx, ty, tz, qw, qx, qy, qz]
        qt[:3] = poses[i, 6:]
        qt[3:] = poses[i, 2:6]
        poses_all.append(qt)
    # rendering
    cfg.renderer.set_poses(poses_all)
    cfg.renderer.render(cls_indexes, image_tensor, seg_tensor, pc2_tensor=pcloud_tensor)
    # renderer output is vertically flipped w.r.t. image coordinates
    image_tensor = image_tensor.flip(0)
    seg_tensor = seg_tensor.flip(0)
    pcloud_tensor = pcloud_tensor.flip(0)
    pcloud = pcloud_tensor[:,:,:3].cpu().numpy().reshape((-1, 3))
    im_label = seg_tensor.cpu().numpy()
    im_label = im_label[:, :, (2, 1, 0)] * 255
    # BUGFIX: clip BEFORE the uint8 cast; the original clipped after casting,
    # which is a no-op and lets values >= 255.5 wrap around on the cast.
    im_label = np.clip(np.round(im_label), 0, 255).astype(np.uint8)
    im_label, im_label_all = dataset.process_label_image(im_label)
    # RGB to BGR order
    im_render = image_tensor.cpu().numpy()
    im_render = np.clip(im_render, 0, 1)
    im_render = im_render[:, :, :3] * 255
    im_render = im_render.astype(np.uint8)
    # blend: dimmed input plus full-strength rendering
    im_output = 0.8 * im[:,:,(2, 1, 0)].astype(np.float32) + 1.0 * im_render.astype(np.float32)
    im_output = np.clip(im_output, 0, 255)
    im_output = im_output.astype(np.uint8)
    return im_output, im_label, pcloud
def initialize_poses(sample):
    """Flatten PoseCNN detection results into initial pose and ROI blobs.

    Overwrites column 0 of every row with its batch image index, then stacks
    all rows.  Returns (pose_est, roi_est) with shapes (B*N, 9) and (B*N, 7);
    empty float32 arrays when the sample contains no detections.
    """
    pose_result = sample['poses_result'].numpy()
    roi_result = sample['rois_result'].numpy()
    if cfg.TEST.VISUALIZE:
        print('use posecnn result')
        print(pose_result)
    pose_rows = []
    roi_rows = []
    for i in range(pose_result.shape[0]):
        for j in range(pose_result.shape[1]):
            # tag each row with its image index within the batch
            pose_result[i, j, 0] = i
            roi_result[i, j, 0] = i
            pose_rows.append(pose_result[i, j, :].reshape(1, 9))
            roi_rows.append(roi_result[i, j, :].reshape(1, 7))
    if pose_rows:
        pose_est = np.concatenate(pose_rows, axis=0)
        roi_est = np.concatenate(roi_rows, axis=0)
    else:
        pose_est = np.zeros((0, 9), dtype=np.float32)
        roi_est = np.zeros((0, 7), dtype=np.float32)
    return pose_est, roi_est
# perturb target poses
def sample_poses(pose_tgt):
    """Return a perturbed copy of pose_tgt used as the initial source poses.

    Gaussian noise is added to each pose: rotation noise (degrees, applied in
    Euler space with std cfg.TRAIN.SYN_STD_ROTATION) and translation noise
    (std cfg.TRAIN.SYN_STD_TRANSLATION, with 5x larger std along z since depth
    is hardest to estimate).
    """
    pose_src = pose_tgt.copy()
    std_trans = cfg.TRAIN.SYN_STD_TRANSLATION
    for i in range(pose_tgt.shape[0]):
        # perturb rotation in Euler-angle space (degrees -> radians)
        euler = quat2euler(pose_tgt[i, 2:6])
        euler += cfg.TRAIN.SYN_STD_ROTATION * np.random.randn(3) * math.pi / 180.0
        pose_src[i, 2:6] = euler2quat(euler[0], euler[1], euler[2])
        # perturb translation; note the draw order is kept (x, y, z)
        pose_src[i, 6] += std_trans * np.random.randn(1)
        pose_src[i, 7] += std_trans * np.random.randn(1)
        pose_src[i, 8] += 5 * std_trans * np.random.randn(1)
    return pose_src
def process_sample(sample, poses_est, train_data):
    """Build one training/testing batch of network inputs from a data sample.

    Collects valid ground-truth (target) poses, chooses source poses (either
    the running estimates in poses_est or random perturbations of the targets),
    renders both via render_poses() into the preallocated GPU tensors of
    train_data, and assembles the stacked input blobs.

    Returns:
        (input_color, input_depth, flow, poses_src, poses_tgt, weights_rot,
         extents, points, affine_matrices, zoom_factor, vis_data), where the
        tensor blobs are sliced to the number of valid objects.
    """
    # image_blob is already in tensor GPU
    if cfg.MODE == 'TEST' and poses_est.shape[0] != 0:
        pose_blob = sample['poses_result'].numpy()
    else:
        pose_blob = sample['poses'].numpy()
    gt_boxes = sample['gt_boxes'].numpy()
    image_color_blob = sample['image_color']
    image_depth_blob = sample['image_depth']
    meta_data_blob = sample['meta_data'].numpy()
    label_blob = sample['label_blob'].numpy()
    extents = np.tile(sample['extents'][0, :, :].numpy(), (cfg.TRAIN.GPUNUM, 1, 1))
    points = np.tile(sample['points'][0, :, :, :].numpy(), (cfg.TRAIN.GPUNUM, 1, 1, 1))
    num_classes = points.shape[1]
    # construct poses target: keep rows with a positive validity flag (last
    # column) and tag column 0 with the batch image index
    pose_tgt_blob = np.zeros((0, 9), dtype=np.float32)
    for i in range(pose_blob.shape[0]):
        for j in range(pose_blob.shape[1]):
            if pose_blob[i, j, -1] > 0:
                pose_blob[i, j, 0] = i
                pose_tgt_blob = np.concatenate((pose_tgt_blob, pose_blob[i, j, :].reshape(1, 9)), axis=0)
    # construct gt box (always from ground-truth poses, not detections)
    gt_box_blob = np.zeros((0, 5), dtype=np.float32)
    pose_blob_gt = sample['poses'].numpy()
    for i in range(pose_blob_gt.shape[0]):
        for j in range(pose_blob_gt.shape[1]):
            if pose_blob_gt[i, j, -1] > 0:
                gt_box_blob = np.concatenate((gt_box_blob, gt_boxes[i, j, :].reshape(1, 5)), axis=0)
    num = pose_tgt_blob.shape[0]
    height = image_color_blob.shape[1]
    width = image_color_blob.shape[2]
    metadata = meta_data_blob[0, :]
    intrinsic_matrix = metadata[:9].reshape((3,3))
    # rotation loss weights: only the 4 quaternion slots of each object's own
    # class are active
    weights_rot = np.zeros((num, num_classes * 4), dtype=np.float32)
    for i in range(num):
        cls = int(pose_tgt_blob[i, 1])
        weights_rot[i, 4*cls:4*cls+4] = 1.0
    if poses_est.shape[0] == 0:
        # sample source poses
        pose_src_blob = sample_poses(pose_tgt_blob)
    else:
        pose_src_blob = poses_est.copy()
    render_poses(intrinsic_matrix, label_blob, pose_tgt_blob, pose_src_blob, points[0], train_data)
    for i in range(num):
        image_id = int(pose_tgt_blob[i, 0])
        train_data['image_real_blob_color'][i].copy_(image_color_blob[image_id].permute(2, 0, 1))
        # canonicalize quaternion sign (qw >= 0) so the representation is unique
        if pose_src_blob[i, 2] < 0:
            pose_src_blob[i, 2:6] = -1 * pose_src_blob[i, 2:6]
        if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
            train_data['image_real_blob_depth'][i].copy_(image_depth_blob[image_id].permute(2, 0, 1))
    # data bundle used only for visualization/debugging
    vis_data = {'image': image_color_blob,
                'image_src': train_data['image_src_blob_color'],
                'image_tgt': train_data['image_tgt_blob_color'],
                'image_depth': image_depth_blob,
                'image_src_depth': train_data['image_src_blob_depth'],
                'image_tgt_depth': train_data['image_tgt_blob_depth'],
                'flow': train_data['flow_blob'],
                'intrinsic_matrix': intrinsic_matrix,
                'gt_boxes': gt_box_blob,
                'pose_src': pose_src_blob,
                'pose_tgt': pose_tgt_blob}
    # construct outputs: network input is rendered source stacked with the
    # observed (real) image along the channel dimension
    train_data['input_blob_color'][:, :3, :, :] = train_data['image_src_blob_color']
    train_data['input_blob_color'][:, 3:6, :, :] = train_data['image_real_blob_color']
    if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
        train_data['input_blob_depth'][:, :3, :, :] = train_data['image_src_blob_depth']
        train_data['input_blob_depth'][:, 3:6, :, :] = train_data['image_real_blob_depth']
    return train_data['input_blob_color'][:num], train_data['input_blob_depth'][:num], \
        train_data['flow_blob'][:num], torch.from_numpy(pose_src_blob).contiguous(), \
        torch.from_numpy(pose_tgt_blob).contiguous(), torch.from_numpy(weights_rot).contiguous(), \
        torch.from_numpy(extents).contiguous(), torch.from_numpy(points).contiguous(), \
        train_data['affine_matrices'][:num], train_data['zoom_factor'][:num], vis_data
def train(train_loader, background_loader, network, optimizer, epoch, num_iterations):
    """Train the matching network for one epoch.

    For every sample: composite random backgrounds onto synthetic images, then
    run `num_iterations` refinement iterations, each time re-rendering the
    current pose estimates, zooming into the object, and optimizing the pose
    loss plus a multiscale flow loss.
    """
    batch_time = AverageMeter()
    data_time = AverageMeter()
    epoch_size = len(train_loader)
    enum_background = enumerate(background_loader)
    # declare tensors: preallocated once and reused across the whole epoch
    num = cfg.TRAIN.IMS_PER_BATCH * cfg.TRAIN.MAX_OBJECT_PER_IMAGE
    height = cfg.TRAIN.SYN_HEIGHT
    width = cfg.TRAIN.SYN_WIDTH
    input_blob_color = torch.cuda.FloatTensor(num, 6, height, width).detach()
    image_real_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_tgt_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_src_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    input_blob_depth = torch.cuda.FloatTensor(num, 6, height, width).detach()
    image_real_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_tgt_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_src_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    affine_matrices = torch.cuda.FloatTensor(num, 2, 3).detach()
    zoom_factor = torch.cuda.FloatTensor(num, 4).detach()
    flow_blob = torch.cuda.FloatTensor(num, 2, height, width).detach()
    pcloud_tgt_cuda = torch.cuda.FloatTensor(height, width, 3).detach()
    pcloud_src_cuda = torch.cuda.FloatTensor(height, width, 3).detach()
    flow_map_cuda = torch.cuda.FloatTensor(height, width, 2).detach()
    train_data = {'input_blob_color': input_blob_color,
                  'image_real_blob_color': image_real_blob_color,
                  'image_tgt_blob_color': image_tgt_blob_color,
                  'image_src_blob_color': image_src_blob_color,
                  'input_blob_depth': input_blob_depth,
                  'image_real_blob_depth': image_real_blob_depth,
                  'image_tgt_blob_depth': image_tgt_blob_depth,
                  'image_src_blob_depth': image_src_blob_depth,
                  'affine_matrices': affine_matrices,
                  'zoom_factor': zoom_factor,
                  'flow_blob': flow_blob,
                  'pcloud_tgt_cuda': pcloud_tgt_cuda,
                  'pcloud_src_cuda': pcloud_src_cuda,
                  'flow_map_cuda': flow_map_cuda}
    # switch to train mode
    network.train()
    cfg.ITERS = 0
    for i, sample in enumerate(train_loader):
        # empty -> first iteration samples random source poses in process_sample
        poses_est = np.zeros((0, 9), dtype=np.float32)
        # add background; restart the background iterator when exhausted
        try:
            _, background = next(enum_background)
        except:
            enum_background = enumerate(background_loader)
            _, background = next(enum_background)
        # batch-size mismatch can happen on the loader's last partial batch
        if sample['image_color'].size(0) != background['background_color'].size(0):
            enum_background = enumerate(background_loader)
            _, background = next(enum_background)
        im_info = sample['im_info']
        mask = sample['mask']
        if cfg.INPUT == 'COLOR':
            background_color = background['background_color'].permute(0, 2, 3, 1)
            for j in range(sample['image_color'].size(0)):
                is_syn = im_info[j, -1]
                # synthetic images always get a background; real ones with 50% chance
                if is_syn or np.random.rand(1) > 0.5:
                    sample['image_color'][j] = mask[j] * sample['image_color'][j] + (1 - mask[j]) * background_color[j]
        elif cfg.INPUT == 'RGBD':
            background_color = background['background_color'].permute(0, 2, 3, 1)
            background_depth = background['background_depth'].permute(0, 2, 3, 1)
            for j in range(sample['image_color'].size(0)):
                is_syn = im_info[j, -1]
                if is_syn or np.random.rand(1) > 0.5:
                    sample['image_color'][j] = mask[j] * sample['image_color'][j] + (1 - mask[j]) * background_color[j]
                    sample['image_depth'][j] = mask[j] * sample['image_depth'][j] + (1 - mask[j]) * background_depth[j]
        # train multiple iterations: poses_est from each pass seeds the next
        for j in range(num_iterations):
            end = time.time()
            inputs, inputs_depth, flow, poses_src, poses_tgt, \
                weights_rot, extents, points, affine_matrices, zoom_factor, vdata = \
                process_sample(sample, poses_est, train_data)
            data_time.update(time.time() - end)
            # measure data loading time
            poses_src = poses_src.cuda().detach()
            poses_tgt = poses_tgt.cuda().detach()
            weights_rot = weights_rot.cuda().detach()
            extents = extents.cuda().detach()
            points = points.cuda().detach()
            # zoom in image using the per-object affine crop
            grids = nn.functional.affine_grid(affine_matrices, inputs.size())
            input_zoom = nn.functional.grid_sample(inputs, grids).detach()
            if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
                input_zoom_depth = nn.functional.grid_sample(inputs_depth, grids).detach()
            # zoom in flow: rescale flow values by the zoom scale (the /20
            # factor matches the network's flow normalization)
            flow_zoom = nn.functional.grid_sample(flow, grids)
            for k in range(flow_zoom.shape[0]):
                flow_zoom[k, 0, :, :] /= affine_matrices[k, 0, 0] * 20.0
                flow_zoom[k, 1, :, :] /= affine_matrices[k, 1, 1] * 20.0
            if cfg.TRAIN.VISUALIZE:
                if cfg.INPUT == 'COLOR':
                    _vis_minibatch(inputs, input_zoom, flow_zoom, extents[0], vdata, 'COLOR')
                elif cfg.INPUT == 'DEPTH':
                    _vis_minibatch(inputs_depth, input_zoom_depth, flow_zoom, extents[0], vdata, 'DEPTH')
                elif cfg.INPUT == 'RGBD':
                    _vis_minibatch(inputs, input_zoom, flow_zoom, extents[0], vdata, 'COLOR')
                    _vis_minibatch(inputs_depth, input_zoom_depth, flow_zoom, extents[0], vdata, 'DEPTH')
            # compute output
            if cfg.INPUT == 'RGBD':
                output, loss_pose_tensor, quaternion_delta_var, translation_var \
                    = network(input_zoom, input_zoom_depth, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
            else:
                if cfg.INPUT == 'COLOR':
                    x = input_zoom
                elif cfg.INPUT == 'DEPTH':
                    x = input_zoom_depth
                output, loss_pose_tensor, quaternion_delta_var, translation_var \
                    = network(x, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
            # compose pose: apply the predicted deltas to the source poses,
            # producing the estimates used by the next refinement iteration
            vdata_pose = vdata['pose_src']
            quaternion_delta = quaternion_delta_var.cpu().detach().numpy()
            translation = translation_var.cpu().detach().numpy()
            poses_est, error_rot, error_trans = _compute_pose_target(quaternion_delta, translation, vdata_pose, vdata['pose_tgt'])
            # losses
            loss_pose = torch.mean(loss_pose_tensor)
            loss_flow = 0.1 * multiscaleEPE(output, flow_zoom)
            flow2_EPE = realEPE(output[0], flow_zoom)
            loss = loss_pose + loss_flow
            # compute gradient and do optimization step
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            # measure elapsed time
            batch_time.update(time.time() - end)
            print('epoch: [%d/%d][%d/%d], iter %d, loss %.4f, l_pose %.4f (%.2f, %.2f), l_flow %.4f, lr %.6f, data time %.2f, batch time %.2f' \
                % (epoch, cfg.epochs, i, epoch_size, j+1, loss, loss_pose, error_rot, error_trans, loss_flow, \
                   optimizer.param_groups[0]['lr'], data_time.val, batch_time.val))
            cfg.ITERS += 1
def test(test_loader, background_loader, network, output_dir):
    """Evaluate the network over a test set with iterative pose refinement.

    Initial poses come either from random perturbations (synthetic testing) or
    from PoseCNN detections.  Each sample is refined cfg.TEST.ITERNUM times;
    results are visualized or saved to per-frame .mat files in output_dir.
    """
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    flow2_EPEs = AverageMeter()
    epoch_size = len(test_loader)
    num_iterations = cfg.TEST.ITERNUM
    # background compositing is only used when testing on synthetic data
    if background_loader is not None:
        enum_background = enumerate(background_loader)
    # declare tensors: preallocated once and reused for every sample
    num = cfg.TEST.IMS_PER_BATCH * len(cfg.TRAIN.CLASSES)
    height = cfg.TRAIN.SYN_HEIGHT
    width = cfg.TRAIN.SYN_WIDTH
    input_blob_color = torch.cuda.FloatTensor(num, 6, height, width).detach()
    image_real_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_tgt_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_src_blob_color = torch.cuda.FloatTensor(num, 3, height, width).detach()
    input_blob_depth = torch.cuda.FloatTensor(num, 6, height, width).detach()
    image_real_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_tgt_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    image_src_blob_depth = torch.cuda.FloatTensor(num, 3, height, width).detach()
    affine_matrices = torch.cuda.FloatTensor(num, 2, 3).detach()
    zoom_factor = torch.cuda.FloatTensor(num, 4).detach()
    flow_blob = torch.cuda.FloatTensor(num, 2, height, width).detach()
    pcloud_tgt_cuda = torch.cuda.FloatTensor(height, width, 3).detach()
    pcloud_src_cuda = torch.cuda.FloatTensor(height, width, 3).detach()
    flow_map_cuda = torch.cuda.FloatTensor(height, width, 2).detach()
    test_data = {'input_blob_color': input_blob_color,
                 'image_real_blob_color': image_real_blob_color,
                 'image_tgt_blob_color': image_tgt_blob_color,
                 'image_src_blob_color': image_src_blob_color,
                 'input_blob_depth': input_blob_depth,
                 'image_real_blob_depth': image_real_blob_depth,
                 'image_tgt_blob_depth': image_tgt_blob_depth,
                 'image_src_blob_depth': image_src_blob_depth,
                 'affine_matrices': affine_matrices,
                 'zoom_factor': zoom_factor,
                 'flow_blob': flow_blob,
                 'pcloud_tgt_cuda': pcloud_tgt_cuda,
                 'pcloud_src_cuda': pcloud_src_cuda,
                 'flow_map_cuda': flow_map_cuda}
    # switch to test mode
    network.eval()
    cfg.ITERS = 0
    end = time.time()
    for i, sample in enumerate(test_loader):
        if 'is_testing' in sample and sample['is_testing'] == 0:
            continue
        result = []
        vis_data = []
        if cfg.TEST.SYNTHESIZE:
            # random initial poses (sampled inside process_sample)
            poses_est = np.zeros((0, 9), dtype=np.float32)
            rois_est = np.zeros((0, 7), dtype=np.float32)
        else:
            # initialize poses from detection
            poses_est, rois_est = initialize_poses(sample)
            if poses_est.shape[0] == 0:
                continue
        # add background for testing on synthetic data
        if background_loader is not None:
            # restart the background iterator when exhausted
            try:
                _, background = next(enum_background)
            except:
                enum_background = enumerate(background_loader)
                _, background = next(enum_background)
            # batch-size mismatch can happen on the loader's last partial batch
            if sample['image_color'].size(0) != background['background_color'].size(0):
                enum_background = enumerate(background_loader)
                _, background = next(enum_background)
            im_info = sample['im_info']
            mask = sample['mask']
            if cfg.INPUT == 'COLOR':
                background_color = background['background_color'].permute(0, 2, 3, 1)
                for j in range(sample['image_color'].size(0)):
                    is_syn = im_info[j, -1]
                    # synthetic images always get a background; real ones with 50% chance
                    if is_syn or np.random.rand(1) > 0.5:
                        sample['image_color'][j] = mask[j] * sample['image_color'][j] + (1 - mask[j]) * background_color[j]
            elif cfg.INPUT == 'RGBD':
                background_color = background['background_color'].permute(0, 2, 3, 1)
                background_depth = background['background_depth'].permute(0, 2, 3, 1)
                for j in range(sample['image_color'].size(0)):
                    is_syn = im_info[j, -1]
                    if is_syn or np.random.rand(1) > 0.5:
                        sample['image_color'][j] = mask[j] * sample['image_color'][j] + (1 - mask[j]) * background_color[j]
                        sample['image_depth'][j] = mask[j] * sample['image_depth'][j] + (1 - mask[j]) * background_depth[j]
        # iterative refinement: each pass feeds its estimates into the next
        for j in range(num_iterations):
            inputs, inputs_depth, flow, poses_src, poses_tgt, \
                weights_rot, extents, points, affine_matrices, zoom_factor, vdata = \
                process_sample(sample, poses_est, test_data)
            vis_data.append(copy.deepcopy(vdata))
            if j == 0:
                # remember the initial poses for the saved result
                poses_init = vdata['pose_src']
            poses_src = poses_src.cuda().detach()
            poses_tgt = poses_tgt.cuda().detach()
            weights_rot = weights_rot.cuda().detach()
            extents = extents.cuda().detach()
            points = points.cuda().detach()
            # zoom in image using the per-object affine crop
            grids = nn.functional.affine_grid(affine_matrices, inputs.size())
            input_zoom = nn.functional.grid_sample(inputs, grids).detach()
            if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
                input_zoom_depth = nn.functional.grid_sample(inputs_depth, grids).detach()
            # compute output
            if cfg.INPUT == 'RGBD':
                quaternion_delta_var, translation_var \
                    = network(input_zoom, input_zoom_depth, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
            else:
                if cfg.INPUT == 'COLOR':
                    x = input_zoom
                elif cfg.INPUT == 'DEPTH':
                    x = input_zoom_depth
                quaternion_delta_var, translation_var \
                    = network(x, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
            # compose pose: apply the predicted deltas to the source poses
            vdata_pose = vdata['pose_src']
            quaternion_delta = quaternion_delta_var.detach().cpu().numpy()
            translation = translation_var.detach().cpu().numpy()
            poses_est, error_rot, error_trans = _compute_pose_target(quaternion_delta, translation, vdata_pose, vdata['pose_tgt'])
            result.append(poses_est)
        pose = {'poses_init': poses_init, 'poses_est': result, 'rois': rois_est, \
                'poses_tgt': vdata['pose_tgt'], 'intrinsic_matrix': vdata['intrinsic_matrix']}
        if cfg.TEST.VISUALIZE:
            if cfg.INPUT == 'RGBD':
                _vis_test(result, vis_data, 'color')
                _vis_test(result, vis_data, 'depth')
            elif cfg.INPUT == 'COLOR':
                _vis_test(result, vis_data, 'color')
            else:
                _vis_test(result, vis_data, 'depth')
        else:
            # save result to a per-frame .mat file
            if 'video_id' in sample and 'image_id' in sample:
                filename = os.path.join(output_dir, sample['video_id'][0] + '_' + sample['image_id'][0] + '.mat')
            else:
                filename = os.path.join(output_dir, '%06d.mat' % i)
            print(filename)
            scipy.io.savemat(filename, pose, do_compression=True)
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        cfg.ITERS += 1
        print('[%d/%d] %.4f' % (i, epoch_size, batch_time.val))
    # remove any stale aggregate-result file so downstream evaluation recomputes it
    filename = os.path.join(output_dir, 'results_deepim.mat')
    if os.path.exists(filename):
        os.remove(filename)
def test_image(network, dataset, im_color, im_depth, poses_est, test_data):
    """Refine poses for a single image (online/ROS-style inference).

    Builds a one-image sample dict from raw color/depth, runs
    cfg.TEST.ITERNUM refinement iterations, and renders the final result.

    Returns:
        (im_pose, pose): visualization image with the rendered poses overlaid,
        and a dict holding initial/estimated/target poses and intrinsics.
    """
    # declare tensors
    batch_time = AverageMeter()
    num_iterations = cfg.TEST.ITERNUM
    num = poses_est.shape[0]
    height = cfg.TRAIN.SYN_HEIGHT
    width = cfg.TRAIN.SYN_WIDTH
    # construct sample: normalize color to [0, 1] and subtract the pixel mean
    im_tensor = torch.from_numpy(im_color).float() / 255.0
    im_tensor -= dataset._pixel_mean
    im_cuda_color = im_tensor.cuda()
    if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
        pcloud = backproject(im_depth, dataset._intrinsic_matrix, is_reshape=True)
        im_cuda_depth = torch.from_numpy(pcloud).cuda().float()
    else:
        # depth unused; a placeholder copy keeps the sample dict uniform
        im_cuda_depth = im_cuda_color.clone().detach()
    # construct the meta data: [K (9), K^-1 (9)]
    K = dataset._intrinsic_matrix
    Kinv = np.linalg.pinv(K)
    meta_data_blob = np.zeros(18, dtype=np.float32)
    meta_data_blob[0:9] = K.flatten()
    meta_data_blob[9:18] = Kinv.flatten()
    label_blob = np.zeros((dataset.num_classes, height, width), dtype=np.float32)
    gt_boxes = np.zeros((num, 5), dtype=np.float32)
    im_info = np.array([im_color.shape[0], im_color.shape[1], cfg.TRAIN.SCALES_BASE[0]], dtype=np.float32)
    sample = {'image_color': im_cuda_color.unsqueeze(0),
              'image_depth': im_cuda_depth.unsqueeze(0),
              'meta_data': torch.from_numpy(meta_data_blob[np.newaxis,:]),
              'label_blob': torch.from_numpy(label_blob[np.newaxis,:]),
              'poses': torch.from_numpy(poses_est[np.newaxis,:]),
              'extents': torch.from_numpy(dataset._extents[np.newaxis,:]),
              'points': torch.from_numpy(dataset._point_blob[np.newaxis,:]),
              'gt_boxes': torch.from_numpy(gt_boxes[np.newaxis,:]),
              'poses_result': torch.from_numpy(poses_est[np.newaxis,:]),
              'im_info': torch.from_numpy(im_info[np.newaxis,:])}
    # switch to test mode
    network.eval()
    end = time.time()
    result = []
    vis_data = []
    # iterative refinement: each pass feeds its estimates into the next
    for j in range(num_iterations):
        inputs, inputs_depth, flow, poses_src, poses_tgt, \
            weights_rot, extents, points, affine_matrices, \
            zoom_factor, vdata = process_sample(sample, poses_est, test_data)
        vis_data.append(copy.deepcopy(vdata))
        if j == 0:
            # remember the initial poses for the returned result
            poses_init = vdata['pose_src']
        poses_src = poses_src.cuda().detach()
        poses_tgt = poses_tgt.cuda().detach()
        weights_rot = weights_rot.cuda().detach()
        extents = extents.cuda().detach()
        points = points.cuda().detach()
        # zoom in image using the per-object affine crop
        grids = nn.functional.affine_grid(affine_matrices, inputs.size())
        input_zoom = nn.functional.grid_sample(inputs, grids).detach()
        if cfg.INPUT == 'DEPTH' or cfg.INPUT == 'RGBD':
            input_zoom_depth = nn.functional.grid_sample(inputs_depth, grids).detach()
        # compute output
        if cfg.INPUT == 'RGBD':
            quaternion_delta_var, translation_var \
                = network(input_zoom, input_zoom_depth, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
        else:
            if cfg.INPUT == 'COLOR':
                x = input_zoom
            elif cfg.INPUT == 'DEPTH':
                x = input_zoom_depth
            quaternion_delta_var, translation_var \
                = network(x, weights_rot, poses_src, poses_tgt, extents, points, zoom_factor)
        # compose pose: apply the predicted deltas to the source poses
        vdata_pose = vdata['pose_src']
        quaternion_delta = quaternion_delta_var.detach().cpu().numpy()
        translation = translation_var.detach().cpu().numpy()
        poses_est, error_rot, error_trans = _compute_pose_target(quaternion_delta, translation, vdata_pose, vdata['pose_tgt'])
        result.append(poses_est)
    pose = {'poses_init': poses_init, 'poses_est': result, 'poses_tgt': vdata['pose_tgt'], 'intrinsic_matrix': vdata['intrinsic_matrix']}
    # render result using the final refinement iteration
    im_pose, im_label, pcloud = render_image(dataset, im_color, result[-1])
    if cfg.TEST.VISUALIZE:
        if cfg.INPUT == 'RGBD':
            _vis_test(result, vis_data, 'color')
            _vis_test(result, vis_data, 'depth')
        elif cfg.INPUT == 'COLOR':
            _vis_test(result, vis_data, 'color')
        else:
            _vis_test(result, vis_data, 'depth')
    # measure elapsed time
    batch_time.update(time.time() - end)
    end = time.time()
    print('time %.4f' % (batch_time.val))
    return im_pose, pose
# backproject pixels into 3D points in camera's coordinate system
def backproject(depth_cv, intrinsic_matrix, is_reshape=False):
    """Backproject a depth image into 3D points in the camera frame.

    Args:
        depth_cv: (H, W) depth image; each value is the z-depth of its pixel.
        intrinsic_matrix: 3x3 camera intrinsics K.
        is_reshape: if True, NaNs are zeroed and the result is returned as
            (H, W, 3); otherwise as (H*W, 3) in row-major pixel order.

    Returns:
        3D points X = depth * K^-1 * [x, y, 1]^T per pixel.
    """
    depth = depth_cv.astype(np.float32, copy=True)
    # get intrinsic matrix
    K = intrinsic_matrix
    Kinv = np.linalg.inv(K)
    width = depth.shape[1]
    height = depth.shape[0]
    # construct the homogeneous 2D points matrix, one row per pixel
    x, y = np.meshgrid(np.arange(width), np.arange(height))
    ones = np.ones((height, width), dtype=np.float32)
    x2d = np.stack((x, y, ones), axis=2).reshape(width * height, 3)
    # backprojection: rays through each pixel (3 x N)
    R = np.dot(Kinv, x2d.transpose())
    # scale each ray by its depth (broadcasting replaces the original np.tile)
    X = depth.reshape(1, width * height) * R
    if is_reshape:
        # zero out invalid (NaN) depth readings
        X[np.isnan(X)] = 0
        return X.transpose().reshape((height, width, 3))
    else:
        return X.transpose()
def refine_pose(im_label, im_depth, pose_result, pcloud):
    """Refine estimated poses by comparing observed and rendered depth.

    For each estimated pose, gathers the pixels labeled as that object,
    computes the mean offset between the back-projected sensor depth and the
    rendered point cloud, and shifts the pose translation along z by that
    offset's z component.

    Args:
        im_label: per-pixel class-label image; flattened below.
        im_depth: (H, W) depth image from the sensor.
        pose_result: dict with 'poses_est' (list of pose arrays) and
            'intrinsic_matrix'; the last entry of 'poses_est' is updated.
        pcloud: rendered points flattened to (H*W, 3) — assumed aligned
            pixel-for-pixel with im_depth; TODO confirm with caller.

    Returns:
        pose_result with its last pose estimate refined in place.
    """
    # backprojection: observed depth -> 3D points in the camera frame
    intrinsic_matrix = pose_result['intrinsic_matrix']
    dpoints = backproject(im_depth, intrinsic_matrix)
    # renderer
    poses = pose_result['poses_est'][-1]
    num = poses.shape[0]
    width = im_depth.shape[1]
    height = im_depth.shape[0]
    im_label = im_label.reshape((width * height, ))
    # refine pose
    for i in range(num):
        # pose column 1 holds a zero-based class id; label image is 1-based
        cls = int(poses[i, 1]) + 1
        # pixels of this object with valid observed depth and a rendered point
        index = np.where((im_label == cls) & np.isfinite(dpoints[:, 0]) & (pcloud[:, 0] != 0))[0]
        if len(index) > 10:
            # mean observed-minus-rendered offset; only the z translation
            # (pose column 8) is corrected here
            T = np.mean(dpoints[index, :] - pcloud[index, :], axis=0)
            poses[i, 8] += T[2]
        else:
            print('no pose refinement')
    pose_result['poses_est'][-1] = poses
    return pose_result
def overlay_image(dataset, im, poses):
    """Overlay the projected 3D model points of each pose onto an image.

    Args:
        dataset: dataset object providing _classes, _class_colors,
            _points_all (num_classes x num_points x 3) and _intrinsic_matrix.
        im: input image in BGR channel order; fancy indexing below copies it,
            so the caller's array is not modified.
        poses: (N, 9) pose array; column 1 holds the class id, columns 2:6
            a unit quaternion, columns 6:9 the translation.

    Returns:
        RGB image with class-colored projected model points drawn in.
    """
    im = im[:, :, (2, 1, 0)]
    classes = dataset._classes
    class_colors = dataset._class_colors
    points = dataset._points_all
    intrinsic_matrix = dataset._intrinsic_matrix
    height = im.shape[0]
    width = im.shape[1]
    for j in range(poses.shape[0]):
        cls = int(poses[j, 1])
        if cls >= 0:
            # bug fix: print only for valid class ids; the original printed
            # before the guard, so a negative id printed classes[-1] through
            # Python's negative indexing
            print(classes[cls])
            # extract 3D model points in homogeneous coordinates
            x3d = np.ones((4, points.shape[1]), dtype=np.float32)
            x3d[0, :] = points[cls, :, 0]
            x3d[1, :] = points[cls, :, 1]
            x3d[2, :] = points[cls, :, 2]
            # projection: K [R|t] X
            RT = np.zeros((3, 4), dtype=np.float32)
            RT[:3, :3] = quat2mat(poses[j, 2:6])
            RT[:, 3] = poses[j, 6:]
            x2d = np.matmul(intrinsic_matrix, np.matmul(RT, x3d))
            x = np.round(np.divide(x2d[0, :], x2d[2, :]))
            y = np.round(np.divide(x2d[1, :], x2d[2, :]))
            # keep only projections that land inside the image
            index = np.where((x >= 0) & (x < width) & (y >= 0) & (y < height))[0]
            x = x[index].astype(np.int32)
            y = y[index].astype(np.int32)
            im[y, x, 0] = class_colors[cls][0]
            im[y, x, 1] = class_colors[cls][1]
            im[y, x, 2] = class_colors[cls][2]
    return im
def _compute_pose_target(quaternion_delta, translation, poses_src, poses_gt):
    """Compose predicted deltas with source poses and score them against GT.

    Args:
        quaternion_delta: (N, 4*num_classes) per-class delta quaternions.
        translation: (N, 3*num_classes) per-class absolute translations.
        poses_src: (N, 9) source poses; column 1 is the class id, columns
            2:6 the quaternion, columns 6:9 the translation.
        poses_gt: (N, 9) ground-truth poses in the same layout.

    Returns:
        (poses_tgt, mean rotation error in degrees, mean translation
        error in centimeters).
    """
    poses_tgt = poses_src.copy()
    num = poses_src.shape[0]
    errors_rot = np.zeros((num, ), dtype=np.float32)
    errors_trans = np.zeros((num, ), dtype=np.float32)
    for i in range(num):
        cls = int(poses_src[i, 1])
        # left-multiply the predicted delta rotation for this class
        poses_tgt[i, 2:6] = mat2quat(np.dot(quat2mat(quaternion_delta[i, 4*cls:4*cls+4]), quat2mat(poses_src[i, 2:6])))
        poses_tgt[i, 6:] = translation[i, 3*cls:3*cls+3]
        # rotation error in degrees from the quaternion inner product;
        # bug fix: clip guards arccos against arguments a hair outside
        # [-1, 1] from floating-point round-off, which produced NaN before
        cos_angle = np.clip(
            2 * np.power(np.dot(poses_tgt[i, 2:6], poses_gt[i, 2:6]), 2) - 1, -1.0, 1.0)
        errors_rot[i] = np.arccos(cos_angle) * 180.0 / np.pi
        # translation error in centimeters
        errors_trans[i] = np.linalg.norm(poses_tgt[i, 6:] - poses_gt[i, 6:]) * 100
    return poses_tgt, np.mean(errors_rot), np.mean(errors_trans)
def _get_bb3D(extent):
bb = np.zeros((3, 8), dtype=np.float32)
xHalf = extent[0] * 0.5
yHalf = extent[1] * 0.5
zHalf = extent[2] * 0.5
bb[:, 0] = [xHalf, yHalf, zHalf]
bb[:, 1] = [-xHalf, yHalf, zHalf]
bb[:, 2] = [xHalf, -yHalf, zHalf]
bb[:, 3] = [-xHalf, -yHalf, zHalf]
bb[:, 4] = [xHalf, yHalf, -zHalf]
bb[:, 5] = [-xHalf, yHalf, -zHalf]
bb[:, 6] = [xHalf, -yHalf, -zHalf]
bb[:, 7] = [-xHalf, -yHalf, -zHalf]
return bb
def _vis_test(result, vis_data, input_type):
    """Visualize per-iteration pose refinement results for each object.

    Builds one matplotlib figure per object with three rows: the input image
    (plus the target/initial rendering), the rendered source pose at every
    iteration, and the rendered estimated pose at every iteration, annotated
    with rotation (deg) and translation errors against the target pose.

    Args:
        result: list of estimated pose arrays, one entry per iteration.
        vis_data: per-iteration dicts holding images, poses and intrinsics.
        input_type: 'color' or 'depth'; selects which image blobs to show.
    """
    num_iter = len(result)
    pose_blob = vis_data[0]['pose_tgt']
    num_obj = pose_blob.shape[0]
    if input_type == 'color':
        im_blob = vis_data[0]['image'].cpu().numpy()
    else:
        im_blob = vis_data[0]['image_depth'].cpu().numpy()
    import matplotlib.pyplot as plt
    for j in range(num_obj):
        # column 0 of a pose row is the image index inside the batch
        image_id = int(pose_blob[j, 0])
        fig = plt.figure()
        # show input image (undo normalization, BGR -> RGB)
        im_input = im_blob[image_id, :, :, :].copy()
        im_input *= 255
        im_input += cfg.PIXEL_MEANS
        im_input = im_input[:, :, (2, 1, 0)]
        im_input = im_input.astype(np.uint8)
        ax = fig.add_subplot(3, num_iter, 1)
        plt.imshow(im_input)
        plt.axis('off')
        ax.set_title('input image')
        for i in range(num_iter):
            poses_est = result[i]
            intrinsic_matrix = vis_data[i]['intrinsic_matrix']
            poses_src = vis_data[i]['pose_src']
            if input_type == 'color':
                image_src_blob = vis_data[i]['image_src'].permute(0, 2, 3, 1).cpu().numpy()
                image_tgt_blob = vis_data[i]['image_tgt'].permute(0, 2, 3, 1).cpu().numpy()
            else:
                image_src_blob = vis_data[i]['image_src_depth'].permute(0, 2, 3, 1).cpu().numpy()
                image_tgt_blob = vis_data[i]['image_tgt_depth'].permute(0, 2, 3, 1).cpu().numpy()
            poses_tgt = vis_data[i]['pose_tgt']
            height = image_src_blob.shape[1]
            width = image_src_blob.shape[2]
            # images in BGR order
            num = poses_est.shape[0]
            # render every estimated pose of this iteration into images_est
            images_est = np.zeros((num, height, width, 3), dtype=np.float32)
            render_one_poses(height, width, intrinsic_matrix, poses_est, images_est)
            images_est = images_est.astype(np.uint8)
            # compute rotation/translation errors against the target pose
            R_est = quat2mat(poses_est[j, 2:6])
            R_src = quat2mat(poses_src[j, 2:6])
            R_tgt = quat2mat(poses_tgt[j, 2:6])
            error_rot_src = re(R_src, R_tgt)
            error_rot_est = re(R_est, R_tgt)
            T_est = poses_est[j, 6:]
            T_src = poses_src[j, 6:]
            T_tgt = poses_tgt[j, 6:]
            error_tran_src = te(T_src, T_tgt)
            error_tran_est = te(T_est, T_tgt)
            # show rendered images (row 2: source pose at this iteration)
            im = image_src_blob[j, :, :, :].copy()
            im *= 255
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
            ax = fig.add_subplot(3, num_iter, num_iter + 1 + i)
            ax.set_title('source iter %d (rot %.2f, tran %.4f)' % (i+1, error_rot_src, error_tran_src))
            plt.imshow(im)
            plt.axis('off')
            if i == 0:
                # target (or initial) rendering, blended over the input image
                im = image_tgt_blob[j, :, :, :3].copy()
                im *= 255
                im += cfg.PIXEL_MEANS
                im = np.clip(im, 0, 255)
                im = im[:, :, (2, 1, 0)]
                im = im.astype(np.uint8)
                im_output = 1.0 * im.astype(np.float32) + 0.5 * im_input.astype(np.float32)
                im_output = np.clip(im_output, 0, 255)
                im_output = im_output.astype(np.uint8)
                ax = fig.add_subplot(3, num_iter, 2)
                if cfg.TEST.SYNTHESIZE:
                    ax.set_title('target pose')
                else:
                    ax.set_title('initial pose')
                plt.imshow(im_output)
                plt.axis('off')
            # show estimated image (row 3), blended over the input image
            im = images_est[j, :, :, :3].copy()
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
            im_output = 1.0 * im.astype(np.float32) + 0.5 * im_input.astype(np.float32)
            im_output = np.clip(im_output, 0, 255)
            im_output = im_output.astype(np.uint8)
            ax = fig.add_subplot(3, num_iter, 2 * num_iter + 1 + i)
            ax.set_title('estimate iter %d (rot %.2f, tran %.4f)' % (i+1, error_rot_est, error_tran_est))
            plt.imshow(im_output)
            plt.axis('off')
        plt.show()
def convert_to_image(im_blob):
    """Scale a float image blob (nominally in [0, 1]) to a uint8 image."""
    scaled = im_blob * 255
    return np.clip(scaled, 0, 255).astype(np.uint8)
def _vis_minibatch(input_var, input_zoom, flow_zoom, extents, vdata, input_format='COLOR'):
    """Visualize a mini-batch for debugging.

    For each object in the batch, shows the input image with the projected
    3D box and the ground-truth box, the rendered source/target images, the
    zoomed network inputs, and the raw and zoomed optical flow.

    Args:
        input_var: unused here; kept for signature compatibility.
        input_zoom: zoomed network input tensor (N, C, H, W).
        flow_zoom: zoomed flow tensor (N, 2, H, W).
        extents: (num_classes, 3) object extents used to build 3D boxes.
        vdata: dict of visualization blobs (images, poses, flow, boxes, K).
        input_format: 'COLOR' or depth; selects which blobs are displayed.
    """
    import matplotlib.pyplot as plt
    pose_blob = vdata['pose_tgt']
    pose_src = vdata['pose_src']
    if input_format == 'COLOR':
        im_blob = vdata['image'].cpu().numpy()
        image_src_blob = vdata['image_src'].permute(0, 2, 3, 1).cpu().numpy()
        image_tgt_blob = vdata['image_tgt'].permute(0, 2, 3, 1).cpu().numpy()
    else:
        im_blob = vdata['image_depth'].cpu().numpy()
        image_src_blob = vdata['image_src_depth'].permute(0, 2, 3, 1).cpu().numpy()
        image_tgt_blob = vdata['image_tgt_depth'].permute(0, 2, 3, 1).cpu().numpy()
    intrinsic_matrix = vdata['intrinsic_matrix']
    gt_boxes = vdata['gt_boxes']
    flow_blob = vdata['flow'].permute(0, 2, 3, 1).cpu().numpy()
    for j in range(pose_blob.shape[0]):
        # column 0 of a pose row is the image index inside the batch
        image_id = int(pose_blob[j, 0])
        fig = plt.figure()
        # compute pose distances (rotation in degrees, translation scaled x100)
        error_rot = np.arccos(2 * np.power(np.dot(pose_blob[j, 2:6], pose_src[j, 2:6]), 2) - 1) * 180.0 / np.pi
        error_trans = np.linalg.norm(pose_blob[j, 6:] - pose_src[j, 6:]) * 100
        s = 'rot %.2f, trans %.2f' % (error_rot, error_trans)
        # show image (undo normalization for color; channel 2 for depth)
        im = im_blob[image_id, :, :, :].copy()
        if input_format == 'COLOR':
            im *= 255
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
        else:
            im = im[:, :, 2]
        ax = fig.add_subplot(4, 3, 1)
        plt.imshow(im)
        ax.set_title(s)
        plt.axis('off')
        # show projection box from the target pose and this class's extent
        class_id = int(pose_blob[j, 1])
        bb3d = _get_bb3D(extents[class_id, :])
        x3d = np.ones((4, 8), dtype=np.float32)
        x3d[0:3, :] = bb3d
        RT = np.zeros((3, 4), dtype=np.float32)
        RT[:3, :3] = quat2mat(pose_blob[j, 2:6])
        RT[:, 3] = pose_blob[j, 6:]
        x2d = np.matmul(intrinsic_matrix, np.matmul(RT, x3d))
        x2d[0, :] = np.divide(x2d[0, :], x2d[2, :])
        x2d[1, :] = np.divide(x2d[1, :], x2d[2, :])
        x1 = np.min(x2d[0, :])
        x2 = np.max(x2d[0, :])
        y1 = np.min(x2d[1, :])
        y2 = np.max(x2d[1, :])
        plt.gca().add_patch(
            plt.Rectangle((x1, y1), x2-x1, y2-y1, fill=False, edgecolor='g', linewidth=3))
        # show gt box
        ax = fig.add_subplot(4, 3, 2)
        plt.imshow(im)
        ax.set_title('gt box')
        x1 = gt_boxes[j, 0]
        y1 = gt_boxes[j, 1]
        x2 = gt_boxes[j, 2]
        y2 = gt_boxes[j, 3]
        plt.gca().add_patch(
            plt.Rectangle((x1, y1), x2-x1, y2-y1, fill=False, edgecolor='g', linewidth=3))
        plt.axis('off')
        # show rendered images
        if input_format == 'COLOR':
            im = image_src_blob[j, :, :, :].copy()
            im *= 255
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
        else:
            im = image_src_blob[j, :, :, 2].copy()
        ax = fig.add_subplot(4, 3, 4)
        ax.set_title('source image render')
        plt.imshow(im)
        plt.axis('off')
        if input_format == 'COLOR':
            im = image_tgt_blob[j, :, :, :3].copy()
            im *= 255
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
        else:
            im = image_tgt_blob[j, :, :, 2].copy()
        ax = fig.add_subplot(4, 3, 5)
        ax.set_title('target image render')
        plt.imshow(im)
        plt.axis('off')
        # show zoomed images (source in channels 0:3, target in 3:6)
        if input_format == 'COLOR':
            im = input_zoom[j, :3, :, :].cpu().numpy()
            im = 255 * np.transpose(im, (1, 2, 0))
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
        else:
            im = input_zoom[j, 2, :, :].cpu().numpy()
        ax = fig.add_subplot(4, 3, 3)
        ax.set_title('zoomed source image')
        plt.imshow(im)
        plt.axis('off')
        if input_format == 'COLOR':
            im = input_zoom[j, 3:6, :, :].cpu().numpy()
            im = 255 * np.transpose(im, (1, 2, 0))
            im += cfg.PIXEL_MEANS
            im = np.clip(im, 0, 255)
            im = im[:, :, (2, 1, 0)]
            im = im.astype(np.uint8)
        else:
            im = input_zoom[j, 5, :, :].cpu().numpy()
        ax = fig.add_subplot(4, 3, 6)
        ax.set_title('zoomed target image')
        plt.imshow(im)
        plt.axis('off')
        # show flow (x and y components plus a color-coded flow image)
        flow = flow_blob[j, :, :, 0]
        ax = fig.add_subplot(4, 3, 7)
        ax.set_title('flow x')
        plt.imshow(flow)
        plt.axis('off')
        flow = flow_blob[j, :, :, 1]
        ax = fig.add_subplot(4, 3, 8)
        ax.set_title('flow y')
        plt.imshow(flow)
        plt.axis('off')
        flow_image = sintel_compute_color(flow_blob[j, :, :, :])
        ax = fig.add_subplot(4, 3, 9)
        ax.set_title('flow image')
        plt.imshow(flow_image)
        plt.axis('off')
        # show zoomed flow
        flow = flow_zoom[j, 0, :, :].cpu().numpy()
        ax = fig.add_subplot(4, 3, 10)
        ax.set_title('zoomed flow x')
        plt.imshow(flow)
        plt.axis('off')
        flow = flow_zoom[j, 1, :, :].cpu().numpy()
        ax = fig.add_subplot(4, 3, 11)
        ax.set_title('zoomed flow y')
        plt.imshow(flow)
        plt.axis('off')
        flow = flow_zoom[j, :, :, :].cpu().numpy()
        flow_image = sintel_compute_color(np.transpose(flow, (1, 2, 0)))
        ax = fig.add_subplot(4, 3, 12)
        ax.set_title('zoomed flow image')
        plt.imshow(flow_image)
        plt.axis('off')
        plt.show()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging
import requests
import json
import app.settings as settings
from typing import Dict
logger = logging.getLogger('server')
class TwitterService:
    """Thin HTTP client for the twitter-connector service."""

    def __init__(self):
        # placeholder for a future persistent connection
        self.conn = None

    def get_conversation(self, tweetId: str) -> Dict:
        """Fetch the conversation for a tweet from the twitter connector.

        Args:
            tweetId: id of the tweet whose conversation is requested.

        Returns:
            The decoded JSON payload as a dict.

        Raises:
            requests.HTTPError: if the connector does not answer with 200.
        """
        twitter_conn = settings.get_twitter_connector()
        url = "http://" + twitter_conn['host'] + ":" + twitter_conn['port'] + "/tweets/conversation/" + tweetId
        response = requests.get(url)
        # bug fix: the original tested "response.status_code is 200", which
        # compares object identity and only works because CPython caches
        # small ints; use a real equality comparison and raise with context
        if response.status_code != 200:
            raise requests.HTTPError(
                "twitter connector returned status {0}".format(response.status_code))
        return json.loads(response.content)
#!/usr/bin/env python
# Advent of Code 2017, day 5 part 2: follow jump offsets until the
# instruction pointer leaves the list, counting the steps taken.
# bug fixes vs. the original: the input file is now closed (with-block),
# the builtin name "next" is no longer shadowed, and a jump to a negative
# index now terminates instead of wrapping around via negative indexing.
with open('/Users/kosta/dev/advent-of-code-17/day5/input.txt') as f:
    # one jump offset per line
    contents = [int(line) for line in f.readlines()]
print(contents)

step = 0
cur = 0
while 0 <= cur < len(contents):
    target = contents[cur] + cur
    # part-2 rule: offsets of three or more decrease, smaller ones increase
    if contents[cur] < 3:
        contents[cur] += 1
    else:
        contents[cur] -= 1
    cur = target
    step += 1
print(step)
#!/usr/bin/env python
"""
Copyright (C) 2018 Intel Corporation
?
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
?
http://www.apache.org/licenses/LICENSE-2.0
?
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions
and limitations under the License.
?
SPDX-License-Identifier: Apache-2.0
"""
# standard libraries
import time
import traceback
# from warnings import warn
from testlib.base.base_step import step as base_step
from testlib.scripts.android.android_step import step as android_step
from testlib.scripts.android.ui.ui_step import step as ui_step
from testlib.scripts.android.ui import ui_utils
from testlib.scripts.android.adb.adb_step import step as adb_step
from testlib.scripts.android.adb import adb_utils
from testlib.scripts.android.ui.gms import gms_utils
from testlib.utils.statics.android import statics
from testlib.utils.connections.adb import Adb
from testlib.scripts.android.ui import uiconfig
from testlib.scripts.android.ui import ui_steps
class set_pin_screen_lock(ui_step):
    """ description:
            sets the screen lock method to PIN <selected PIN>;
            if a PIN is already set, it backs out and skips
        usage:
            ui_steps.set_pin_screen_lock(dut_pin = "1234")()
        tags:
            ui, android, click, button
    """

    def __init__(self, dut_pin="1234",
                 require_pin_to_start_device=False, wait_time=5000, **kwargs):
        # dut_pin: PIN to set (falls back to "1234" when falsy)
        # require_pin_to_start_device: opt into secure start-up when offered
        # wait_time: UI wait timeout in milliseconds
        ui_step.__init__(self, **kwargs)
        self.require_pin_to_start_device = require_pin_to_start_device
        if dut_pin:
            self.dut_pin = dut_pin
        else:
            self.dut_pin = "1234"
        if wait_time:
            self.wait_time = int(wait_time)
        else:
            self.wait_time = 5000

    def do(self):
        ui_steps.open_security_settings(serial=self.serial)()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"textContains": "Screen lock"})()
        # a "Confirm your PIN" prompt means a PIN is already set -> back out
        if self.uidevice(text="Confirm your PIN").wait.exists(timeout=self.wait_time):
            if self.device_info.confirm_pin_go_back is not None:
                ui_steps.click_button(
                    serial=self.serial, view_to_find=self.device_info.confirm_pin_go_back)()
            else:
                self.uidevice.press.back()
            # dismiss the soft keyboard before leaving the settings screen
            if adb_utils.is_virtual_keyboard_on(serial=self.serial):
                ui_steps.press_back(serial=self.serial)()
            ui_steps.press_back(serial=self.serial)()
        else:
            ui_steps.click_button(
                serial=self.serial, view_to_find={"textContains": "PIN"})()
            # dialog asking whether the PIN should be required at boot
            if self.uidevice(text="Require PIN to start device").wait.exists(timeout=self.wait_time):
                if self.require_pin_to_start_device:
                    ui_steps.click_button(serial=self.serial, view_to_find={
                        "textContains": "Require PIN to start device"})()
                    ui_steps.click_button(
                        serial=self.serial, view_to_find={"text": "OK"}, optional=True)()
                else:
                    ui_steps.click_button(
                        serial=self.serial, view_to_find={"textContains": "No thanks"})()
                # Android M shows an extra confirmation screen here
                if self.device_info.dessert == "M":
                    ui_steps.click_button(
                        serial=self.serial, view_to_find={"textContains": "Continue"})()
            # newer builds phrase the boot-PIN question as "Secure start-up"
            if self.uidevice(text="Secure start-up").wait.exists(
                    timeout=self.wait_time):
                if self.require_pin_to_start_device:
                    ui_steps.click_button(
                        serial=self.serial, view_to_find={"text": "YES"}, optional=True)()
                else:
                    ui_steps.click_button(
                        serial=self.serial, view_to_find={"text": "NO"}, optional=True)()
            # enter the PIN twice (set + confirm), then finish
            ui_steps.edit_text(serial=self.serial, view_to_find={"resourceId":
                               "com.android.settings:id/password_entry"},
                               value=self.dut_pin, is_password=True)()
            ui_steps.click_button(serial=self.serial, view_to_find={
                "resourceId": "com.android.settings:id/next_button"})()
            ui_steps.edit_text(serial=self.serial, view_to_find={"resourceId":
                               "com.android.settings:id/password_entry"},
                               value=self.dut_pin, is_password=True)()
            ui_steps.click_button(serial=self.serial, view_to_find={
                "resourceId": "com.android.settings:id/next_button"})()
            ui_steps.click_button(
                serial=self.serial, view_to_find=self.device_info.password_done_btn_id)()

    def check_condition(self):
        # scroll the lock entry into view if the list is scrollable, then
        # verify the "Screen lock" summary now reads "PIN"
        if self.uidevice(className="android.widget.ListView", scrollable=True).wait.exists(timeout=self.wait_time):
            self.uidevice(scrollable=True).scroll.to(text="Screen lock")
        return self.uidevice(className="android.widget.TextView", resourceId="android:id/summary",
                             text="PIN").wait.exists(timeout=self.wait_time)
class remove_pin_screen_lock(ui_step):
    """ description:
            removes the PIN screen lock, switching back to <new_mode>
            (default "Swipe"); confirms the current PIN when prompted
        usage:
            ui_steps.remove_pin_screen_lock(dut_pin = "1234")()
        tags:
            ui, android, click, button
    """

    def __init__(
            self, new_mode="Swipe", dut_pin="1234", wait_time=5000, **kwargs):
        # new_mode: lock method to switch to after removing the PIN
        # dut_pin: the currently set PIN (falls back to "1234" when falsy)
        # wait_time: UI wait timeout in milliseconds
        ui_step.__init__(self, **kwargs)
        self.new_mode = new_mode
        if dut_pin:
            self.dut_pin = dut_pin
        else:
            self.dut_pin = "1234"
        if wait_time:
            self.wait_time = int(wait_time)
        else:
            self.wait_time = 5000

    def do(self):
        ui_steps.open_security_settings(serial=self.serial)()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"textContains": "Screen lock"})()
        # confirm the existing PIN before the lock method can be changed
        if self.uidevice(text="Confirm your PIN").wait.exists(timeout=self.wait_time):
            ui_steps.edit_text(view_to_find={"resourceId": "com.android.settings:id/password_entry"},
                               value=self.dut_pin, serial=self.serial, is_password=True)()
            self.uidevice.press("enter")
        ui_steps.click_button(
            serial=self.serial, view_to_find={"textContains": self.new_mode})()
        # Remove device PIN protection
        if self.uidevice(textContains=self.device_info.remove_pin_confirm_desc).wait.exists(timeout=self.wait_time):
            ui_steps.click_button(serial=self.serial, view_to_find={
                "text": self.device_info.remove_pin_confirm_button})()

    def check_condition(self):
        # verify the "Screen lock" summary now shows the new mode
        if self.uidevice(className="android.widget.ListView", scrollable=True).wait.exists(timeout=self.wait_time):
            self.uidevice(scrollable=True).scroll.to(text="Screen lock")
        return self.uidevice(className="android.widget.TextView", resourceId="android:id/summary",
                             text=self.new_mode).wait.exists(timeout=self.wait_time)
class open_security_settings(adb_step):
    """ description:
            Opens the Security Settings page using an intent.
        usage:
            ui_steps.open_security_settings()()
        tags:
            ui, android, settings, security, intent
    """

    def __init__(self, **kwargs):
        adb_step.__init__(self, **kwargs)
        # activity component launched through "am start -n"
        self.component = "com.android.settings/.SecuritySettings"

    def do(self):
        ui_steps.am_start_command(serial=self.serial,
                                  component=self.component)()

    def check_condition(self):
        # the launch was already validated inside am_start_command
        return True
class open_users_settings(adb_step):
    """ description:
            Opens the Users Settings page using an intent.
        usage:
            ui_steps.open_users_settings()()
        tags:
            ui, android, settings, users, intent
    """

    def __init__(self, **kwargs):
        adb_step.__init__(self, **kwargs)
        # the backslash keeps "$" literal when the component reaches the
        # adb shell command line
        self.component = 'com.android.settings/.Settings\$UserSettingsActivity'

    def do(self):
        ui_steps.am_start_command(
            serial=self.serial,
            component=self.component)()

    def check_condition(self):
        # check performed in the last step from do()
        return True
class am_start_command(adb_step):
    """ description:
            Clears an app's stored data ("pm clear") and starts the given
            activity component ("am start -n"), optionally verifying a view
            appears or disappears afterwards.
        usage:
            ui_steps.am_start_command(component="com.android.settings/.Settings")()
        tags:
            ui, android, settings, intent, am
    """

    def __init__(self, component=None, timeout=20,
                 view_to_check=None, view_presence=True, **kwargs):
        # component: "package/activity" to launch; None launches nothing
        # view_to_check: optional selector to verify after the launch
        # view_presence: True -> expect the view to appear, False -> to vanish
        self.component = component
        self.timeout = timeout
        self.view_to_check = view_to_check
        self.view_presence = view_presence
        # bug fix: always define self.package; the original only set it when
        # component was truthy, so do() crashed with AttributeError otherwise
        if self.component:
            self.package = self.component.split("/")[0]
        else:
            self.package = None
        adb_step.__init__(self, **kwargs)
        self.step_data = False

    def do(self):
        # reset the app to a known state before launching it
        if self.package:
            clean_command = "pm clear {0}".format(self.package)
            self.process = self.adb_connection.run_cmd(command=clean_command, ignore_error=False,
                                                       timeout=self.timeout, mode="sync")
        if self.component:
            open_command = "am start -n {0}".format(self.component)
            self.step_data = self.adb_connection.run_cmd(command=open_command, ignore_error=False,
                                                         timeout=self.timeout, mode="sync")

    def check_condition(self):
        # bug fix: when no component was given, step_data is still the
        # boolean False; fail cleanly instead of crashing on False.stdout
        if self.step_data is False:
            return False
        stdout = self.step_data.stdout.read()
        stderr = self.step_data.stderr.read()
        self.set_passm("am start -n {0}".format(self.component))
        self.set_errorm(
            "", "am start -n {0}: \n\tStdout\n{1}\n\tStderr\n{2}".format(self.component, stdout, stderr))
        if "Error" in stdout or "Exception" in stdout:
            self.step_data = False
            return self.step_data
        if self.view_to_check is None:
            self.step_data = True
            return self.step_data
        if self.view_presence:
            self.step_data = self.uidevice(
                **self.view_to_check).wait.exists(timeout=self.timeout)
        else:
            self.step_data = self.uidevice(
                **self.view_to_check).wait.gone(timeout=self.timeout)
        return self.step_data
class am_stop_package(adb_step, ui_step):
    """ Description:
            Executes command 'adb shell am force-stop [package_name]'. Pass
            package name to package_name parameter.
        Usage:
            ui_steps.am_stop_package(serial=serial,
                                     package_name="com.android.settings")()
        tags:
            ui, android, stop, package
    """

    def __init__(self, package_name, view_to_check=None,
                 view_presence=True, timeout=5000, **kwargs):
        """
        :param package_name: package name of the app to be stopped
        :param view_to_check: view after a package is closed to be checked
        :param view_presence: should be True if appears, False if disappears
        :param kwargs: serial and standard kwargs for base_step
        """
        adb_step.__init__(self, **kwargs)
        ui_step.__init__(self, **kwargs)
        self.package_name = package_name
        self.view_to_check = view_to_check
        self.view_presence = view_presence
        self.timeout = timeout
        self.step_data = False

    def do(self):
        try:
            self.adb_connection.run_cmd(
                "am force-stop " + str(self.package_name))
            self.step_data = True
        except Exception as e:
            # best effort: log and leave step_data False so the check fails;
            # bug fix: e.message does not exist on Python 3 exceptions,
            # str(e) works on both Python 2 and 3
            info_message = "Exception encountered when stop " + \
                           str(self.package_name) + ": " + str(e)
            if self.serial:
                info_message = "[ " + str(self.serial) + " ] " + info_message
            self.logger.info(info_message)

    def check_condition(self):
        if self.step_data is False:
            return False
        if self.view_to_check is None:
            return True
        # bug fix: the Python-2-only print statement is replaced with the
        # function form, valid on both Python 2 and 3
        print(self.view_to_check)
        if self.view_presence:
            self.step_data = self.uidevice(
                **self.view_to_check).wait.exists(timeout=self.timeout)
        else:
            self.step_data = self.uidevice(
                **self.view_to_check).wait.gone(timeout=self.timeout)
        return self.step_data
# TODO: rename with open_quick_settings_with_swipe
# add desciption
class open_notifications_menu(ui_step):
    """Open the notification/quick-settings shade by swiping down twice."""

    def __init__(self, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.set_passm("Open notification menu - swipe")
        self.set_errorm("", "Open notification menu - swipe")
        display_info = self.uidevice.info
        self.x_center = display_info['displayWidth'] / 2
        self.y_center = display_info['displayHeight'] / 2

    def do(self):
        # two swipes: the first reveals notifications, the second expands
        # the quick-settings panel
        for _ in range(2):
            ui_steps.swipe(serial=self.serial, sx=self.x_center, sy=1,
                           ex=self.x_center, ey=self.y_center, steps=10)()
            time.sleep(1)

    def check_condition(self):
        container = {"resourceId": "com.android.systemui:id/quick_settings_container"}
        return ui_steps.wait_for_view(view_to_find=container,
                                      serial=self.serial)()
class press_all_apps(ui_step):
    """ description:
            opens all application activity
        usage:
            ui_steps.press_all_apps()
        tags:
            ui, android, press, click, allapps, applications
    """

    def __init__(self, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.step_data = False

    def do(self):
        if self.device_info.all_apps_icon is not None:
            ui_steps.press_home(serial=self.serial)()
            time.sleep(1)
            self.uidevice(descriptionContains="Apps").click.wait()
            dut_platform = statics.Device(serial=self.serial)
            self.dut_dessert = dut_platform.dessert
            # page through the app list until "Calculator" shows up or the
            # first visible TextView stops changing (no more pages)
            while not self.uidevice(text="Calculator").wait.exists(timeout=1000):
                first_app = self.uidevice(className="android.widget.TextView")
                maxx = self.uidevice.info['displaySizeDpX']
                maxy = self.uidevice.info['displaySizeDpY']
                # swipe horizontally
                self.uidevice.swipe(
                    1, int(maxy / 2), int(maxx) - 1, int(maxy / 2))
                self.uidevice.swipe(
                    int(maxx / 2), int(maxy / 2), int(maxx / 2), int(maxy) - 1)
                if first_app == self.uidevice(className="android.widget.TextView"):
                    break
            self.step_data = True
        else:
            self.logger.error("All apps icon not available for platform: {"
                              "0}".format(self.device_info.platform))

    def check_condition(self):
        if self.step_data is False:
            return self.step_data
        adb_connection = Adb(serial=self.serial)
        product_name = adb_connection.parse_cmd_output(
            cmd="cat /system/build.prop", grep_for="ro.product.name")
        if product_name:
            ro_name = product_name.split("=")[1]
            # bug fix: the original used "is not" here, which compares string
            # identity (interning-dependent), not equality
            if self.dut_dessert == "M" and ro_name != "r0_bxtp_abl":
                return self.uidevice(
                    resourceId="com.android.launcher3:id/apps_list_view").wait.exists(timeout=20000)
            elif self.dut_dessert == "L":
                return self.uidevice(
                    descriptionContains="Apps page 1 of").wait.exists(timeout=20000)
            elif ro_name == "r0_bxtp_abl":
                pass
class press_home(ui_step):
    """ description:
            opens home page
        usage:
            ui_steps.press_home()
        tags:
            ui, android, press, click, home, homepage
    """

    def __init__(self, wait_time=20000, **kwargs):
        # wait_time: overall budget (in ms) for reaching the home screen
        self.wait_time = wait_time
        ui_step.__init__(self, **kwargs)
        # adb_connection = Adb(serial=self.serial)
        # product_name = adb_connection.parse_cmd_output(
        #     cmd="cat /system/build.prop", grep_for="ro.product.name")
        # self.ro_name = None
        # if product_name:
        #     ro_name = product_name.split("=")[1]
        #     self.ro_name = ro_name
        self.step_data = False

    @property
    def home_state(self):
        # True when any known home-screen landmark view is present; the list
        # covers several launchers (GMS search box, car launcher, launcher3)
        if self.uidevice(textContains="Google").wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.android.systemui:id/user_name").wait.exists(timeout=100) or \
                self.uidevice(descriptionContains="Home screen", className="android.widget.LinearLayout")\
                .wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.google.android.googlequicksearchbox:id/vertical_search_button")\
                .wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.google.android.googlequicksearchbox:id/search_edit_frame")\
                .wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.android.car.overview:id/gear_button").wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.android.car.overview:id/voice_button").wait.exists(timeout=100) or \
                self.uidevice(resourceId="com.android.launcher3:id/btn_qsb_search").wait.exists(timeout=100):
            return True
        else:
            return False

    def do(self):
        # if self.home_state:
        #     self.logger.info("Home screen already present [ {0} ]".format(
        #         self.serial))
        # retry pressing home until a landmark is seen or the budget runs
        # out; each attempt is counted as ~300 ms (no explicit sleep here —
        # the per-check 100 ms waits in home_state provide the pacing)
        time_elapsed = 0
        while time_elapsed < self.wait_time:
            if self.device_info.device_type == "tab":
                # self.uidevice.press.recent()
                self.uidevice.press.home()
            else:
                # non-tablet targets reach home via the quick-settings shade
                self.uidevice.open.quick_settings()
            if self.home_state:
                self.step_data = True
                break
            time_elapsed += 300

    def check_condition(self):
        # time_elapsed = 0
        # while time_elapsed < self.wait_time:
        #     if self.home_state:
        #         return True
        #     else:
        #         self.uidevice.press.home()
        #         time_elapsed += 300
        return self.step_data
class press_recent_apps(ui_step):
    """ description:
            opens recent apps
        usage:
            ui_steps.press_recent_apps()
        tags:
            ui, android, press, click, recent apps, homepage
    """

    def do(self):
        thumbnail = self.uidevice(
            resourceId="com.android.systemui:id/task_view_thumbnail")
        empty_hint = self.uidevice(textContains="recent items")
        # only press the recents key when the recents UI is not already shown
        if not thumbnail.wait.exists(timeout=1000) and \
                not empty_hint.wait.exists(timeout=1000):
            self.uidevice.press.recent()

    def check_condition(self):
        self.uidevice.wait.update(timeout=5000)
        # success means either an empty recents screen or at least one task
        if self.uidevice(text="No recent items").wait.exists(timeout=1000):
            return True
        return self.uidevice(
            resourceId="com.android.systemui:id/task_view_thumbnail").wait.exists(timeout=1000)
class app_in_recent_apps(base_step):
    """ description:
            opens recent apps and checks if <app_name> app is present
        usage:
            ui_steps.app_in_recent_apps(app_name = "Chrome")
        tags:
            ui, android, press, click, recent apps, homepage, app
    """

    def __init__(self, app_name, target="tablet", **kwargs):
        self.app_name = app_name
        self.target = target
        base_step.__init__(self, **kwargs)
        self.set_passm("{0} is present in recent apps".format(self.app_name))
        self.set_errorm(
            "", "Could not find {0} in recent apps".format(self.app_name))

    def do(self):
        # bring up the recents screen; the lookup happens in check_condition
        ui_steps.press_recent_apps(serial=self.serial)()

    def check_condition(self):
        selector = {"descriptionContains": self.app_name}
        # tablets page horizontally through recents, phones scroll vertically
        if self.target == "tablet":
            return ui_utils.is_view_visible_scroll_left(
                view_to_find=selector)
        if self.target == "phone":
            return ui_utils.is_view_visible(view_to_find=selector)
class open_app_from_recent_apps(android_step):
    """ description:
            Opens <app_name> app from recent apps.
            If the DUT is a tablet (default), it will swipe to the
            left for multiple apps in recent apps.
            If the DUT is a phone, it will scroll down for multiple
            apps in recent apps.
        usage:
            ui_steps.open_app_in_recent_apps(app_name = "Chrome")
        tags:
            ui, android, press, click, recent apps, homepage, app
    """

    def __init__(self, app_name, target="tablet", **kwargs):
        android_step.__init__(self, **kwargs)
        self.app_name = app_name
        self.target = target
        self.set_passm("Open {0} from recent apps".format(self.app_name))
        self.set_errorm(
            "", "Could not open {0} from recent apps".format(self.app_name))

    def do(self):
        # show the recents screen; the click happens in check_condition
        ui_steps.press_recent_apps(serial=self.serial)()

    def check_condition(self):
        selector = {"descriptionContains": self.app_name}
        # click=True both verifies visibility and taps the entry
        if self.target == "tablet":
            return ui_utils.is_view_visible_scroll_left(
                view_to_find=selector, click=True)
        if self.target == "phone":
            return ui_utils.is_view_visible(
                view_to_find=selector, click=True)
class open_app_from_allapps(ui_step):
    """ description:
            opens the application identified by <view_to_find> from
            the all-apps activity
            if <view_to_check> given it will check that
            the object identified by <view_to_check>:
            - appeared if <view_presence> is True
            - disappeared if <view_presence> is False
        usage:
            ui.steps.open_app_from_allapps(view_to_find = {"text": "Settings"})()
        tags:
            ui, android, press, click, app, application, allapps
    """

    def __init__(self, view_to_find, view_to_check=None,
                 view_presence=True, wait_time=20000, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_to_find = view_to_find
        self.view_to_check = view_to_check
        self.view_presence = view_presence
        self.wait_time = wait_time
        # target the first matching instance unless the caller picked one
        self.view_to_find.setdefault("instance", 0)
        self.set_errorm(
            "", "App {0} could not be opened applications page".format(view_to_find))
        self.set_passm(
            "App {0} opened for applications page".format(view_to_find))

    def do(self):
        ui_steps.press_all_apps(serial=self.serial)()
        ui_utils.click_apps_entry(
            serial=self.serial, view_to_find=self.view_to_find)

    def check_condition(self):
        if self.view_to_check is None:
            return True
        # long wait for the app to come up, then a short confirming re-check
        self.uidevice(**self.view_to_check).wait.exists(timeout=self.wait_time)
        exists = self.uidevice(**self.view_to_check).wait.exists(timeout=1000)
        return exists if self.view_presence else not exists
class find_app_from_allapps(ui_step):
    """ description:
            finds the application identified by <view_to_find> from
            all application activity
        usage:
            ui.steps.find_app_from_allapps(view_to_find = {"text": "Settings"})()
        tags:
            ui, android, find, app, application, allapps
    """

    def __init__(self, view_to_find, presence=True, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_to_find = view_to_find
        self.presence = presence
        self.set_errorm(
            "", "App {0} was not found in applications page".format(view_to_find))
        self.set_passm(
            "App {0} found in applications page".format(view_to_find))

    def do(self):
        # open the all-apps page; the actual lookup runs in check_condition
        ui_steps.press_all_apps(serial=self.serial)()

    def check_condition(self):
        return ui_steps.wait_for_view(
            serial=self.serial, view_to_find=self.view_to_find,
            presence=self.presence)()
class open_smart_lock_settings(ui_step):
    """ description:
            opens the Smart Lock settings page, confirming the device
            PIN when prompted
        usage:
            ui_steps.open_smart_lock_settings(pin = "1234")()
        tags:
            ui, android, press, click, settings, smart lock
    """

    def __init__(self, pin, **kwargs):
        self.pin = pin
        ui_step.__init__(self, **kwargs)

    def do(self):
        password_entry = {"resourceId": "com.android.settings:id/password_entry"}
        ui_steps.open_security_settings(serial=self.serial)()
        # entering Smart Lock prompts for the current PIN
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Smart Lock"},
                              view_to_check=password_entry)()
        ui_steps.edit_text(serial=self.serial, view_to_find=password_entry,
                           value=self.pin, is_password=True)()
        self.uidevice.press("enter")

    def check_condition(self):
        return ui_steps.wait_for_view(
            serial=self.serial, timeout=5000,
            view_to_find={"text": "Trusted devices"})()
class open_settings(ui_step):

    """ description:
            opens settings activity from all application page

        usage:
            ui_steps.open_settings()

        tags:
            ui, android, press, click, settings, allapps
    """

    def __init__(
            self, intent=False, settings_check_point=".*?(S|s)ettings", timeout=5000, **kwargs):
        # intent: launch Settings directly via an activity-manager intent
        #         instead of navigating the launcher UI.
        # settings_check_point: regex matched against the opened page title.
        self.intent = intent
        self.settings_check_point = settings_check_point
        self.timeout = timeout
        ui_step.__init__(self, **kwargs)

    def do(self):
        if self.intent:
            # Fast path: start the Settings activity directly.
            ui_steps.am_start_command(
                serial=self.serial, component="com.android.settings/.Settings")()
        else:
            all_apps_icon = self.device_info.all_apps_icon
            if all_apps_icon is not None:
                # Launcher with an all-apps drawer: open it and tap Settings.
                # for property in all_apps_icon:
                # if self.uidevice(**property).wait.exists(
                # timeout=self.timeout)
                # self.uidevice(**property).click.wait()
                # break
                ui_steps.open_app_from_allapps(
                    serial=self.serial, view_to_find={"text": "Settings"})()
            else:
                # Automotive (car) launcher fallback.
                ui_steps.press_car(serial=self.serial)()
                # click_button_with_scroll(serial=self.serial, view_to_find={
                # "text": "Settings"})()
                # Todo need to replace the below lines with above commented
                # line when two settings in car replaced with actual one.
                # self.uidevice(scrollable=True).scroll.toEnd()
                # self.uidevice(index="6",
                # className="android.widget.FrameLayout").child(
                # text="Settings").click()
                # Some car builds show two "Settings" entries; scroll until
                # both are visible and tap the second one.
                for i in range(0, 5):
                    if self.uidevice(text="Settings").count == 2:
                        self.uidevice(text="Settings")[1].click()
                        break
                    else:
                        self.uidevice(scrollable=True).scroll()
                else:
                    # for/else: no duplicate found after 5 scrolls — tap the
                    # single "Settings" entry instead.
                    self.uidevice(text="Settings").click()

    def check_condition(self):
        # NOTE(review): uses a hard-coded 5000 ms rather than self.timeout —
        # presumably intentional, but worth confirming.
        return ui_steps.wait_for_view(serial=self.serial, timeout=5000,
                                      view_to_find={"textMatches": self.settings_check_point}, iterations=1)()
class open_settings_app(ui_step):

    """ description:
            opens an app/activity identified by <view_to_find> from
            settings page starting from homepage
            if <view_to_check> given it will check that
            the object identified by <view_to_check>:
            - appeared if <view_presence> is True
            - disappeared if <view_presence> is False

        usage:
            ui_steps.open_settings_app(
                view_to_find = {"text": "Apps"},
                view_to_check = {"text": "Donwloaded"})()

        tags:
            ui, android, press, click, app, application, settings, homepage
    """

    def __init__(self, view_to_find, view_to_check=None,
                 view_presence=True, wait_time=None, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_to_find = view_to_find
        self.view_to_check = view_to_check
        self.view_presence = view_presence
        self.wait_time = wait_time
        self.set_errorm("", "Could not open app {0} from settings checking {1}".format(
            view_to_find, view_to_check))
        self.set_passm(
            "Open app {0} from settings checking {1}".format(view_to_find, view_to_check))

    def do(self):
        # Navigate to Settings first, then to the requested entry.
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.open_app_from_settings(
            serial=self.serial, view_to_find=self.view_to_find)()

    def check_condition(self):
        if self.view_to_check is None:
            return True
        if self.wait_time:
            # Optional longer grace period before the final short poll.
            self.uidevice(
                **self.view_to_check).wait.exists(timeout=self.wait_time)
        exists = self.uidevice(**self.view_to_check).wait.exists(timeout=1000)
        # Bug fix: honor <view_presence> as documented — previously the
        # flag was stored but ignored, so "disappeared" could never be
        # verified.
        return exists if self.view_presence else not exists
class open_app_from_settings(ui_step):

    """ description:
            opens an app/activity identified by <view_to_find> from
            settings page
            if <view_to_check> given it will check that
            the object identified by <view_to_check>:
            - appeared if <view_presence> is True
            - disappeared if <view_presence> is False

        usage:
            ui_steps.open_app_from_settings(view_to_find =
            {"text": "Apps"}, view_to_check = {"text": "Donwloaded"})()

        tags:
            ui, android, press, click, app, application, settings
    """

    def __init__(self, view_to_find, view_to_check=None,
                 view_presence=True, wait_time=None, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_to_find = view_to_find
        self.view_to_check = view_to_check
        self.view_presence = view_presence
        self.wait_time = wait_time
        # self.scroll = scroll
        self.set_errorm("", "Could not open app {0} from settings checking {1}".format(
            view_to_find, view_to_check))
        self.set_passm(
            "Open app {0} from settings checking {1}".format(view_to_find, view_to_check))

    def do(self):
        # Scroll back to the top of the settings list, then scroll down to
        # the requested entry and tap it.
        # NOTE(review): the scrollable=False branch looks inverted (it asks
        # a non-scrollable container to scroll) — kept as-is since some
        # containers report scrollable=False yet still accept scroll
        # gestures; confirm against the target launcher.
        if self.uidevice(scrollable=False).exists:
            self.uidevice(scrollable=False).scroll.vert.toBeginning(
                steps=100, max_swipes=1000)
            self.uidevice(scrollable=False).scroll.to(**self.view_to_find)
        else:
            self.uidevice(scrollable=True).scroll.vert.toBeginning(
                steps=100, max_swipes=1000)
            self.uidevice(scrollable=True).scroll.to(**self.view_to_find)
        self.uidevice(**self.view_to_find).click.wait()

    def check_condition(self):
        if self.view_to_check is None:
            return True
        if self.wait_time:
            # Optional longer grace period before the final short poll.
            self.uidevice(
                **self.view_to_check).wait.exists(timeout=self.wait_time)
        exists = self.uidevice(**self.view_to_check).wait.exists(timeout=1000)
        # Bug fix: honor <view_presence> as documented — previously the
        # flag was stored but ignored, so "disappeared" could never be
        # verified.
        return exists if self.view_presence else not exists
# TODO: remake
class open_quick_settings(ui_step):

    """ description:
            opens quick settings

        usage:
            ui_steps.open_quick_settings()

        tags:
            ui, android, open, press, click, quicksettings
    """

    def __init__(self, version="L", **kwargs):
        ui_step.__init__(self, **kwargs)
        self.version = version
        self.set_errorm("", "Could not open quick setings")
        self.set_passm("Open quick settings")

    def do(self):
        dessert = self.device_info.dessert
        if dessert in ("L", "M"):
            # On L/M the shade needs two pulls (notifications first, then
            # quick settings), each preceded by a short settle delay.
            for _ in range(2):
                time.sleep(1)
                self.uidevice.open.quick_settings()
            time.sleep(1)
        else:
            self.uidevice.open.quick_settings()

    def check_condition(self):
        # The battery tile is always present in the quick-settings panel.
        return self.uidevice(
            descriptionContains="Battery").wait.exists(timeout=1000)
class open_playstore(ui_step):

    """ description:
            opens Play store application from all application page

        usage:
            ui_steps.open_play_store()

        tags:
            ui, android, press, click, playstore, allapps, applications
    """

    def do(self):
        ui_steps.open_app_from_allapps(
            serial=self.serial, view_to_find={"text": "Play Store"})()

    def check_condition(self):
        self.uidevice.wait.idle()
        # Either the account prompt (no Google account yet) or the store
        # home tab counts as a successful launch.
        if self.uidevice(text="Add a Google Account").wait.exists(timeout=5000):
            return True
        return self.uidevice(
            text="HOME", resourceId="com.android.vending:id/title").wait.exists(timeout=20000)
class open_google_books(ui_step):

    """ description:
            opens Google Books application from all application page

        usage:
            ui_steps.open_google_books()

        tags:
            ui, android, press, click, books, allapps, applications
    """

    def do(self):
        ui_steps.open_app_from_allapps(
            serial=self.serial, view_to_find={"text": "Play Books"})()

    def check_condition(self):
        self.uidevice.wait.idle()
        # Any of these views indicates that Play Books (or its account
        # prompt) is in the foreground; probe them in order.
        markers = ("Add a Google Account", "Read Now", "My Library",
                   "Settings", "Help")
        for marker in markers:
            if self.uidevice(text=marker).wait.exists(timeout=100):
                return True
        return False
class add_google_account(ui_step):

    """ description:
            adds google account <account> from settings page

        usage:
            ui_steps.add_google_accout(version = "L",
                                       account = "account_email",
                                       paswword = "account_password")

        tags:
            ui, android, google, account, playstore, apps
    """

    def __init__(self, account=uiconfig.GoogleAccount.EMAIL_ID,
                 password=uiconfig.GoogleAccount.PASSWORD, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.account = account
        self.password = password
        # Bug fix: was self.debug_info.dessert — every sibling step reads
        # the Android dessert letter from device_info.
        self.version = self.device_info.dessert

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        # Navigate to the "Add account" entry; the path differs per release.
        if self.version == "L":
            ui_steps.click_button(serial=self.serial,
                                  view_to_find={"text": "Accounts"}, view_to_check={"text": "Add account"})()
            ui_steps.click_button(serial=self.serial,
                                  view_to_find={"text": "Add account"}, view_to_check={"text": "Google"})()
        elif self.version == "K":
            ui_steps.open_app_from_settings(serial=self.serial, view_to_find={"text": "Add account"},
                                            view_to_check={"text": "Add an account"})()
        elif self.version == "O":
            # O flow: Users & accounts -> Add account -> Google sign-in form.
            ui_steps.open_app_from_settings(serial=self.serial, view_to_find={"text": "Users & accounts"},
                                            view_to_check={"text": "Users & accounts"})()
            ui_steps.click_button(
                serial=self.serial, view_to_find={"text": "Add account"})()
            ui_steps.click_button(
                serial=self.serial, view_to_find={"text": "Google"})()
            ui_steps.edit_text(serial=self.serial, view_to_find={
                "text": "Email or phone"}, value=self.account)()
            ui_steps.click_button_common(
                serial=self.serial, view_to_find={"text": "NEXT"})()
            ui_steps.edit_text(serial=self.serial, view_to_find={"text": "Enter your password"},
                               value=self.password, is_password=True)()
            ui_steps.click_button_common(
                serial=self.serial, view_to_find={"text": "NEXT"})()
            ui_steps.click_button_common(
                serial=self.serial, view_to_find={"text": "I AGREE"})()
            # todo: check what else can be checked
            ui_steps.click_button_common(serial=self.serial,
                                         view_to_find={
                                             "resourceId": "com.google.android.gms:id/next_button"},
                                         view_to_check={"text": self.account})()  # todo: check what else can be checked
        if self.version != "O":
            # Legacy (K/L) GSF sign-in flow.
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Google"},
                                  view_to_check={"textContains": "Do you want to add an existing"})()
            # Bug fix: keyword was misspelled "view_to_chec" and silently
            # swallowed by **kwargs, so the check never ran.
            ui_steps.click_button(serial=self.serial, view_to_find={
                "text": "Existing"}, view_to_check={"text": "Sign in"})()
            ui_steps.edit_text(serial=self.serial,
                               view_to_find={
                                   "resourceId": "com.google.android.gsf.login:id/username_edit"},
                               value=self.account)()
            ui_steps.edit_text(serial=self.serial,
                               view_to_find={
                                   "resourceId": "com.google.android.gsf.login:id/password_edit"},
                               value=self.password, is_password=True)()
            ui_steps.click_button(serial=self.serial,
                                  view_to_find={
                                      "resourceId": "com.google.android.gsf.login:id/next_button"},
                                  view_to_check={"text": "OK"})()
            # Signing in can take a long time — wait generously for "OK".
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "OK"}, wait_time=99999,
                                  view_to_check={"text": "Google services"})()
            ui_steps.click_button(serial=self.serial, view_to_find={
                "descriptionContains": self.device_info.next_btn_id})()
            if self.version == "L":
                self.uidevice(textContains="Set up payment").wait.exists(
                    timeout=3000)
                ui_steps.click_button(serial=self.serial, view_to_find={"textContains": "Skip"}, wait_time=3000,
                                      view_to_check={"text": "Google"})()
            elif self.version == "K":
                # Bug fix: "serial" is not a UI selector property — passing
                # it made this selector never match; drop it.
                self.uidevice(text="Not now").wait.exists(
                    timeout=3000)
                ui_steps.click_button(serial=self.serial, view_to_find={"text": "Not now"}, wait_time=5000,
                                      view_to_check={"textContains": "Account sign"})()
                ui_steps.click_button(serial=self.serial,
                                      view_to_find={
                                          "resourceId": "com.google.android.gsf.login:id/next_button"},
                                      view_to_check={"text": "Settings"})()

    def check_condition(self):
        # Verify the account now shows up under the Google accounts list.
        if self.version == "L":
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Google"}, wait_time=3000,
                                  view_to_check={"text": self.account})()
        elif self.version == "K":
            ui_steps.open_app_from_settings(serial=self.serial, view_to_find={"text": "Google"},
                                            view_to_check={"text": self.account})()
        elif self.version == "O":
            ui_steps.click_button_common(serial=self.serial, view_to_find={"text": self.account},
                                         view_to_check={"text": self.account})()
class add_app_from_all_apps_to_homescreen(ui_step):

    """ description:
            Click on an App from App view and drag it
            to home screen

        usage:
            test_verification('app_to_drag_from_app_view')()

        tags:
            homescreen, drag app icon
    """

    def __init__(self, view_text, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_text = view_text
        self.set_passm(str(self.view_text))

    def do(self):
        ui_steps.press_all_apps(serial=self.serial)()
        # Locate the icon's center (integer division — Python 2 semantics).
        bounds = self.uidevice(text=self.view_text).info['bounds']
        self.x_coord = (bounds['left'] + bounds['right']) / 2
        self.y_coord = (bounds['bottom'] + bounds['top']) / 2
        # A zero-length swipe on the icon performs the long-press drag.
        ui_steps.swipe(serial=self.serial, sx=self.x_coord,
                       sy=self.y_coord, ex=self.x_coord, ey=self.y_coord)()

    def check_condition(self):
        self.uidevice.wait.update()
        return self.uidevice(text=self.view_text).wait.exists(timeout=1000)
class uninstall_app_from_apps_settings(ui_step, adb_step):

    """ description:
            unistalls <app_name> application from Apps page in settings
            it check that <package_name> package is not present
            anymore in pm list packages

        usage:
            ui_steps.uninstall_app_from_apps_settings(
                app_name = "Angry Birds",
                package_name = "com.rovio.angrybirdsstarwars.ads.iap")()

        tags:
            ui, android, uninstall, app, apps, application
    """

    # Bug fix: the class listed base_step as a base while __init__ called
    # adb_step.__init__ and check_condition used self.adb_connection (an
    # adb_step attribute); in Python 2 the unbound adb_step.__init__ call
    # raises TypeError unless self is an adb_step instance. Mirror the
    # (ui_step, adb_step) pattern used by open_picture_from_gallery.
    def __init__(self, app_name, package_name, **kwargs):
        self.app_name = app_name
        self.package_name = package_name
        ui_step.__init__(self, **kwargs)
        adb_step.__init__(self, **kwargs)

    def do(self):
        # App detail page -> Uninstall -> confirm with OK.
        ui_steps.click_button(serial=self.serial, view_to_find={"text": self.app_name},
                              view_to_check={"textContains": "Uninstall"})()
        ui_steps.click_button(serial=self.serial, view_to_find={
            "textContains": "Uninstall"}, view_to_check={"text": "OK"})()
        ui_steps.click_button(
            serial=self.serial,
            view_to_find={
                "text": "OK"})()
        # Play Store sometimes pops its own confirmation dialog.
        if self.uidevice(text="Google Play Store").wait.exists(timeout=1000):
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "OK"}, view_to_check={"text": "OK"},
                                  view_presence=False)()

    def check_condition(self):
        # Confirm the package is gone from the package manager's list.
        if self.package_name:
            command = "pm list packages"
            grep_stdout = self.adb_connection.parse_cmd_output(
                cmd=command,
                grep_for=self.package_name
            )
            return self.package_name not in grep_stdout
        else:
            return True
class uninstall_app(ui_step):

    """ description:
            unistalls <app_name> application from Apps starting from
            homepage
            it check that <package_name> package is not present
            anymore in pm list packages

        usage:
            ui_steps.uninstall_app(
                app_name = "Angry Birds",
                package_name = "com.rovio.angrybirdsstarwars.ads.iap")()

        tags:
            ui, android, uninstall, app, apps, application, homepage
    """

    def __init__(self, app_name, package_name, **kwargs):
        self.app_name = app_name
        self.package_name = package_name
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        # Bug fix: view_to_check was the bare (misspelled) string
        # "Donwloaded"; open_app_from_settings unpacks it with ** so a
        # string raises TypeError — pass a proper selector dict.
        ui_steps.open_app_from_settings(
            serial=self.serial, view_to_find={"text": "Apps"},
            view_to_check={"text": "Downloaded"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": self.app_name},
                              view_to_check={"textContains": "Uninstall"})()
        ui_steps.click_button(serial=self.serial, view_to_find={
            "textContains": "Uninstall"}, view_to_check={"text": "OK"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "OK"}, view_to_check={"text": "OK"},
                              view_presence=False)()

    def check_condition(self):
        # NOTE(review): relies on self.adb_connection although only ui_step
        # is a base — presumably ui_step provides it; confirm, or inherit
        # adb_step as uninstall_app_from_apps_settings does.
        if self.package_name:
            command = "pm list packages"
            grep_stdout = self.adb_connection.parse_cmd_output(
                cmd=command,
                grep_for=self.package_name
            )
            return self.package_name not in grep_stdout
        else:
            return True
class open_display_from_settings(ui_step):

    """ description:
            open display menu from settings

        usage:
            ui_steps.open_display_from_settings(view_to_check =
            {"text":"Daydream"})()

        tags:
            ui, android, press, click, app, application, settings
    """

    def __init__(self, view_to_check=None, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.view_to_check = view_to_check

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.open_app_from_settings(serial=self.serial, print_error="Failed to open Display",
                                        view_to_find={"text": "Display"}, view_to_check={"text": "Daydream"})()

    def check_condition(self):
        # Without an explicit check view, reaching this point is success.
        if self.view_to_check is not None:
            return self.uidevice(**self.view_to_check).wait.exists(timeout=1000)
        return True
class open_picture_from_gallery(ui_step, adb_step):

    """ description:
            open picture from gallery

        usage:
            ui_steps.open_picture_from_gallery()()

        tags:
            ui, android, press, click, picture, gallery
    """

    def __init__(self, view_to_check=None, **kwargs):
        ui_step.__init__(self, **kwargs)
        adb_step.__init__(self, **kwargs)
        self.view_to_check = view_to_check

    def do(self):
        ui_steps.open_app_from_allapps(
            serial=self.serial, view_to_find={'text': 'Gallery'})()
        # Tap roughly where the first thumbnail sits: horizontal center,
        # one third down the screen (Python 2 integer division).
        res = ui_utils.get_resolution(serial=self.serial)
        self.uidevice.click(int(res[0]) / 2, int(res[1]) / 3)

    def check_condition(self):
        if self.view_to_check is not None:
            return self.uidevice(**self.view_to_check).wait.exists(timeout=1000)
        return True
class enable_developer_options(ui_step):

    """ description:
            enables developer options in Settings

        usage:
            ui_steps.enable_developer_options()()

        tags:
            ui, android, developer, options
    """

    def __init__(self, intent=False, **kwargs):
        self.intent = intent
        ui_step.__init__(self, **kwargs)

    def do(self):
        if self.intent:
            # Jump straight to the DevelopmentSettings activity.
            ui_steps.am_start_command(
                serial=self.serial, component="com.android.settings/.DevelopmentSettings")()
            return
        # Classic unlock: tap "Build number" seven times on the About page.
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.click_button_common(
            serial=self.serial, view_to_find={"text": "System"}, optional=True)()
        ui_steps.click_button_common(serial=self.serial, view_to_find={"textContains": "About "},
                                     view_to_check={"text": "Build number"})()
        for _ in range(7):
            ui_steps.click_button_common(
                serial=self.serial, view_to_find={"text": "Build number"})()
        ui_steps.press_back(serial=self.serial)()

    def check_condition(self):
        if self.intent:
            return True
        return self.uidevice(
            text="Developer options").wait.exists(timeout=1000)
class disable_options_from_developer_options(ui_step):

    """ description:
            disables an option from developer options

        usage:
            ui_steps.disable_options_from_developer_options(developer_options =
            ["Verify apps over USB"])()

        tags:
            ui, android, disable, developer options
    """

    def __init__(self, developer_options, enabled=False, **kwargs):
        ui_step.__init__(self, **kwargs)
        # enabled: when True, the whole step is a no-op (options assumed
        # already in the desired state).
        self.enabled = enabled
        # List of option labels to switch OFF.
        self.dev_opts = developer_options
        self.set_passm("{0} is(are) disabled".format(developer_options))
        self.set_errorm(
            "", "One or more options from {0} could not be disabled".format(developer_options))

    def do(self):
        if not self.enabled:
            # Make sure Developer options is unlocked before opening it.
            if not ui_utils.is_developer_options_enabled(serial=self.serial):
                ui_steps.press_home(serial=self.serial)()
                ui_steps.enable_developer_options(serial=self.serial)()
            ui_steps.click_button_common(serial=self.serial, view_to_find={"text": "Developer options"},
                                         view_to_check={"text": "Take bug report"})()
            version = adb_utils.get_android_version(
                serial=self.serial, full_version_name=True)
            # Android 5.0 renders these options as checkboxes, later
            # releases as switches.
            is_switch = True
            if version == "5.0":
                is_switch = False
            for opt in self.dev_opts:
                ui_steps.click_checkbox_button(serial=self.serial, view_to_find={"text": opt}, state="OFF", scrollable=True,
                                               is_switch=is_switch, relationship="right")()

    def check_condition(self):
        # Verification is done by click_checkbox_button itself.
        return True
class enable_options_from_developer_options(ui_step):

    """ description:
            enables an option from developer options
            if <enabled> parameter is True, <Developer options> is enabled

        usage:
            ui_steps.enable_options_from_developer_options(developer_options =
            ["Verify apps over USB"])()

        tags:
            ui, android, enable, developer options
    """

    def __init__(
            self, developer_options, enabled=False, confirm_view="Enable", **kwargs):
        # enabled: when True, the whole step is a no-op (options assumed
        # already in the desired state).
        self.enabled = enabled
        # Label of the confirmation dialog button some options pop up.
        self.confirm_view = confirm_view
        ui_step.__init__(self, **kwargs)
        # List of option labels to switch ON.
        self.dev_opts = developer_options
        self.set_passm("{0} is(are) enabled".format(developer_options))
        self.set_errorm(
            "", "One or more options from {0} could not be enabled".format(developer_options))

    def do(self):
        if not self.enabled:
            # Make sure Developer options is unlocked before opening it.
            if not ui_utils.is_developer_options_enabled(serial=self.serial):
                ui_steps.press_home(serial=self.serial)()
                ui_steps.enable_developer_options(serial=self.serial)()
            ui_steps.click_button_common(serial=self.serial, view_to_find={"text": "Developer options"},
                                         view_to_check={"text": "Take bug report"})()
            version = adb_utils.get_android_version(
                serial=self.serial, full_version_name=True)
            # Android 5.0 renders these options as checkboxes, later
            # releases as switches.
            is_switch = True
            if version == "5.0":
                is_switch = False
            for opt in self.dev_opts:
                ui_steps.click_checkbox_button(serial=self.serial, view_to_find={"text": opt}, is_switch=is_switch,
                                               scrollable=True, confirm_view={
                                                   "text": self.confirm_view},
                                               relationship="right")()

    def check_condition(self):
        # Verification is done by click_checkbox_button itself.
        return True
class enable_oem_unlock(ui_step):

    """ description:
            enables Oem unlock from "Developer options"

        usage:
            ui_steps.enable_oem_unlock()()

        tags:
            ui, android, enable, developer options
    """

    def __init__(self, enabled=False, **kwargs):
        self.enabled = enabled
        ui_step.__init__(self, **kwargs)

    def do(self):
        if self.enabled:
            # Already enabled — nothing to do.
            return
        ui_steps.press_home(serial=self.serial)()
        ui_steps.enable_developer_options(
            serial=self.serial,
            intent=True)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Developer options"},
                              view_to_check={"text": "Take bug report"})()
        version = adb_utils.get_android_version(
            serial=self.serial, full_version_name=True)
        # 5.0 shows a checkbox labelled "Enable OEM unlock"; later
        # releases use a switch labelled "OEM unlocking".
        if version == "5.0":
            is_switch, oem_switch_text = False, "Enable OEM unlock"
        else:
            is_switch, oem_switch_text = True, "OEM unlocking"
        ui_steps.click_checkbox_button(serial=self.serial, view_to_find={"text": oem_switch_text}, is_switch=is_switch,
                                       scrollable=True, confirm_view=self.device_info.oem_unlock_btn_id,
                                       relationship="right")()

    def check_condition(self):
        # Check performed in the last step from do()
        return True
class allow_unknown_sources(ui_step):

    """ description:
            enables/disables Unknwon sources according to <state>

        usage:
            cts_steps.allow_unknown_sources(state = "ON")()

        tags:
            ui, android, cts, allow, unknown_sources
    """

    def __init__(self, state="ON", **kwargs):
        self.state = state
        ui_step.__init__(self, **kwargs)
        self.set_passm("Allow unknown sources is {0}".format(self.state))
        self.set_errorm(
            "", "Allow unknown sources could not be set {0}".format(self.state))

    def do(self):
        ui_steps.open_security_settings(serial=self.serial)()
        self.uidevice(scrollable=True).scroll.to(text="Unknown sources")
        toggle_kwargs = dict(serial=self.serial,
                             view_to_find={"text": "Unknown sources"},
                             state=self.state, is_switch=True,
                             relationship="right")
        if self.state == "ON":
            # Enabling pops a warning dialog that must be confirmed with OK.
            toggle_kwargs["confirm_view"] = {"text": "OK"}
        ui_steps.click_checkbox_button(**toggle_kwargs)()

    def check_condition(self):
        return ui_utils.is_checkbox_checked(serial=self.serial, view_to_find={"text": "Unknown sources"},
                                            is_switch=True, relationship="right")
class put_device_into_sleep_mode(ui_step):

    """ description:
            sets the device into sleep mode with sleep button
            checks the logcat for sleep message
            fails if the DUT is already in sleep mode

        usage:
            ui_steps.put_device_into_sleep_mode()()

        tags:
            ui, android, sleep
    """

    def __init__(self, tries=5, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.tries = tries

    def do(self):
        # Press "sleep" repeatedly until the screen reports OFF, giving it
        # a few seconds to settle after each attempt.
        remaining = self.tries
        while adb_utils.is_power_state(serial=self.serial, state="ON") and remaining > 0:
            self.uidevice.sleep()
            time.sleep(3)
            remaining -= 1
        self.tries = remaining

    def check_condition(self):
        return adb_utils.is_power_state(serial=self.serial, state="OFF")
class wake_up_device(ui_step):

    """ description:
            wakes the device from sleep with sleep button
            checks the logcat for wake message

        usage:
            ui_steps.wake_up_device()()

        tags:
            ui, android, wake
    """

    def __init__(self, tries=5, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.tries = tries

    def do(self):
        # Keep the screen on while attached, then press "wakeup" until the
        # power state reports ON, giving it time to settle between tries.
        adb_utils.stay_on(serial=self.serial)
        remaining = self.tries
        while adb_utils.is_power_state(serial=self.serial, state="OFF") and remaining > 0:
            self.uidevice.wakeup()
            time.sleep(3)
            remaining -= 1
        self.tries = remaining

    def check_condition(self):
        return adb_utils.is_power_state(serial=self.serial, state="ON")
class unlock_device_swipe(ui_step):

    """ description:
            unlocks the screen with swipe

        usage:
            ui_steps.unlock_device_swipe()()

        tags:
            ui, android, unlock
    """

    def __init__(self, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.set_passm("Unlock device - swipe")
        self.set_errorm("", "Unlock device - swipe")

    def do(self):
        # The screen may be off on some low performance devices — wake it
        # before performing the upward unlock swipe.
        self.uidevice.wakeup()
        ui_steps.swipe(serial=self.serial,
                       sx=200, sy=600,
                       ex=200, ey=0,
                       steps=15)()
        time.sleep(2)

    def check_condition(self):
        return not ui_utils.is_device_locked(serial=self.serial)
class unlock_device_pin(ui_step):

    """ description:
            unlocks the screen with PIN

        usage:
            ui_steps.unlock_device_pin(pin = "1234")()

        tags:
            ui, android, unlock, PIN
    """

    def __init__(self, pin="1234", wrong_pin=False, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.pin = pin
        # wrong_pin: when True, the step passes only if the device STAYS
        # locked after entering the (deliberately wrong) PIN.
        self.wrong_pin = wrong_pin
        self.set_passm("Unlock device - PIN")
        self.set_errorm("", "Unlock device - PIN")

    def do(self):
        # The screen may be off on some low performance devices.
        self.uidevice.wakeup()
        self.uidevice(descriptionContains="PIN area").wait.exists(timeout=1000)
        ui_steps.edit_text(serial=self.serial, view_to_find={"descriptionContains": "PIN area"},
                           is_password=True, value=self.pin)()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"descriptionContains": "Enter"})()
        self.uidevice(descriptionContains="Enter").wait.gone()

    def check_condition(self):
        locked = ui_utils.is_device_pin_locked(serial=self.serial)
        if self.wrong_pin:
            return locked
        return not locked
class unlock_device(ui_step):

    """ description:
            unlocks the screen with swipe and/or PIN

        usage:
            ui_steps.unlock_device()()

        tags:
            ui, android, unlock
    """

    def __init__(self, pin=None, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.set_passm("Unlock device with swipe and/or PIN")
        self.set_errorm("", "Unlock device with swipe and/or PIN")
        self.pin = pin

    def do(self):
        # The screen may be off on some low performance devices.
        self.uidevice.wakeup()
        if ui_utils.is_device_locked(serial=self.serial):
            ui_steps.unlock_device_swipe(serial=self.serial)()
        if ui_utils.bxtp_car_locked(serial=self.serial):
            # Automotive lock screen: tap the user name to proceed.
            ui_steps.click_button(serial=self.serial, view_to_find={
                "resourceId": "com.android.systemui:id/user_name"})()
        if self.pin and ui_utils.is_device_pin_locked(serial=self.serial):
            ui_steps.unlock_device_pin(serial=self.serial, pin=self.pin)()

    def check_condition(self):
        return not ui_utils.is_device_locked(serial=self.serial)
class perform_startup_wizard(ui_step):
""" description:
performs start-up wizard
usage:
ui_steps.perform_startup_wizard(serial = "some_serial")()
tags:
ui, android, startup, wizard
"""
def __init__(self, wait_time=2000, **kwargs):
self.wait_time = wait_time
ui_step.__init__(self, **kwargs)
def do(self):
print "[ {0} ]: Set startup wizard language to United States if necessary".format(self.serial)
if not self.uidevice(textContains=self.device_info.predefined_language_text_id)\
.wait.exists(timeout=self.wait_time):
if self.uidevice(resourceId="android:id/numberpicker_input").wait.exists(timeout=self.wait_time):
view_to_scroll = {
"resourceId": "android:id/numberpicker_input"}
else:
ui_steps.click_button(serial=self.serial,
view_to_find={"resourceId":
"com.google.android.setupwizard:id/language_picker"})()
view_to_scroll = {
"resourceId": "android:id/select_dialog_listview"}
if not self.uidevice(textContains=self.device_info.predefined_language_text_id)\
.wait.exists(timeout=self.wait_time):
ui_steps.scroll_to_text_from_scrollable(text_to_find=self.device_info.predefined_language_text_id,
serial=self.serial, view_to_scroll=view_to_scroll,
iterations=200,
direction="down", critical=False, blocking=False)()
ui_steps.scroll_to_text_from_scrollable(text_to_find=self.device_info.predefined_language_text_id,
serial=self.serial, view_to_scroll=view_to_scroll,
iterations=200, direction="up")()
else:
ui_steps.click_button(
serial=self.serial, view_to_find={"textContains": "United State"})()
if self.device_info.dessert == "M":
ui_steps.click_button(
serial=self.serial, view_to_find={"textContains": "United State"})()
print "[ {0} ]: Start performing startup wizard".format(self.serial)
ui_steps.click_button(serial=self.serial, view_to_find={"resourceId":
"com.google.android.setupwizard:id/start"},
view_to_check={"descriptionContains": "Back"})()
print "[ {0} ]: Set up as new".format(self.serial)
if self.uidevice(text="Set up as new").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"text": "Set up as new"}, wait_time=self.wait_time,
view_to_check={"text": "Get connected"})()
print "[ {0} ]: Skip or configure SIM settings if necessary".format(self.serial)
if self.uidevice(textContains="SIM").wait.exists(timeout=self.wait_time):
if self.uidevice(textContains="Skip").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"text": "Skip"}, wait_time=self.wait_time,
view_to_check={"textContains": "network"})()
elif self.uidevice(textContains=self.device_info.next_btn_id).wait.exists(timeout=self.wait_time):
print "[ {0} ]: Selecting a SIM for cellular data".format(self.serial)
ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
wait_time=self.wait_time, view_to_check={"textContains": "a SIM for calls"})()
print "[ {0} ]: Selecting a SIM for calls".format(self.serial)
ui_steps.click_button(serial=self.serial,
view_to_find={
"className": "android.widget.RadioButton", "instance": 1},
wait_time=self.wait_time, view_to_check={"textContains": "a SIM for calls"})()
ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
wait_time=self.wait_time, view_to_check={"textContains": "a SIM for text "
"messages"})()
print "[ {0} ]: Selecting a SIM for text messages".format(self.serial)
ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
wait_time=self.wait_time, view_to_check={"textContains": "network"})()
print "[ {0} ]: Skip network settings".format(self.serial)
ui_steps.click_button(serial=self.serial, view_to_find=self.device_info.skip_wifi_btn_id,
view_to_check=self.device_info.wifi_skip_anyway_btn_id)()
ui_steps.click_button(
serial=self.serial, view_to_find=self.device_info.wifi_skip_anyway_btn_id)()
print "[ {0} ]: Wait for connection and update checking if necessary".format(self.serial)
if self.uidevice(textContains="Checking connection").wait.exists(timeout=self.wait_time):
timeout = 2 * self.wait_time * 60
wait_time = 0
while not self.uidevice(descriptionContains="Add your account")\
.wait.exists(timeout=1000) and wait_time < timeout:
wait_time += self.wait_time
time.sleep(self.wait_time / 1000)
ui_steps.wake_up_device(serial=self.serial)()
if self.uidevice(textContains="Got another device").wait.exists(timeout=self.wait_time):
print "[ {0} ]: Skip copying stuff from another device".format(self.serial)
ui_steps.click_button(serial=self.serial, view_to_find={"text": "No thanks"},
view_to_check={"text": self.device_info.next_btn_id})()
ui_steps.click_button(
serial=self.serial, view_to_find={"text": self.device_info.next_btn_id})()
print "[ {0} ]: Checking connection page disappeared in {1} seconds"\
.format(self.serial, int(wait_time / 1000))
if self.uidevice(descriptionContains="Add your account").wait.exists(timeout=1000):
print "[ {0} ]: Add your account page appeared".format(self.serial)
wait_time = 0
while not self.uidevice(descriptionContains="Or create a new account")\
.wait.exists(timeout=1000) and wait_time < timeout:
wait_time += self.wait_time
time.sleep(self.wait_time / 1000)
ui_steps.wake_up_device(serial=self.serial)()
if wait_time < timeout:
print "[ {0} ]: 'Or create a new account' option appeared in {1} seconds"\
.format(self.serial, int(wait_time / 1000))
wait_time = 0
while not self.uidevice(resourceId="skip").wait.exists(timeout=1000) and wait_time < timeout:
wait_time += self.wait_time
time.sleep(self.wait_time / 1000)
ui_steps.wake_up_device(serial=self.serial)()
if wait_time < timeout:
print "[ {0} ]: Skip option appeared in {1} seconds".format(self.serial, int(wait_time / 1000))
ui_steps.click_button(serial=self.serial, view_to_find={"resourceId": "skip"},
view_to_check={"descriptionContains": "Skip account setup"})()
self.uidevice.press(61)
self.uidevice.press("enter")
self.uidevice(descriptionContains="Skip account setup").wait.gone(
timeout=self.wait_time)
else:
print "[ {0} ]: Skip option did not appear before the timeout of {1} seconds"\
.format(self.serial, int(timeout))
else:
print "[ {0} ]: 'Or create a new account' option did not appear before the timeout of {1} " \
"seconds".format(self.serial, int(timeout))
print "[ {0} ]: Skip Google Services".format(self.serial)
if self.uidevice(textContains="Google services").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"resourceId":
"com.google.android.gms:id/suw_navbar_more"},
view_to_check={"text": self.device_info.next_btn_id})()
ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
view_to_check={"textContains": "Date"})()
print "[ {0} ]: Accept Date page if necessary".format(self.serial)
if self.uidevice(textContains="Date").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
view_to_check={"text": "Name"})()
print "[ {0} ]: Accept Name page".format(self.serial)
ui_steps.click_button(
serial=self.serial, view_to_find={"text": self.device_info.next_btn_id})()
print "[ {0} ]: Skip email setup".format(self.serial)
if self.uidevice(textContains="Set up email").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"text": "Not now"},
view_to_check={"text": self.device_info.next_btn_id})()
ui_steps.click_button(
serial=self.serial, view_to_find={"text": self.device_info.next_btn_id})()
print "[ {0} ]: Skip PIN settings if necessary".format(self.serial)
if self.uidevice(resourceId="com.google.android.setupwizard:id/lock_screen_intro_check_box")\
.wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial,
view_to_find={
"resourceId": "com.google.android.setupwizard:id/lock_screen_intro_check_box"},
view_to_check=self.device_info.skip_pin_btn_id)()
ui_steps.click_button(serial=self.serial, view_to_find=self.device_info.skip_pin_btn_id,
view_to_check=self.device_info.skip_anyway_btn_id)()
ui_steps.click_button(
serial=self.serial, view_to_find=self.device_info.skip_anyway_btn_id)()
print "[ {0} ]: Collapse Google services page if necessary".format(self.serial)
if self.uidevice(text="More").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"text": "More"},
view_to_check={"text": self.device_info.next_btn_id})()
if self.uidevice(description="More").wait.exists(timeout=self.wait_time):
ui_steps.click_button(serial=self.serial, view_to_find={"description": "More"},
view_to_check={"text": self.device_info.next_btn_id})()
print "[ {0} ]: Finish setup wizard".format(self.serial)
ui_steps.click_button(
serial=self.serial, view_to_find=self.device_info.finish_startup_btn_id)()
print "[ {0} ]: Setup wizard performed".format(self.serial)
def check_condition(self):
    """Pass only when the DUT has landed on the homescreen."""
    on_home = ui_utils.is_homescreen(serial=self.serial)
    return on_home
class perform_startup_wizard_for_new_user(ui_step):
    """ description:
            performs the start-up wizard for a newly created user,
            typing <user_name> on the "Name" page; the flow depends on
            the Android dessert (M or L)
        usage:
            ui_steps.perform_startup_wizard_for_new_user(
                serial = "some_serial", user_name = "New user")()
        tags:
            ui, android, startup, wizard, user
    """

    def __init__(self, user_name, **kwargs):
        ui_step.__init__(self, **kwargs)
        # name entered on the wizard's "Name" page
        self.user_name = user_name

    def do(self):
        if self.device_info.dessert == "M":
            # collapse the welcome page if a "More" button is shown
            if ui_utils.is_view_displayed(serial=self.serial, view_to_find={"description": "More"}):
                ui_steps.click_button(serial=self.serial, view_to_find={"description": "More"},
                                      view_to_check={"text": self.device_info.next_btn_id})()
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Continue"},
                                  view_to_check={"text": self.device_info.next_btn_id})()
            ui_steps.click_button(serial=self.serial, view_to_find={"text": self.device_info.next_btn_id},
                                  view_to_check={"textContains": "network"})()
        elif self.device_info.dessert == "L":
            ui_steps.click_button(serial=self.serial, view_to_find={"resourceId":
                                                                    "com.google.android.setupwizard:id/start"},
                                  view_to_check={"text": "Skip"})()
        # skip SIM setup if the page is shown
        if self.uidevice(textContains="SIM").wait.exists(timeout=1000):
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Skip"}, wait_time=10000,
                                  view_to_check={"textContains": "network"})()
        # skip network selection (confirm with "Skip anyway")
        ui_steps.click_button(serial=self.serial, view_to_find={
            "text": "Skip"}, view_to_check={"text": "Skip anyway"})()
        ui_steps.click_button(serial=self.serial, view_to_find={
            "text": "Skip anyway"}, view_to_check={"text": "Name"})()
        # type the user name on the "Name" page
        ui_steps.edit_text(serial=self.serial, view_to_find={
            "className": "android.widget.EditText"}, value=self.user_name)()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"text": self.device_info.next_btn_id})()
        # collapse the Google services page if needed; "More" may be a
        # text or a content description depending on the build
        if ui_utils.is_view_displayed(serial=self.serial, view_to_find={"text": "More"}):
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "More"},
                                  view_to_check={"text": self.device_info.next_btn_id})()
        if ui_utils.is_view_displayed(serial=self.serial, view_to_find={"description": "More"}):
            ui_steps.click_button(serial=self.serial, view_to_find={"description": "More"},
                                  view_to_check={"text": self.device_info.next_btn_id})()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"text": self.device_info.next_btn_id})()
        time.sleep(2)
        # dismiss the final "GOT IT" popup if present
        if ui_utils.is_view_displayed(serial=self.serial, view_to_find={"text": "GOT IT"}):
            ui_steps.click_button(
                serial=self.serial,
                view_to_find={
                    "text": "GOT IT"})()

    def check_condition(self):
        # every click above already verified its expected view
        return True
class set_orientation(ui_step):
    """ description:
            sets the orientation of the device
            available options:
                - phones: portrait, reverse-landscape and landscape
                - tablets: portrait, reverse-portrait and landscape
        usage:
            ui_steps.set_orientation(orientation = "landscape",
                                     target = "phone")()
        tags:
            ui, android, orientation, display
    """
    # Maps (target device type, logical orientation) to the uiautomator
    # orientation value ("natural"/"left"/"right").
    __orientation = {}
    __orientation["tablet"] = {}
    __orientation["phone"] = {}
    __orientation["tablet"]["landscape"] = "right"
    __orientation["tablet"]["portrait"] = "natural"
    __orientation["tablet"]["reverse-portrait"] = "left"
    # upsidedown cannot be set
    # _orientation["tablet"]["reverse-landscape"] = "upsidedown"
    __orientation["phone"]["landscape"] = "left"
    __orientation["phone"]["portrait"] = "natural"
    __orientation["phone"]["reverse-landscape"] = "right"
    # upsidedown cannot be set
    # _orientation["phone"]["reverse-landscape"] = "upsidedown"

    def __init__(self, orientation="landscape", target="tablet", **kwargs):
        ui_step.__init__(self, **kwargs)
        self.orientation = orientation
        self.target = target
        self.set_passm("Setting orientation to {0}".format(self.orientation))
        # bug fix: the error message previously dropped the orientation
        # value because the "{0}" placeholder was missing
        self.set_errorm("", "Setting orientation to {0}".format(self.orientation))

    def do(self):
        # translate the human-readable orientation into the uiautomator one
        self.uidevice.orientation = self.__orientation[
            self.target][self.orientation]
        # give the UI a moment to rotate
        time.sleep(1)

    def check_condition(self):
        return self.uidevice.orientation == self.__orientation[
            self.target][self.orientation]
class close_app_from_recent(ui_step):
    """ description:
            Close application from recent apps.
            If you have multiple applications opened this step will scroll
            through recent apps until it finds the view or the list is over.
        usage:
            ui_steps.close_app_from_recent(view_to_find=
                                           {"text": "YouTube"})()
        tags:
            ui, android, scroll,recent apps,close app, close
    """

    def __init__(self, view_to_find, **kwargs):
        ui_step.__init__(self, **kwargs)
        # selector identifying the recent-apps card to dismiss
        self.view_to_find = view_to_find

    def do(self):
        ui_steps.press_home(serial=self.serial)()
        ui_steps.press_recent_apps(serial=self.serial)()
        # scroll through the recent list; swipe the card away if found
        if ui_utils.swipe_to_app_from_recent(serial=self.serial, view_to_find=self.view_to_find):
            self.uidevice(**self.view_to_find).swipe.right()

    def check_condition(self):
        # the app must no longer be reachable in the recent list
        return not ui_utils.swipe_to_app_from_recent(
            serial=self.serial, view_to_find=self.view_to_find)
class open_widget_section(ui_step):
    """ description:
            opens the widget section on L dessert
        usage:
            open_widget_section()()
        tags:
            android, L, ui, widget, homescreen
    """

    def do(self):
        ui_steps.press_home(serial=self.serial)()
        # compute the center of the homescreen page indicator
        page_indicator = self.uidevice(
            resourceId="com.google.android.googlequicksearchbox:id/page_indicator")
        x = (page_indicator.info["bounds"]["left"] +
             page_indicator.info["bounds"]["right"]) / 2
        y = (page_indicator.info["bounds"]["top"] +
             page_indicator.info["bounds"]["bottom"]) / 2
        # a zero-distance swipe with many steps acts as a long-press,
        # opening the homescreen options menu
        ui_steps.swipe(serial=self.serial, sx=x, sy=y, ex=x, ey=y, steps=100)()
        # NOTE(review): serial is not passed here, unlike every other call
        # in this step -- confirm this is intentional
        ui_steps.click_button(view_to_find={"text": "Widgets"})()

    def check_condition(self):
        # "Analog clock" is expected to be present in the widget list
        return self.uidevice(text="Analog clock").wait.exists(timeout=1000)
class add_widget_to_homescreen(ui_step):
    """ description:
            adds a widget to the homescreen. Homescreen should be empty
        usage:
            add_widget_to_homescreen(widget_name = "Sound search")()
        tags:
            android, ui, widget, homescreen
    """

    def __init__(self, widget_name, displayed_name, **kwargs):
        # name of the widget in the widget picker
        self.widget_name = widget_name
        # text the widget shows once placed on the homescreen
        self.displayed_name = displayed_name
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_widget_section(serial=self.serial)()
        # scroll left through the picker pages until the widget is visible
        if ui_utils.is_text_visible_scroll_left(serial=self.serial, text_to_find=self.widget_name):
            widget = self.uidevice(text=self.widget_name)
            # center of the widget entry
            x = (widget.info["bounds"]["left"] +
                 widget.info["bounds"]["right"]) / 2
            y = (widget.info["bounds"]["top"] +
                 widget.info["bounds"]["bottom"]) / 2
            # a zero-distance swipe with many steps acts as a long-press,
            # dropping the widget onto the (empty) homescreen
            ui_steps.swipe(
                serial=self.serial,
                sx=x,
                sy=y,
                ex=x,
                ey=y,
                steps=100)()
            self.step_data = True
        else:
            # widget not present in the picker
            self.step_data = False

    def check_condition(self):
        self.uidevice.wait.update()
        # the widget must now be visible on the homescreen
        return self.step_data and self.uidevice(
            textContains=self.displayed_name).wait.exists(timeout=1000)
class open_add_google_account_wizard(ui_step):
    """ description:
            opens add google account wizard from Settings
            (Settings -> Accounts -> Add account -> Google)
        usage:
            ui_steps.open_add_google_account_wizard()()
        tags:
            ui, android, google, account
    """

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Accounts"},
                              view_to_check={"text": "Add account"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Add account"},
                              view_to_check={"text": "Google"})()
        # the wizard's first page asks for the email address
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Google"},
                              view_to_check={"descriptionContains": "Enter your email"})()
class open_google_account_for_edit(ui_step):
    """ description:
            opens google accounts for editing
            (Settings -> Accounts -> Google)
        usage:
            ui_steps.open_google_account_for_edit(serial = serial)()
        tags:
            ui, android, google, account
    """

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Accounts"},
                              view_to_check={"text": "Google"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Google"},
                              view_to_check={"resourceId": "android:id/title", "text": "Accounts"})()
class remove_google_account(ui_step):
    """ description:
            removes gmail account given its name
        usage:
            ui_steps.remove_google_account(account = "intelchat002@gmail.com")()
        tags:
            ui, android, google, account
    """

    def __init__(self, account="intelchat002@gmail.com", **kwargs):
        # (partial) name of the account to remove; matched via textContains
        self.account = account
        ui_step.__init__(self, **kwargs)

    def do(self):
        # remember how many accounts exist so the removal can be verified
        self.acc_no = gms_utils.get_google_account_number(serial=self.serial)
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Accounts"},
                              view_to_check={"text": "Add account"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Google"},
                              view_to_check={"text": "Google"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"textContains": self.account},
                              view_to_check={"text": "Sync"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"description": "More options"},
                              view_to_check={"text": "Remove account"})()
        # first "Remove account" opens the confirmation dialog
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Remove account"},
                              view_to_check={"textContains": "Removing this account"})()
        # second click confirms the removal
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Remove account"},
                              view_to_check={"text": "Accounts"})()

    def check_condition(self):
        # exactly one account fewer than before the step
        acc_no = gms_utils.get_google_account_number(serial=self.serial)
        ui_steps.close_app_from_recent(
            serial=self.serial, view_to_find={"text": "Settings"})()
        return acc_no == (self.acc_no - 1)
class remove_all_google_accounts(ui_step):
    """ description:
            removes all gmail accounts
        usage:
            ui_steps.remove_all_google_accounts()()
        tags:
            ui, android, google, account
    """

    def do(self):
        # every google account matches "gmail.com"; keep removing until
        # none are left
        while gms_utils.get_google_account_number(serial=self.serial) > 0:
            ui_steps.remove_google_account(
                serial=self.serial,
                account="gmail.com")()
class show_as_list(ui_step):
    """ description:
            Show as list when grid or list is available in More options
        usage:
            ui_steps.show_as_list(serial = serial)()
        tags:
            ui, android, list, grid, open
    """

    def do(self):
        ui_steps.click_button(serial=self.serial, view_to_find={"description": "More options"},
                              view_to_check={"textContains": "view"})()
        self.uidevice.wait.idle()
        # if "List view" is absent the content is already shown as a list;
        # just close the menu
        if not ui_steps.click_button(serial=self.serial, view_to_find={"text": "List view"}, optional=True)():
            self.uidevice.press.back()
class close_all_app_from_recent(ui_step):
    """ description:
            close all applications from the recent apps list by swiping
            each entry away
        usage:
            ui_steps.close_all_app_from_recent()()
        tags:
            ui, android, recent, close app,
    """
    # NOTE: the redundant pass-through __init__ was removed; ui_step's
    # constructor is inherited unchanged.

    def do(self):
        ui_steps.press_home(serial=self.serial)()
        ui_steps.press_recent_apps(serial=self.serial)()
        # wait for at least one recent-app card to show up
        self.uidevice(
            resourceId="com.android.systemui:id/task_view_content").wait.exists(timeout=20000)
        # swipe every card away until the list is empty
        while self.uidevice(resourceId="com.android.systemui:id/task_view_content").count > 0:
            self.uidevice(
                resourceId="com.android.systemui:id/task_view_content").swipe.right()

    def check_condition(self):
        ui_steps.press_home(serial=self.serial)()
        ui_steps.press_recent_apps(serial=self.serial)()
        # the empty-list placeholder text confirms everything was closed
        exist_stat = self.uidevice(
            text="Your recent screens appear here").wait.exists(timeout=1000)
        ui_steps.press_home(serial=self.serial)()
        return exist_stat
class ClearRecentApps(ui_step):
    """ Description:
            Clear all apps from the recent apps window
        Usage:
            bluetooth_steps.ClearRecentApps(serial=serial)
    """

    def __init__(self, app=None, timeout=8000, **kwargs):
        super(ClearRecentApps, self).__init__(**kwargs)
        # NOTE(review): 'app' is stored but never used in this step
        self.app = app
        self.step_data = True
        self.timeout = timeout
        self.set_passm("Cleared all recent apps")

    def do(self):
        try:
            ui_steps.press_recent_apps(serial=self.serial)()
            # no dismiss control -> the recent list is already empty
            if not ui_steps.wait_for_view(view_to_find={"resourceId": "com.android.systemui:id/dismiss_task"},
                                          serial=self.serial, optional=True)():
                self.set_passm("No recent apps to clear")
            else:
                if self.uidevice(textContains="CLEAR ALL").exists:
                    self.uidevice(textContains="CLEAR ALL").click()
                else:
                    # the button may be scrolled off-screen; scroll back to
                    # the top a few times and retry
                    for _ in range(5):
                        self.uidevice(scrollable=True).scroll.toBeginning()
                        if self.uidevice(textContains="CLEAR ALL").exists:
                            self.uidevice(textContains="CLEAR ALL").click()
                            break
        except Exception as e:
            # str(e) instead of the deprecated, Py3-incompatible e.message
            self.set_errorm(str(e), "Failed to clear recent apps")
            self.step_data = False

    def check_condition(self):
        time.sleep(1)
        # any surviving dismiss control means the clear did not complete
        if self.step_data and self.uidevice(resourceId="com.android.systemui:id/dismiss_task").exists:
            self.step_data = False
            self.set_errorm("ClearRecentApps", "Still some apps are "
                                               "available in recent apps window")
        self.uidevice.press("home")
        return self.step_data
class set_timezone_from_settings(ui_step):
    """ description:
            Configures system timezone to a specified value.
        usage:
            ui_steps.set_timezone_from_settings(serial = serial,
                                                timezone = "London")()
        tags:
            ui, android, timezone
    """

    def __init__(self, timezone="London", **kwargs):
        # display name of the timezone entry to select
        self.timezone = timezone
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Date & time"},
                              view_to_check={"text": "Select time zone"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Select time zone"},
                              view_to_check={"description": "Navigate up"})()
        # scroll until the timezone entry becomes visible: first a short,
        # non-critical scroll, then a longer one with more iterations
        if not self.uidevice(text=self.timezone).wait.exists(timeout=1000):
            ui_steps.scroll_up_to_view(serial=self.serial, view_to_check={
                "text": self.timezone}, ey=100, critical=False)()
        if not self.uidevice(text=self.timezone).wait.exists(timeout=1000):
            ui_steps.scroll_up_to_view(serial=self.serial, view_to_check={
                "text": self.timezone}, ey=500, iterations=20)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": self.timezone},
                              view_to_check={"text": "Select time zone"})()

    def check_condition(self):
        # back on the "Date & time" screen after the selection
        return self.uidevice(text="Select time zone").wait.exists(timeout=1000)
class sync_google_account(ui_step):
    """ description:
            Attempts to sync an existing google account.
            Returns True on success, False on failure.
            Sync is attempted <max_attempts> times
        usage:
            ui_steps.sync_google_account(serial = serial,
                                         account = "account@gmail.com",
                                         password = "password",
                                         max_attempts = 42)()
        tags:
            ui, android, google, account, sync
    """

    def __init__(self,
                 account="intelchat002@gmail.com",
                 password="intel002",
                 max_attempts=2,
                 **kwargs):
        self.account = account
        self.password = password
        # number of sync attempts before giving up
        self.max_attempts = max_attempts
        ui_step.__init__(self, **kwargs)

    def do(self):
        try:
            # NOTE(review): assumes self.step_data is pre-initialised to a
            # non-True value by the ui_step base class -- confirm
            while self.step_data is not True and self.max_attempts > 0:
                ui_steps.open_google_account_for_edit(serial=self.serial)()
                ui_steps.click_button(serial=self.serial, view_to_find={"text": self.account},
                                      view_to_check={"resourceId": "com.android.settings:id/user_id",
                                                     "text": self.account})()
                ui_steps.click_button(serial=self.serial, view_to_find={"description": "More options"},
                                      view_to_check={"text": "Sync now"})()
                ui_steps.click_button(serial=self.serial, view_to_find={"text": "Sync now"},
                                      view_to_check={"description": "More options"})()
                ui_steps.click_button(serial=self.serial, view_to_find={"description": "More options"},
                                      view_to_check={"text": "Remove account"})()
                # "Sync now" reappears (within 60s) once the sync finished
                if self.uidevice(text="Sync now").wait.exists(timeout=60000):
                    self.uidevice.press.back()
                    if self.uidevice(textContains="experiencing problems").wait.exists(timeout=1000):
                        # sync failed: re-enter credentials, then retry
                        self.step_data = False
                        ui_steps.handle_google_action_required(serial=self.serial, account=self.account,
                                                               password=self.password)()
                    else:
                        self.step_data = True
                else:
                    self.step_data = False
                self.max_attempts = self.max_attempts - 1
        except Exception:
            # best-effort step: log the failure and report via step_data
            self.logger.error(traceback.format_exc())
            self.step_data = False
            pass

    def check_condition(self):
        # TODO: proper check_condition after implementing non-inheritable
        # failures
        return True
class handle_google_account_action_required(ui_step) if False else None  # placeholder
class set_orientation_vertical(ui_step):
    """ description:
            Sets the device orientation to the 'portrait' or 'landscape' as defined
            for devices of type phone.
        usage:
            ui_steps.set_orientation_vertical(serial = serial, orientation='portrait')()
        tags:
            ui, android, orientation, display
    """
    # mapping from (device type, orientation) to uiautomator orientation value
    __orientation = {}
    __orientation["tablet"] = {}
    __orientation["phone"] = {}
    __orientation["tablet"]["landscape"] = "right"
    __orientation["tablet"]["portrait"] = "natural"
    __orientation["tablet"]["reverse-portrait"] = "left"
    __orientation["phone"]["landscape"] = "left"
    __orientation["phone"]["portrait"] = "natural"
    __orientation["phone"]["reverse-landscape"] = "right"

    def __init__(self, orientation="portrait", **kwargs):
        ui_step.__init__(self, **kwargs)
        self.orientation = orientation

    def do(self):
        # first reset to the 'natural' orientation (phone portrait)
        ui_steps.set_orientation(
            serial=self.serial, orientation="portrait", target="phone")()
        # detect whether the DUT behaves as a phone or a tablet, then apply
        # the requested orientation for that device type
        self.device_type = adb_utils.get_device_orientation_type(
            serial=self.serial)
        ui_steps.set_orientation(
            serial=self.serial, orientation=self.orientation, target=self.device_type)()

    def check_condition(self):
        return self.uidevice.orientation == self.__orientation[
            self.device_type][self.orientation]
class block_device(ui_step):
    """ description:
            unlocks DUT with wrong PIN 5 times in a row
        usage:
            ui_steps.block_device(pin = "2222")()
        tags:
            ui, android, PIN
    """

    def __init__(self, pin="2222", **kwargs):
        # deliberately wrong PIN used for every attempt
        self.pin = pin
        ui_step.__init__(self, **kwargs)

    def do(self):
        # cycle the screen to reach the PIN lockscreen, then enter the
        # wrong pin 5 times
        ui_steps.put_device_into_sleep_mode(serial=self.serial)()
        time.sleep(2)
        ui_steps.wake_up_device(serial=self.serial)()
        ui_steps.unlock_device_swipe(serial=self.serial)()
        for i in range(0, 5):
            ui_steps.unlock_device_pin(
                serial=self.serial, pin=self.pin, wrong_pin=True)()

    def check_condition(self):
        # after 5 wrong attempts Android shows the lock-out message
        return "You have incorrectly typed your PIN 5 times." in \
            self.uidevice(resourceId="android:id/message").info["text"]
class block_device_at_boot_time(ui_step):
    """ description:
            enters wrong PIN 10 times in a row at boot time
        usage:
            ui_steps.block_device_at_boot_time()()
        tags:
            ui, android, PIN
    """

    def __init__(self, pin="2222", **kwargs):
        # deliberately wrong PIN used for every attempt
        self.pin = pin
        ui_step.__init__(self, **kwargs)

    def do(self):
        for i in range(0, 10):
            # wait for the password field to become enabled again
            ui_steps.wait_for_view(serial=self.serial,
                                   view_to_find={"resourceId": "com.android.settings:id/passwordEntry", "enabled":
                                                 "true"})()
            ui_steps.edit_text(serial=self.serial, view_to_find={"resourceId":
                                                                 "com.android.settings:id/passwordEntry"},
                               is_password=True, value=self.pin)()
            # press enter keycode
            self.uidevice.press("enter")

    def check_condition(self):
        # the device announces it is blocked after the failed attempts
        return self.uidevice(
            textContains="To unlock your tablet").wait.exists(timeout=5000)
class create_new_user(ui_step):
    """ description:
            Creates new user
        usage:
            ui_steps.create_new_user(user_name = "USER")()
        tags:
            ui, android, create, user
    """

    def __init__(self, user_name=None, set_up_user=False, **kwargs):
        # name used when the new user is also set up
        self.user_name = user_name
        # when True, run the set-up flow right after creating the user
        self.set_up_user = set_up_user
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_users_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Add user or profile"},
                              view_to_check={"textContains": "Users have their own"})()
        ui_steps.click_button(serial=self.serial, view_to_find={"textContains": "Users have their own"},
                              view_to_check={"textContains": "Add new user?"})()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "OK"},
                              view_to_check={"textContains": "Set up user now?"})()
        # defer the actual set-up; it is run below only when requested
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Not now"},
                              view_to_check={"textContains": "Not set up"})()
        if self.set_up_user:
            ui_steps.set_up_user(serial=self.serial,
                                 user_name=self.user_name)()

    def check_condition(self):
        # a freshly created (not set up) user is listed as "New user"
        if not self.set_up_user:
            ui_steps.wait_for_view(serial=self.serial,
                                   view_to_find={"text": "New user"})()
        return True
class remove_user(ui_step):
    """ description:
            Deletes a user given its name
        usage:
            ui_steps.remove_user(user_name = "USER")()
        tags:
            ui, android, delete, user
    """

    def __init__(self, user_name, check_condition=True, **kwargs):
        self.user_name = user_name
        ui_step.__init__(self, **kwargs)
        # allows callers to skip the verification phase
        self.perform_check_condition = check_condition
        self.optional = False
        self.step_data = False

    def do(self):
        ui_steps.open_users_settings(serial=self.serial)()
        # nothing to do if the user does not exist
        if not ui_steps.wait_for_view_common(serial=self.serial,
                                             view_to_find={"text": self.user_name}, optional=True)():
            return
        # try the inline "Delete user" control first; fall back to the
        # overflow ("more") menu when it is not available
        if not ui_steps.click_button_common(serial=self.serial, view_to_find={"text": self.user_name},
                                            second_view_to_find={
                                                "descriptionContains": "Delete user"},
                                            view_to_check={"textContains": "Remove this user?"}, optional=True)():
            ui_steps.click_button_common(serial=self.serial,
                                         view_to_find={"descriptionContains": "more"})()
            ui_steps.click_button_common(serial=self.serial,
                                         view_to_find={"textContains": "Delete"})()
        # confirm the removal dialog
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "DELETE"})()
        self.step_data = True

    def check_condition(self):
        if self.optional and not self.step_data:
            return self.step_data
        if self.step_data and self.perform_check_condition:
            # the user entry must be gone from the users list
            return not ui_steps.wait_for_view_common(serial=self.serial,
                                                     view_to_find={"text": self.user_name}, optional=True)()
        else:
            # NOTE(review): bare return yields None (falsy) when the user
            # was absent or checking is disabled -- confirm this is intended
            return
class set_up_user(ui_step):
    """ description:
            Sets up a previously created (but not yet set up) user
        usage:
            ui_steps.set_up_user(user_name = "USER")()
        tags:
            ui, android, set up, user
    """

    def __init__(self, user_name, **kwargs):
        # NOTE(review): stored but never read; the wizard below is always
        # run with user_name="New user" -- confirm
        self.user_name = user_name
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_users_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Not set up"},
                              view_to_check={"textContains": "Set up user now?"})()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Set up now"})()
        # give the device time to switch to the new user
        time.sleep(10)
        ui_steps.wake_up_device(serial=self.serial)()
        ui_steps.unlock_device(serial=self.serial)()
        ui_steps.perform_startup_wizard_for_new_user(
            serial=self.serial, user_name="New user")()

    def check_condition(self):
        # verification is handled by the wizard step above
        return True
class switch_user(ui_step):
    """ description:
            Switches to the user given by its name
        usage:
            ui_steps.switch_user(user_name = "USER")()
        tags:
            ui, android, switch, user
    """

    def __init__(self, user_name, **kwargs):
        self.user_name = user_name
        ui_step.__init__(self, **kwargs)

    def do(self):
        ui_steps.open_users_settings(serial=self.serial)()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": self.user_name})()
        # give the device time to switch users
        time.sleep(10)
        # cycle the screen so the new user's lockscreen can be unlocked
        ui_steps.put_device_into_sleep_mode(serial=self.serial)()
        ui_steps.wake_up_device(serial=self.serial)()
        ui_steps.unlock_device(serial=self.serial)()

    def check_condition(self):
        # Check if user switched
        ui_steps.open_users_settings(serial=self.serial)()
        # the currently active user is listed as "You (<name>)"
        ui_steps.wait_for_view(serial=self.serial,
                               view_to_find={"text": "You (" + self.user_name + ")"})()
        return True
class add_trusted_location(ui_step):
    """ description:
            Adds a trusted location (Smart lock)
        usage:
            ui_steps.add_trusted_location(location_name = "Test location")()
        tags:
            ui, android, smart lock, location
    """

    def __init__(self, location_name, pin, wait_time=30000, **kwargs):
        # name under which the trusted place is saved
        self.location_name = location_name
        # device PIN required to open the Smart Lock settings
        self.pin = pin
        ui_step.__init__(self, **kwargs)
        self.wait_time = wait_time

    def do(self):
        ui_steps.open_smart_lock_settings(serial=self.serial, pin=self.pin)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Trusted places"},
                              view_to_check={"text": "Add trusted place"})()
        ui_steps.click_button(
            serial=self.serial, view_to_find={"text": "Add trusted place"})()
        # accept the location-services prompt if it pops up
        if self.uidevice(text="Use location?").wait.exists(timeout=self.wait_time):
            ui_steps.click_button(
                serial=self.serial,
                view_to_find={
                    "text": "Yes"})()
        # center the map on the current location, then confirm it
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"resourceId":
                                            "com.google.android.gms:id/marker_map_my_location_button"})()
        if self.uidevice(text="Select this location").wait.exists(timeout=self.wait_time):
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Select this location"},
                                  view_to_check={"resourceId": "com.google.android.gms:id/trusted_place_name"})()
        # name the place and save it
        ui_steps.edit_text(serial=self.serial, view_to_find={"resourceId":
                                                             "com.google.android.gms:id/trusted_place_name"},
                           value=self.location_name)()
        ui_steps.click_button(
            serial=self.serial,
            view_to_find={
                "text": "OK"})()

    def check_condition(self):
        # the new place must be listed in the Smart Lock settings
        ui_steps.open_smart_lock_settings(serial=self.serial, pin=self.pin)()
        return self.uidevice(text=self.location_name).wait.exists(
            timeout=self.wait_time)
class remove_trusted_location(ui_step):
    """ description:
            Removes a trusted location (Smart lock)
        usage:
            ui_steps.remove_trusted_location(location_name = "Test location")()
        tags:
            ui, android, smart lock, location
    """

    def __init__(self, location_name, pin, wait_time=5000, **kwargs):
        self.location_name = location_name
        # device PIN required to open the Smart Lock settings
        self.pin = pin
        ui_step.__init__(self, **kwargs)
        # normalise the wait time; fall back to 5s when falsy
        if wait_time:
            self.wait_time = int(wait_time)
        else:
            self.wait_time = 5000

    def do(self):
        ui_steps.open_smart_lock_settings(serial=self.serial, pin=self.pin)()
        ui_steps.click_button(serial=self.serial, view_to_find={"text": "Trusted places"},
                              view_to_check={"text": "Add trusted place"})()
        if self.device_info.dessert == "M":
            # M exposes an explicit "delete this location" menu entry
            ui_steps.click_button(serial=self.serial, view_to_find={"text": self.location_name},
                                  view_to_check={"resourceId": "com.google.android.gms:id/trusted_places_"
                                                               "custom_places_menu_delete_this_location"})()
            ui_steps.click_button(serial=self.serial,
                                  view_to_find={"resourceId": "com.google.android.gms:id/trusted_places_custom"
                                                              "_places_menu_delete_this_location"},
                                  view_to_check={"text": "Add trusted place"})()
        elif self.device_info.dessert == "L":
            # L has no delete entry; tap the location, then click a
            # device-specific screen position to remove it
            ui_steps.click_button(serial=self.serial, view_to_find={"text": self.location_name},
                                  view_to_check={"text": self.location_name})()
            if ui_utils.is_display_direction_landscape(serial=self.serial):
                ui_steps.click_xy(serial=self.serial, x=self.uidevice.info["displayWidth"] / 2,
                                  y=self.uidevice.info[
                                      "displayHeight"] * self.device_info
                                  .remove_trusted_location_horizontal_percentage)()
            else:
                ui_steps.click_xy(serial=self.serial, x=self.uidevice.info["displayWidth"] / 2,
                                  y=self.uidevice.info[
                                      "displayHeight"] * self.device_info
                                  .remove_trusted_location_vertical_percentage)()

    def check_condition(self):
        # the place must no longer be listed in the Smart Lock settings
        ui_steps.open_smart_lock_settings(serial=self.serial, pin=self.pin)()
        return not self.uidevice(text=self.location_name).wait.exists(
            timeout=self.wait_time)
class add_trusted_device(ui_step):
    """ description:
            Adds a trusted device (Smart lock)
        usage:
            ui_steps.add_trusted_device(device_name = <device_name>)()
        tags:
            ui, android, smart lock, trusted device
    """

    def __init__(self, device_name, pin, wait_time=30000, **kwargs):
        # name of the (already paired) device to trust
        self.device_name = device_name
        # device PIN required to open the Smart Lock settings
        self.pin = pin
        ui_step.__init__(self, **kwargs)
        self.wait_time = wait_time

    def do(self):
        ui_steps.open_smart_lock_settings(serial=self.serial,
                                          pin=self.pin)()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Trusted devices"},
                              view_to_check={"text": "Add trusted device"})()
        ui_steps.click_button(serial=self.serial,
                              view_to_find={"text": "Add trusted device"},
                              view_to_check={"text": "Choose device"})()
        # pick the device, then confirm the "YES, ADD" dialog
        ui_steps.click_button(serial=self.serial, view_to_find={
            "text": self.device_name}, view_to_check={"text": "YES, ADD"})()
        ui_steps.click_button(serial=self.serial, view_to_find={
            "text": "YES, ADD"}, view_to_check={"text": "Connected"})()

    def check_condition(self):
        # the device must be listed in the Smart Lock settings
        ui_steps.open_smart_lock_settings(serial=self.serial, pin=self.pin)()
        return self.uidevice(text=self.device_name).wait.exists(
            timeout=self.wait_time)
class set_date_and_time(ui_step):
    """ description:
            disables automatic date/time and timezone, then sets the date
            (year/day) and the time through the Settings pickers
        usage:
            ui_steps.set_date_and_time(serial=serial, year="2015",
                                       day="1", ntp_switch_state_value=False)()
        tags:
            ui, android, date, time
    """

    def __init__(
            self, year, day, ntp_switch_state_value, wait_time=5000, **kwargs):
        ui_step.__init__(self, **kwargs)
        self.year = year
        self.day = day
        # NOTE(review): stored but never read in do(); presumably the
        # desired state of the "Automatic date & time" switch -- confirm
        self.ntp_switch_state_value = ntp_switch_state_value
        self.wait_time = wait_time
        self.step_data = None

    def do(self):
        ui_steps.open_settings_app(serial=self.serial, view_to_find={"text": "Date & time"},
                                   view_to_check={"text": "Automatic date & time"})()
        # switch off "Automatic date & time" if currently on
        if self.uidevice(resourceId="android:id/switchWidget", instance=0).info["text"] == "ON":
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Automatic date & time"},
                                  view_to_check={"text": "Automatic time zone"})()
        # switch off "Automatic time zone" if currently on
        if self.uidevice(resourceId="android:id/switchWidget", instance=1).info["text"] == "ON":
            ui_steps.click_button(serial=self.serial, view_to_find={"text": "Automatic time zone"},
                                  view_to_check={"text": "Set date"})()
        # each click is paired with the view expected after that click
        views_to_click = [{"text": "Set date"},
                          {"resourceId": "android:id/date_picker_header_year"},
                          {"text": self.year},
                          {"resourceId": "android:id/prev"},
                          {"text": self.day},
                          {"text": "OK"},
                          {"text": "Set time"},
                          {"index": "11"},
                          {"index": "11"},
                          {"text": "OK"}]
        views_to_check = [{"text": "OK"},
                          {"text": "OK"},
                          {"text": "OK"},
                          {"text": "OK"},
                          {"text": "OK"},
                          {"text": "Set date"},
                          {"text": "OK"},
                          {"text": "OK"},
                          {"text": "OK"},
                          {"text": "Set time"}]
        # iterate the parallel lists with zip instead of range(len(...))
        for to_click, to_check in zip(views_to_click, views_to_check):
            ui_steps.click_button(serial=self.serial, view_to_find=to_click,
                                  view_to_check=to_check)()

    def check_condition(self):
        # each click above already verified its expected view
        return True
class enable_disable_auto_time_date(ui_step):
    """ description:
            Enables or disables the auto time and date option in settings
        usage:
            ui_steps.enable_disable_auto_time_date(serial=serial,
                                                   enable=True/False)()
        tags:
            ui, android, enable, disable, time, date
    """

    def __init__(self, serial, enable=True, **kwargs):
        self.serial = serial
        # desired state of the "Automatic date & time" switch
        self.enable = enable
        ui_step.__init__(self, serial=self.serial, **kwargs)
        self.set_errorm("", "Could not {0} auto time and date".format(
            "enable" if self.enable else "disable"))
        self.set_passm("Successfully {0} auto time and date".format(
            "enabled" if self.enable else "disabled"))

    def do(self):
        # Open the app and go to date and time
        ui_steps.open_settings_app(serial=self.serial,
                                   view_to_find={"text": "Date & time"},
                                   view_to_check={
                                       "text": "Automatic date & time"},
                                   wait_time=3000)()
        # The switch widget next to the "Automatic date & time" label
        self.auto_time_checkbox = self.uidevice(className="android.widget.ListView", resourceId="android:id/list").\
            child_by_text("Automatic date & time", className="android.widget.LinearLayout").\
            child(className="android.widget.Switch")
        # Toggle only when the current state differs from the desired one
        # (replaces the original two-branch enable/checked test)
        if self.auto_time_checkbox.info["checked"] != bool(self.enable):
            self.auto_time_checkbox.click()

    def check_condition(self):
        # Pass only when the switch ended up in the requested state
        return self.auto_time_checkbox.info["checked"] == bool(self.enable)
class enable_disable_auto_timezone(ui_step):
    """ description:
            Enables or disables the timezone switch button from Date & time settings
        usage:
            ui_steps.enable_disable_auto_timezone(serial=serial,
                time_zone_switch_value=True for "ON"/ False for "OFF")()
        tags:
            ui, android, enable, disable, timezone
    """

    def __init__(self, time_zone_switch_value, **kwargs):
        ui_step.__init__(self, **kwargs)
        # desired state of the "Automatic time zone" switch
        self.time_zone_switch_value = time_zone_switch_value
        self.set_errorm("", "Could not {0} auto timezone".
                        format("enable" if self.time_zone_switch_value else "disable"))
        self.set_passm("Successfully {0} auto timezone"
                       .format("enabled" if self.time_zone_switch_value else "disabled"))

    def do(self):
        # The switch widget next to the "Automatic time zone" label;
        # assumes the "Date & time" settings screen is already open
        self.auto_timezone_checkbox = self.uidevice(className="android.widget.ListView", resourceId="android:id/list")\
            .child_by_text("Automatic time zone", className="android.widget.LinearLayout")\
            .child(className="android.widget.Switch")
        # Toggle only when the current state differs from the desired one
        # (replaces the original two-branch test; kept consistent with
        # enable_disable_auto_time_date)
        if self.auto_timezone_checkbox.info["checked"] != bool(self.time_zone_switch_value):
            self.auto_timezone_checkbox.click()

    def check_condition(self):
        # Pass only when the switch ended up in the requested state
        return self.auto_timezone_checkbox.info["checked"] == bool(self.time_zone_switch_value)
class press_map(ui_step):
    """ description:
            Open car map application
        usage:
            ui_steps.press_map(serial=serial)()
        tags:
            ui, android, map, ivi
    """

    def __init__(self, view_to_check=None, timeout=5000, **kwargs):
        """view_to_check -- view that confirms the map opened (defaults to
        the lens-picker stream card); timeout -- wait per check in ms."""
        ui_step.__init__(self, **kwargs)
        if view_to_check is None:
            # BUG FIX: the old default resourceId contained a stray space
            # ("lenspicker: id/stream_card") which can never match a real
            # resource id; aligned with the value used by press_car. Using a
            # None default also avoids the shared-mutable-default pitfall.
            view_to_check = {
                "resourceId":
                    "com.android.support.car.lenspicker:id/stream_card",
                "packageName": "com.android.support.car.lenspicker"}
        self.view_to_check = view_to_check
        self.timeout = timeout
        self.set_errorm("", "Could not press car map")
        self.set_passm("Successfully opened car map")
        self.step_data = False

    def check_view(self):
        """Return True when either the configured view or the Google Maps
        (gearhead projection) surface becomes visible."""
        if self.uidevice(**self.view_to_check).wait.exists(timeout=self.timeout):
            return True
        if self.uidevice(**{"text": "Google Maps", "packageName": "com.google.android.projection.gearhead"})\
                .wait.exists(timeout=self.timeout):
            return True
        return False

    def do(self):
        """Tap the map slot of the IVI activity bar via 'adb input tap'."""
        if self.device_info.device_type == "tab":
            self.logger.error("Unsupported API, as it only support in "
                              "greater than or equal to Android O IVI "
                              "platform")
        else:
            # Todo need to replace pixel with uiobject
            # currently pixel is used because activity is not dumped through
            # uiautomator
            info = self.uidevice.info
            x = info['displaySizeDpX']
            y = info['displaySizeDpY']
            # In activity bar, 7 options are there.
            activity_bar_single_element_width = x / 7
            # map resides at 2 position and click has to be done at the center
            map_x_coordinate = activity_bar_single_element_width * \
                2 - activity_bar_single_element_width / 2
            # Default activity bar resides at the bottom, so y coordinate
            # can be used and to click at the center reducing the value by 30
            map_y_coordinate = y - 30
            # Retry a few times; the bar may not react to the first tap.
            for i in range(0, 5):
                cmd = "input tap {0} {1}".format(
                    map_x_coordinate, map_y_coordinate)
                adb_connection = Adb(serial=self.serial)
                adb_connection.run_cmd(cmd)
                time.sleep(2)
                if self.check_view():
                    self.step_data = True
                    break

    def check_condition(self):
        return self.step_data
class press_dialer(ui_step):
    """ description:
            Open car dialer application
        usage:
            ui_steps.press_dialer(serial=serial)()
        tags:
            ui, android, dialer, ivi
    """

    def __init__(self, timeout=5000, **kwargs):
        # timeout: how long check_condition waits for the dialer view (ms)
        ui_step.__init__(self, **kwargs)
        self.timeout = timeout
        self.set_errorm("", "Could not open car dialer")
        self.set_passm("Successfully opened car dialer")
        self.step_data = False

    def do(self):
        """Tap the dialer slot of the IVI activity bar via 'adb input tap'."""
        if self.device_info.device_type == "tab":
            self.logger.error("Unsupported API, as it only support in greater than or equal to Android O IVI "
                              "platform")
        else:
            # Todo need to replace pixel with uiobject
            # currently pixel is used because activity is not dumped through
            # uiautomator
            info = self.uidevice.info
            x = info['displaySizeDpX']
            y = info['displaySizeDpY']
            # In activity bar, 7 options are there.
            activity_bar_single_element_width = x / 7
            # dialer resides at 3 position and click has to be done at the
            # center
            dialer_x_coordinate = activity_bar_single_element_width * \
                3 - activity_bar_single_element_width / 2
            # Default activity bar resides at the bottom, so y coordinate
            # can be used and to click at the center reducing the value by 30
            dialer_y_coordinate = y - 30
            # NOTE(review): true division can yield float coordinates here;
            # "input tap" seems to accept them -- confirm on the target image.
            cmd = "input tap {0} {1}".format(
                dialer_x_coordinate, dialer_y_coordinate)
            adb_connection = Adb(serial=self.serial)
            adb_subprocess_object = adb_connection.run_cmd(cmd)
            # poll() == 0: the tap command process already exited cleanly
            if adb_subprocess_object.poll() == 0:
                self.step_data = True

    def check_condition(self):
        if self.step_data is True:
            # Confirm the dialer UI actually appeared on screen.
            self.step_data = ui_steps.wait_for_view(serial=self.serial,
                                                    view_to_find={
                                                        "text": "Phone", "packageName": "com.android.car.dialer"},
                                                    timeout=self.timeout)()
        return self.step_data
class press_media(ui_step):
    """ description:
            Open car media application and shows app picker
        usage:
            ui_steps.press_media(serial=serial)()
        tags:
            ui, android, media, ivi
    """

    def __init__(self, view_to_check=None, timeout=5000, **kwargs):
        """view_to_check -- view that confirms the media app opened
        (defaults to the lens-picker stream card); timeout -- wait in ms."""
        ui_step.__init__(self, **kwargs)
        if view_to_check is None:
            # BUG FIX: the old default resourceId contained a stray space
            # ("lenspicker: id/stream_card") which can never match a real
            # resource id; aligned with the value used by press_car. Using a
            # None default also avoids the shared-mutable-default pitfall.
            view_to_check = {
                "resourceId":
                    "com.android.support.car.lenspicker:id/stream_card",
                "packageName": "com.android.support.car.lenspicker"}
        self.view_to_check = view_to_check
        self.timeout = timeout
        self.set_errorm("", "Could not open car media")
        self.set_passm("Successfully opened car media")
        self.step_data = False

    def check_view(self):
        """Return True when the configured view appears within timeout."""
        return self.uidevice(**self.view_to_check).wait.exists(
            timeout=self.timeout)

    def do(self):
        """Tap the media slot of the IVI activity bar via 'adb input tap'."""
        if self.device_info.device_type == "tab":
            self.logger.error("Unsupported API, as it only support in "
                              "greater than or equal to Android O IVI "
                              "platform")
        else:
            # Todo need to replace pixel with uiobject
            # currently pixel is used because activity is not dumped through
            # uiautomator
            info = self.uidevice.info
            x = info['displaySizeDpX']
            y = info['displaySizeDpY']
            # In activity bar, 7 options are there.
            activity_bar_single_element_width = x / 7
            # media resides at 5 position and click has to be done at the
            # center
            media_x_coordinate = activity_bar_single_element_width * \
                5 - activity_bar_single_element_width / 2
            # Default activity bar resides at the bottom, so y coordinate
            # can be used and to click at the center reducing the value by 30
            media_y_coordinate = y - 30
            # Retry a few times; the bar may not react to the first tap.
            for i in range(0, 5):
                cmd = "input tap {0} {1}".format(
                    media_x_coordinate, media_y_coordinate)
                adb_connection = Adb(serial=self.serial)
                adb_connection.run_cmd(cmd)
                time.sleep(1)
                if self.check_view():
                    self.step_data = True
                    break

    def check_condition(self):
        return self.step_data
class press_car(ui_step):
    """ description:
            Open car application and shows app picker
        usage:
            ui_steps.press_car(serial=serial)()
        tags:
            ui, android, car, ivi
    """

    def check_view(self):
        """Return True when the configured view appears within timeout."""
        return self.uidevice(**self.view_to_check).wait.exists(
            timeout=self.timeout)

    def __init__(self, view_to_check={"resourceId": "com.android.support.car.lenspicker:id/stream_card",
                                      "packageName": "com.android.support.car.lenspicker"}, timeout=5000, **kwargs):
        # view_to_check: view that confirms the car app opened
        # timeout: wait per check in ms
        ui_step.__init__(self, **kwargs)
        self.view_to_check = view_to_check
        self.timeout = timeout
        self.set_errorm("", "Could not open car ")
        self.set_passm("Successfully opened car")
        self.step_data = False

    def do(self):
        """Tap the car slot of the IVI activity bar via 'adb input tap'."""
        if self.device_info.device_type == "tab":
            self.logger.error("Unsupported API, as it only support in "
                              "greater than or equal to Android O IVI "
                              "platform")
        else:
            # Todo need to replace pixel with uiobject
            # currently pixel is used because activity is not dumped through
            # uiautomator
            info = self.uidevice.info
            x = info['displaySizeDpX']
            y = info['displaySizeDpY']
            # In activity bar, 7 options are there.
            activity_bar_single_element_width = x / 7
            # NOTE(review): the code taps slot 6 (of 7); the original comment
            # claimed position 7 -- confirm which slot is intended.
            car_x_coordinate = activity_bar_single_element_width * \
                6 - activity_bar_single_element_width / 2
            # Default activity bar resides at the bottom, so y coordinate
            # can be used and to click at the center reducing the value by 30
            car_y_coordinate = y - 30
            # Retry a few times; the bar may not react to the first tap.
            for i in range(0, 5):
                cmd = "input tap {0} {1}".format(
                    car_x_coordinate, car_y_coordinate)
                adb_connection = Adb(serial=self.serial)
                adb_connection.run_cmd(cmd)
                time.sleep(1)
                if self.check_view():
                    self.step_data = True
                    break

    def check_condition(self):
        return self.step_data
|
__author__ = 'efelix'
from rdkit import Chem
from beaker.utils.functional import _apply, _call
from beaker.utils.io import _parseMolData
import beaker.utils.chemical_transformation as ct
import copy
import json
import os.path as path
# Resolve the packaged data directory relative to this module's location.
data_dir = path.abspath(path.join(__file__ , "../../../../data"))

# Load the ChEMBL 26 structural alerts once at import time and pre-compile
# each SMARTS pattern into an RDKit query molecule under the 'rdmol' key.
with open('{}/chembl_26_alerts.json'.format(data_dir), 'r') as alerts_file:
    alerts = json.load(alerts_file)
for alert in alerts:
    alert['rdmol'] = Chem.MolFromSmarts(alert['smarts'])
#-----------------------------------------------------------------------------------------------------------------------
def get_matches(mol):
    """Return the list of ChEMBL structural alerts matched by *mol*.

    Each returned entry is a copy of the alert record without the
    pre-compiled 'rdmol' query object.
    """
    # Make sure valences and ring info are available before matching.
    _call([mol], 'UpdatePropertyCache', strict=False)
    _apply([mol], ct._sssr)
    matches = []
    for alert in alerts:
        try:
            if mol.HasSubstructMatch(alert['rdmol']):
                # Copy every field except the RDKit query object. The
                # original deep-copied the whole record (including the
                # rdmol) only to delete the rdmol afterwards.
                matches.append({k: copy.deepcopy(v)
                                for k, v in alert.items() if k != 'rdmol'})
        except Exception as e:
            # Best-effort: one failing alert must not abort the whole scan.
            print(e)
    return matches
#-----------------------------------------------------------------------------------------------------------------------
def _get_alerts(data, loadMol=True, useRDKitChemistry=False):
    """Parse *data* into molecules and serialise their alert matches as JSON."""
    parsed_mols = _parseMolData(data, loadMol=loadMol,
                                useRDKitChemistry=useRDKitChemistry)
    per_mol_matches = _apply(parsed_mols, get_matches)
    return json.dumps(per_mol_matches)
#-----------------------------------------------------------------------------------------------------------------------
|
import logging.config
import os
from typing import Tuple
from kbc_evaluation.dataset import DataSet, ParsedSet
# Configure logging from the log.conf shipped next to this module; keep
# loggers created before this point alive (disable_existing_loggers=False).
logconf_file = os.path.join(os.path.dirname(__file__), "log.conf")
logging.config.fileConfig(fname=logconf_file, disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class EvaluatorResult:
    """Plain container for the filtered and non-filtered evaluation metrics
    of one evaluated prediction file."""

    def __init__(
        self,
        evaluated_file: str,
        test_set_size: int,
        n: int,
        filtered_hits_at_n_heads: int,
        filtered_hits_at_n_tails: int,
        filtered_hits_at_n_all: int,
        filtered_mean_rank_heads: int,
        filtered_mean_rank_tails: int,
        filtered_mean_rank_all: int,
        filtered_reciprocal_mean_rank_heads: float,
        filtered_reciprocal_mean_rank_tails: float,
        filtered_reciprocal_mean_rank_all: float,
        non_filtered_hits_at_n_heads: int,
        non_filtered_hits_at_n_tails: int,
        non_filtered_hits_at_n_all: int,
        non_filtered_mean_rank_heads: int,
        non_filtered_mean_rank_tails: int,
        non_filtered_mean_rank_all: int,
        non_filtered_reciprocal_mean_rank_heads: float,
        non_filtered_reciprocal_mean_rank_tails: float,
        non_filtered_reciprocal_mean_rank_all: float,
    ):
        # Copy every constructor argument onto the instance verbatim, one
        # attribute per parameter (captured before any local is defined).
        arguments = dict(locals())
        del arguments["self"]
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Relative hits@n: every test triple yields one head task and one
        # tail task, hence the 2 * test_set_size denominator.
        task_count = 2 * test_set_size
        self.filtered_hits_at_n_relative = filtered_hits_at_n_all / task_count
        self.non_filtered_hits_at_n_relative = (
            non_filtered_hits_at_n_all / task_count
        )
class EvaluationRunner:
    """This class calculates evaluation scores (hits@n, MR, MRR) for a
    single prediction file."""

    def __init__(
        self,
        file_to_be_evaluated: str,
        data_set: DataSet,
        is_apply_filtering: bool = False,
    ):
        """Constructor

        Parameters
        ----------
        file_to_be_evaluated : str
            Path to the text file with the predicted links that shall be evaluated.
        data_set : DataSet
            The dataset for which predictions have been made.
        is_apply_filtering : bool
            Indicates whether filtering is desired (if True, results will likely improve).

        Raises
        ------
        Exception
            If file_to_be_evaluated is None or not an existing file.
        """
        self._file_to_be_evaluated = file_to_be_evaluated
        self._is_apply_filtering = is_apply_filtering
        if file_to_be_evaluated is None or not os.path.isfile(file_to_be_evaluated):
            # Use the module logger consistently (the original mixed
            # root-logger `logging.error` calls with the module `logger`).
            logger.error(
                f"The evaluator will not work because the specified file "
                f"does not exist {file_to_be_evaluated}"
            )
            raise Exception(
                f"The specified file ({file_to_be_evaluated}) does not exist."
            )
        self.parsed = ParsedSet(
            is_apply_filtering=self._is_apply_filtering,
            file_to_be_evaluated=self._file_to_be_evaluated,
            data_set=data_set,
        )

    def mean_rank(self) -> Tuple[int, int, int, float, float, float]:
        """Calculates the mean rank and mean reciprocal rank using the given file.

        Returns
        -------
        Tuple[int, int, int, float, float, float]
            The first three elements are for MR, the last three for MRR.
            [0] Mean rank as int for heads (rounded float).
            [1] Mean rank as int for tails (rounded float).
            [2] Mean rank as int (rounded float).
            [3] Mean reciprocal rank as float for heads.
            [4] Mean reciprocal as float for tails.
            [5] Mean reciprocal as float.
        """
        logger.info("Calculating Mean Rank")
        ignored_heads = 0
        ignored_tails = 0
        head_rank = 0
        tail_rank = 0
        reciprocal_head_rank = 0
        reciprocal_tail_rank = 0
        for truth, prediction in self.parsed.triple_predictions.items():
            try:
                # 1-based rank of the correct head in the head predictions
                # (first position has index 0).
                h_index = prediction[0].index(truth[0]) + 1
                head_rank += h_index
                reciprocal_head_rank += 1.0 / h_index
            except ValueError:
                logger.error(
                    f"ERROR: Failed to retrieve head predictions for (correct) head concept: {truth[0]} "
                    f"Triple: {truth}"
                )
                ignored_heads += 1
            try:
                # 1-based rank of the correct tail in the tail predictions.
                t_index = prediction[1].index(truth[2]) + 1
                tail_rank += t_index
                reciprocal_tail_rank += 1.0 / t_index
            except ValueError:
                logger.error(
                    f"ERROR: Failed to retrieve tail predictions for (correct) tail concept: {truth[2]} "
                    f"Triple: {truth}"
                )
                ignored_tails += 1
        mean_head_rank = 0
        mean_tail_rank = 0
        mean_reciprocal_head_rank = 0
        mean_reciprocal_tail_rank = 0
        total_tasks = self.parsed.total_prediction_tasks
        # BUG FIX: the head branch previously tested
        # ``total_tasks - ignored_heads > 0`` while dividing by
        # ``total_tasks / 2.0 - ignored_heads``; when every head prediction
        # was ignored this raised ZeroDivisionError. The guard now matches
        # the denominator, mirroring the tail branch.
        if total_tasks / 2.0 - ignored_heads > 0:
            denominator = total_tasks / 2.0 - ignored_heads
            mean_head_rank = head_rank / denominator
            mean_reciprocal_head_rank = reciprocal_head_rank / denominator
        if total_tasks / 2 - ignored_tails > 0:
            denominator = total_tasks / 2 - ignored_tails
            mean_tail_rank = tail_rank / denominator
            mean_reciprocal_tail_rank = reciprocal_tail_rank / denominator
        logger.info(
            f"Mean Head Rank: {mean_head_rank} ({ignored_heads} ignored lines)\n"
            + f"Mean Reciprocal Head Rank: {mean_reciprocal_head_rank} ({ignored_heads} ignored lines)"
        )
        logger.info(
            f"Mean Tail Rank: {mean_tail_rank} ({ignored_tails} ignored lines)\n"
            + f"Mean Reciprocal Tail Rank: {mean_reciprocal_tail_rank} ({ignored_tails} ignored lines)"
        )
        mean_rank = 0
        mean_reciprocal_rank = 0
        single_tasks = total_tasks / 2
        heads_completed = single_tasks - ignored_heads
        tails_completed = single_tasks - ignored_tails
        # Guard each denominator individually: the original only checked the
        # combined count, which still divided by zero when one side was
        # fully ignored but the other was not.
        if heads_completed > 0 and tails_completed > 0:
            mean_rank = (
                head_rank / heads_completed + tail_rank / tails_completed
            ) / 2
            total_completed_tasks = total_tasks - ignored_tails - ignored_heads
            # MRR is weighted by each side's share of completed tasks.
            mean_reciprocal_rank = (
                mean_reciprocal_head_rank
                * (heads_completed / total_completed_tasks)
                + mean_reciprocal_tail_rank
                * tails_completed
                / total_completed_tasks
            )
        mean_rank_rounded = round(mean_rank)
        logger.info(f"Mean rank: {mean_rank}; rounded: {mean_rank_rounded}")
        logger.info(f"Mean reciprocal rank: {mean_reciprocal_rank}")
        return (
            round(mean_head_rank),
            round(mean_tail_rank),
            mean_rank_rounded,
            mean_reciprocal_head_rank,
            mean_reciprocal_tail_rank,
            mean_reciprocal_rank,
        )

    def calculate_hits_at(self, n: int = 10) -> Tuple[int, int, int]:
        """Calculation of hits@n.

        Parameters
        ----------
        n : int
            Hits@n. This parameter specifies the n.

        Returns
        -------
        Tuple[int, int, int]
            [0] Hits at n only for heads.
            [1] Hits at n only for tails.
            [2] The hits at n. Note that head hits and tail hits are added.
        """
        heads_hits = 0
        tails_hits = 0
        for truth, prediction in self.parsed.triple_predictions.items():
            # A hit: the correct entity appears among the top-n predictions.
            if truth[0] in prediction[0][:n]:
                heads_hits += 1
            if truth[2] in prediction[1][:n]:
                tails_hits += 1
        result = heads_hits + tails_hits
        logger.info(f"Hits@{n} Heads: {heads_hits}")
        logger.info(f"Hits@{n} Tails: {tails_hits}")
        logger.info(f"Hits@{n} Total: {result}")
        return heads_hits, tails_hits, result
class Evaluator:
    """This class provides powerful evaluation reporting capabilities."""

    @staticmethod
    def calculate_results(
        file_to_be_evaluated: str,
        data_set: DataSet,
        n: int = 10,
    ) -> EvaluatorResult:
        """Given the file_to_be_evaluated and a data_set, this method calculates hits at n.

        Parameters
        ----------
        file_to_be_evaluated : str
        data_set : DataSet
        n : int
            Hits@n. This parameter specifies the n. Default value 10.

        Returns
        -------
        EvaluatorResult
            The result data structure.
        """
        # First pass: evaluation without filtering.
        evaluator = EvaluationRunner(
            file_to_be_evaluated=file_to_be_evaluated,
            is_apply_filtering=False,
            data_set=data_set,
        )
        non_filtered_hits_at_10 = evaluator.calculate_hits_at(n)
        test_set_size = len(data_set.test_set())
        non_filtered_mr = evaluator.mean_rank()
        # Second pass: filtered evaluation (re-parses the prediction file).
        evaluator = EvaluationRunner(
            file_to_be_evaluated=file_to_be_evaluated,
            is_apply_filtering=True,
            data_set=data_set,
        )
        filtered_hits_at_10 = evaluator.calculate_hits_at(n)
        filtered_mr = evaluator.mean_rank()
        return EvaluatorResult(
            evaluated_file=file_to_be_evaluated,
            test_set_size=test_set_size,
            n=n,
            filtered_hits_at_n_heads=filtered_hits_at_10[0],
            filtered_hits_at_n_tails=filtered_hits_at_10[1],
            filtered_hits_at_n_all=filtered_hits_at_10[2],
            filtered_mean_rank_heads=filtered_mr[0],
            filtered_mean_rank_tails=filtered_mr[1],
            filtered_mean_rank_all=filtered_mr[2],
            filtered_reciprocal_mean_rank_heads=filtered_mr[3],
            filtered_reciprocal_mean_rank_tails=filtered_mr[4],
            filtered_reciprocal_mean_rank_all=filtered_mr[5],
            non_filtered_hits_at_n_heads=non_filtered_hits_at_10[0],
            non_filtered_hits_at_n_tails=non_filtered_hits_at_10[1],
            non_filtered_hits_at_n_all=non_filtered_hits_at_10[2],
            non_filtered_mean_rank_heads=non_filtered_mr[0],
            non_filtered_mean_rank_tails=non_filtered_mr[1],
            non_filtered_mean_rank_all=non_filtered_mr[2],
            non_filtered_reciprocal_mean_rank_heads=non_filtered_mr[3],
            non_filtered_reciprocal_mean_rank_tails=non_filtered_mr[4],
            non_filtered_reciprocal_mean_rank_all=non_filtered_mr[5],
        )

    @staticmethod
    def write_result_object_to_file(
        file_to_be_written: str,
        result_object: EvaluatorResult,
    ) -> None:
        """Render *result_object* as a human-readable report, write it to
        *file_to_be_written* (overwriting) and also log it."""
        non_filtered_text = (
            f"\nThis is the evaluation of file {result_object.evaluated_file}\n\n"
            + "Non-filtered Results\n"
            + "--------------------\n"
            + f"Test set size: {result_object.test_set_size}\n"
            + f"Hits at {result_object.n} (Heads): {result_object.non_filtered_hits_at_n_heads}\n"
            + f"Hits at {result_object.n} (Tails): {result_object.non_filtered_hits_at_n_tails}\n"
            + f"Hits at {result_object.n} (All): {result_object.non_filtered_hits_at_n_all}\n"
            + f"Relative Hits at {result_object.n}: {result_object.non_filtered_hits_at_n_relative}\n"
            + f"Mean rank (Heads): {result_object.non_filtered_mean_rank_heads}\n"
            + f"Mean rank (Tails): {result_object.non_filtered_mean_rank_tails}\n"
            + f"Mean rank (All): {result_object.non_filtered_mean_rank_all}\n"
            + f"Mean reciprocal rank (Heads): {result_object.non_filtered_reciprocal_mean_rank_heads}\n"
            + f"Mean reciprocal rank (Tails): {result_object.non_filtered_reciprocal_mean_rank_tails}\n"
            + f"Mean reciprocal rank (All): {result_object.non_filtered_reciprocal_mean_rank_all}\n"
        )
        filtered_text = (
            "\nFiltered Results\n"
            + "----------------\n"
            + f"Test set size: {result_object.test_set_size}\n"
            + f"Hits at {result_object.n} (Heads): {result_object.filtered_hits_at_n_heads}\n"
            + f"Hits at {result_object.n} (Tails): {result_object.filtered_hits_at_n_tails}\n"
            + f"Hits at {result_object.n} (All): {result_object.filtered_hits_at_n_all}\n"
            + f"Relative Hits at {result_object.n}: {result_object.filtered_hits_at_n_relative}\n"
            + f"Mean rank (Heads): {result_object.filtered_mean_rank_heads}\n"
            + f"Mean rank (Tails): {result_object.filtered_mean_rank_tails}\n"
            + f"Mean rank (All): {result_object.filtered_mean_rank_all}\n"
            + f"Mean reciprocal rank (Heads): {result_object.filtered_reciprocal_mean_rank_heads}\n"
            + f"Mean reciprocal rank (Tails): {result_object.filtered_reciprocal_mean_rank_tails}\n"
            + f"Mean reciprocal rank (All): {result_object.filtered_reciprocal_mean_rank_all}\n"
        )
        with open(file_to_be_written, "w+", encoding="utf8") as f:
            f.write(non_filtered_text + "\n")
            f.write(filtered_text)
        logger.info(non_filtered_text + "\n" + filtered_text)

    @staticmethod
    def write_results_to_file(
        file_to_be_evaluated: str,
        data_set: DataSet,
        file_to_be_written: str = "./results.txt",
    ) -> None:
        """Executes a filtered and non-filtered evaluation and prints the results to the console and to a file.

        Parameters
        ----------
        file_to_be_evaluated : str
            File path to the file that shall be evaluated.
        data_set : DataSet
            The data set that is under evaluation.
        file_to_be_written : str
            File path to the file that shall be written.
        """
        # calculate the results
        results = Evaluator.calculate_results(
            file_to_be_evaluated=file_to_be_evaluated,
            data_set=data_set,
        )
        # write the results to the specified file
        Evaluator.write_result_object_to_file(
            file_to_be_written=file_to_be_written,
            result_object=results,
        )
|
# Copyright (c) 2016-2018, University of Idaho
# All rights reserved.
#
# Roger Lew (rogerlew@gmail.com)
#
# The project described was supported by NSF award number IIA-1301792
# from the NSF Idaho EPSCoR Program and by the National Science Foundation.
from os.path import join as _join
from os.path import exists as _exists
import requests
from posixpath import join as urljoin
from wepppy.soils.ssurgo import SoilSummary
_soilbuilder_url = "https://wepp1.nkn.uidaho.edu/webservices/weppsoilbuilder/"
def validatemukeys(mukeys):
    """Ask the weppsoilbuilder webservice which of *mukeys* are valid.

    Parameters
    ----------
    mukeys : iterable
        SSURGO map-unit keys.

    Returns
    -------
    The decoded JSON response from the service.

    Raises
    ------
    Exception
        On a non-200 HTTP status or an unparsable (non-JSON) response.
    """
    # The original declared ``global _soilbuilder_url`` although the
    # module-level constant is only read, never rebound; removed.
    r = requests.post(urljoin(_soilbuilder_url, 'validatemukeys'),
                      params=dict(mukeys=','.join(str(v) for v in mukeys)))
    if r.status_code != 200:
        raise Exception("Encountered error retrieving from weppsoilbuilder")
    # noinspection PyBroadException
    try:
        _json = r.json()
    except Exception:
        _json = None
    if _json is None:
        raise Exception("Cannot parse json from weppsoilbuilder response")
    return _json
def fetchsoils(mukeys, dst_dir):
    """
    retrieves soils from a wepppy.webservices.weppsoilbuilder webservice

    Writes one soil file per returned record into *dst_dir* and returns a
    dict mapping each mukey to a SoilSummary built from the response.

    Raises
    ------
    Exception
        On a non-200 HTTP status or an unparsable (non-JSON) response.
    """
    # The original declared ``global _soilbuilder_url`` although the
    # module-level constant is only read, never rebound; removed.
    assert _exists(dst_dir)
    r = requests.post(urljoin(_soilbuilder_url, 'fetchsoils'),
                      params=dict(mukeys=','.join(str(v) for v in mukeys)))
    if r.status_code != 200:
        raise Exception("Encountered error retrieving from weppsoilbuilder")
    # noinspection PyBroadException
    try:
        _json = r.json()
    except Exception:
        _json = None
    if _json is None:
        raise Exception("Cannot parse json from weppsoilbuilder response")
    for data in _json:
        fn = _join(dst_dir, data['FileName'])
        contents = data['FileContents']
        # record where the file was written so SoilSummary can find it
        data['soils_dir'] = dst_dir
        with open(fn, 'w') as fp:
            fp.write(contents)
    return dict(zip(mukeys, [SoilSummary(**data) for data in _json]))
if __name__ == "__main__":
    # Ad-hoc smoke test against the live webservice: validates a mix of
    # mukeys, then downloads their soil files into the current directory.
    from pprint import pprint
    _mukeys = [100000, 100001, 100003, 400016,
               400017, 200153, 200545, 200]
    pprint(validatemukeys(_mukeys))
    fetchsoils(_mukeys, './')
|
"""Representation of Python function headers and calls."""
import collections
import itertools
import logging
from pytype import datatypes
from pytype import utils
from pytype.abstract import abstract_utils
from pytype.pytd import pytd
from pytype.pytd import pytd_utils
log = logging.getLogger(__name__)
_isinstance = abstract_utils._isinstance # pylint: disable=protected-access
_make = abstract_utils._make # pylint: disable=protected-access
def argname(i):
  """Get a name for an unnamed positional argument, given its position."""
  return f"_{i}"
def get_signatures(func):
  """Gets the given function's signatures.

  Dispatches on the concrete abstract-value kind of *func*, recursing
  through wrappers (bound functions, class/static methods, decorators,
  callable instances) until concrete signatures are found.
  """
  if _isinstance(func, "PyTDFunction"):
    return [sig.signature for sig in func.signatures]
  elif _isinstance(func, "InterpreterFunction"):
    return [f.signature for f in func.signature_functions()]
  elif _isinstance(func, "BoundFunction"):
    sigs = get_signatures(func.underlying)
    return [sig.drop_first_parameter() for sig in sigs]  # drop "self"
  elif _isinstance(func, ("ClassMethod", "StaticMethod")):
    return get_signatures(func.method)
  elif _isinstance(func, "SimpleFunction"):
    return [func.signature]
  elif _isinstance(func.cls, "CallableClass"):
    return [Signature.from_callable(func.cls)]
  else:
    unwrapped = abstract_utils.maybe_unwrap_decorated_function(func)
    if unwrapped:
      # Decorated function: collect signatures of everything the decorator
      # may have wrapped.
      return list(itertools.chain.from_iterable(
          get_signatures(f) for f in unwrapped.data))
    if _isinstance(func, "Instance"):
      # Fall back to the instance's __call__, but only if unambiguous.
      _, call_var = func.ctx.attribute_handler.get_attribute(
          func.ctx.root_node, func, "__call__",
          func.to_binding(func.ctx.root_node))
      if call_var and len(call_var.data) == 1:
        return get_signatures(call_var.data[0])
    raise NotImplementedError(func.__class__.__name__)
def _print(t):
  """Return the printable pytd form of *t*'s instance type."""
  instance_type = t.get_instance_type()
  return pytd_utils.Print(instance_type)
class Signature:
"""Representation of a Python function signature.
Attributes:
name: Name of the function.
param_names: A tuple of positional parameter names.
varargs_name: Name of the varargs parameter. (The "args" in *args)
kwonly_params: Tuple of keyword-only parameters. (Python 3)
E.g. ("x", "y") for "def f(a, *, x, y=2)". These do NOT appear in
param_names. Ordered like in the source file.
kwargs_name: Name of the kwargs parameter. (The "kwargs" in **kwargs)
defaults: Dictionary, name to value, for all parameters with default values.
annotations: A dictionary of type annotations. (string to type)
excluded_types: A set of type names that will be ignored when checking the
count of type parameters.
type_params: The set of type parameter names that appear in annotations.
has_return_annotation: Whether the function has a return annotation.
has_param_annotations: Whether the function has parameter annotations.
"""
  def __init__(self, name, param_names, varargs_name, kwonly_params,
               kwargs_name, defaults, annotations,
               postprocess_annotations=True):
    """See the class docstring for the meaning of each attribute.

    postprocess_annotations: when True, *args/**kwargs annotations are
    wrapped into parameterized tuple/dict classes (see
    _postprocess_annotation).
    """
    self.name = name
    self.param_names = param_names
    self.varargs_name = varargs_name
    self.kwonly_params = kwonly_params
    self.kwargs_name = kwargs_name
    self.defaults = defaults
    self.annotations = annotations
    self.excluded_types = set()
    if postprocess_annotations:
      # Wrap varargs/kwargs annotations in place.
      for k, annot in self.annotations.items():
        self.annotations[k] = self._postprocess_annotation(k, annot)
    self.type_params = set()
    for annot in self.annotations.values():
      # Collect the names of all type parameters used in any annotation.
      self.type_params.update(
          p.name for p in annot.ctx.annotation_utils.get_type_parameters(annot))
@property
def has_return_annotation(self):
return "return" in self.annotations
@property
def has_param_annotations(self):
return bool(self.annotations.keys() - {"return"})
  def add_scope(self, module):
    """Add scope for type parameters in annotations."""
    annotations = {}
    for key, val in self.annotations.items():
      # Qualify each annotation's type parameters with *module*'s scope,
      # leaving parameters listed in excluded_types alone.
      annotations[key] = val.ctx.annotation_utils.add_scope(
          val, self.excluded_types, module)
    self.annotations = annotations
  def _postprocess_annotation(self, name, annotation):
    """Postprocess the given annotation."""
    ctx = annotation.ctx
    if name == self.varargs_name:
      # An annotation T on *args means the parameter holds a tuple of T.
      return _make("ParameterizedClass",
                   ctx.convert.tuple_type, {abstract_utils.T: annotation}, ctx)
    elif name == self.kwargs_name:
      # An annotation V on **kwargs means the parameter holds a dict[str, V].
      params = {abstract_utils.K: ctx.convert.str_type,
                abstract_utils.V: annotation}
      return _make("ParameterizedClass", ctx.convert.dict_type, params, ctx)
    else:
      # Ordinary parameters keep their annotation unchanged.
      return annotation
  def set_annotation(self, name, annotation):
    """Set the annotation for *name*, wrapping *args/**kwargs types."""
    self.annotations[name] = self._postprocess_annotation(name, annotation)
  def del_annotation(self, name):
    """Remove the annotation for *name*."""
    del self.annotations[name]  # Raises KeyError if annotation does not exist.
  def check_type_parameter_count(self, stack):
    """Check the count of type parameters in function.

    Reports an invalid-annotation error for any TypeVar that appears only
    once in the signature and has no constraints, bound, or variance.
    """
    c = collections.Counter()
    for annot in self.annotations.values():
      c.update(annot.ctx.annotation_utils.get_type_parameters(annot))
    for param, count in c.items():
      if param.name in self.excluded_types:
        # skip all the type parameters in `excluded_types`
        continue
      if count == 1 and not (param.constraints or param.bound or
                             param.covariant or param.contravariant):
        param.ctx.errorlog.invalid_annotation(
            stack, param, (f"TypeVar {param.name!r} appears only once in the "
                           "function signature"))
def drop_first_parameter(self):
return self._replace(param_names=self.param_names[1:])
def mandatory_param_count(self):
num = len([name
for name in self.param_names if name not in self.defaults])
num += len([name
for name in self.kwonly_params if name not in self.defaults])
return num
def maximum_param_count(self):
if self.varargs_name or self.kwargs_name:
return None
return len(self.param_names) + len(self.kwonly_params)
  @classmethod
  def from_pytd(cls, ctx, name, sig):
    """Construct an abstract signature from a pytd signature."""
    # Collect (name, type) pairs for every declared parameter -- including
    # *args/**kwargs when present -- plus the return type.
    pytd_annotations = [(p.name, p.type)
                        for p in sig.params + (sig.starargs, sig.starstarargs)
                        if p is not None]
    pytd_annotations.append(("return", sig.return_type))

    def param_to_var(p):
      # Convert a pytd parameter's type to a variable used as its default.
      return ctx.convert.constant_to_var(
          p.type, subst=datatypes.AliasingDict(), node=ctx.root_node)

    return cls(
        name=name,
        param_names=tuple(p.name for p in sig.params if not p.kwonly),
        varargs_name=None if sig.starargs is None else sig.starargs.name,
        kwonly_params=tuple(p.name for p in sig.params if p.kwonly),
        kwargs_name=None if sig.starstarargs is None else sig.starstarargs.name,
        defaults={p.name: param_to_var(p) for p in sig.params if p.optional},
        annotations={
            name: ctx.convert.constant_to_value(
                typ, subst=datatypes.AliasingDict(), node=ctx.root_node)
            for name, typ in pytd_annotations
        },
        # pytd annotations are already in their final form.
        postprocess_annotations=False,
    )
@classmethod
def from_callable(cls, val):
annotations = {argname(i): val.formal_type_parameters[i]
for i in range(val.num_args)}
return cls(
name="<callable>",
param_names=tuple(sorted(annotations)),
varargs_name=None,
kwonly_params=(),
kwargs_name=None,
defaults={},
annotations=annotations,
)
@classmethod
def from_param_names(cls, name, param_names, kwonly=False):
"""Construct a minimal signature from a name and a list of param names."""
names = tuple(param_names)
param_names = () if kwonly else names
kwonly_params = names if kwonly else ()
return cls(
name=name,
param_names=param_names,
varargs_name=None,
kwonly_params=kwonly_params,
kwargs_name=None,
defaults={},
annotations={},
)
def has_param(self, name):
return name in self.param_names or name in self.kwonly_params or (
name == self.varargs_name or name == self.kwargs_name)
def insert_varargs_and_kwargs(self, arg_dict):
"""Insert varargs and kwargs from arg_dict into the signature.
Args:
arg_dict: A name->binding dictionary of passed args.
Returns:
A copy of this signature with the passed varargs and kwargs inserted.
"""
varargs_names = []
kwargs_names = []
for name in arg_dict:
if self.has_param(name):
continue
if pytd_utils.ANON_PARAM.match(name):
varargs_names.append(name)
else:
kwargs_names.append(name)
new_param_names = (self.param_names + tuple(sorted(varargs_names)) +
tuple(sorted(kwargs_names)))
return self._replace(param_names=new_param_names)
_ATTRIBUTES = (
set(__init__.__code__.co_varnames[:__init__.__code__.co_argcount]) -
{"self", "postprocess_annotations"})
def _replace(self, **kwargs):
"""Returns a copy of the signature with the specified values replaced."""
assert not set(kwargs) - self._ATTRIBUTES
for attr in self._ATTRIBUTES:
if attr not in kwargs:
kwargs[attr] = getattr(self, attr)
kwargs["postprocess_annotations"] = False
return type(self)(**kwargs)
def iter_args(self, args):
  """Iterates through the given args, attaching names and expected types."""
  for i, posarg in enumerate(args.posargs):
    if i < len(self.param_names):
      # Positional arg matched against a declared parameter.
      name = self.param_names[i]
      yield (name, posarg, self.annotations.get(name))
    elif self.varargs_name and self.varargs_name in self.annotations:
      # Extra positional arg absorbed by an annotated *args; the expected
      # type is the element type of the *args annotation.
      varargs_type = self.annotations[self.varargs_name]
      formal = varargs_type.ctx.convert.get_element_type(varargs_type)
      yield (argname(i), posarg, formal)
    else:
      # Extra positional arg with no usable annotation.
      yield (argname(i), posarg, None)
  for name, namedarg in sorted(args.namedargs.items()):
    formal = self.annotations.get(name)
    if formal is None and self.kwargs_name:
      # Unknown keyword absorbed by **kwargs; use its value type if annotated.
      kwargs_type = self.annotations.get(self.kwargs_name)
      if kwargs_type:
        formal = kwargs_type.ctx.convert.get_element_type(kwargs_type)
    yield (name, namedarg, formal)
  if self.varargs_name is not None and args.starargs is not None:
    yield (self.varargs_name, args.starargs,
           self.annotations.get(self.varargs_name))
  if self.kwargs_name is not None and args.starstarargs is not None:
    yield (self.kwargs_name, args.starstarargs,
           self.annotations.get(self.kwargs_name))
def check_defaults(self, ctx):
  """Reports an error if a non-default param follows a default."""
  seen_default = False
  for name in self.param_names:
    if name in self.defaults:
      seen_default = True
    elif seen_default:
      # Only the first offending parameter is reported.
      msg = (f"In method {self.name}, non-default argument {name} "
             "follows default argument")
      ctx.errorlog.invalid_function_definition(ctx.vm.frames, msg)
      return
def _yield_arguments(self):
"""Yield all the function arguments."""
names = list(self.param_names)
if self.varargs_name:
names.append("*" + self.varargs_name)
elif self.kwonly_params:
names.append("*")
names.extend(sorted(self.kwonly_params))
if self.kwargs_name:
names.append("**" + self.kwargs_name)
for name in names:
base_name = name.lstrip("*")
annot = self._print_annot(base_name)
default = self._print_default(base_name)
yield name + (": " + annot if annot else "") + (
" = " + default if default else "")
def _print_annot(self, name):
  """Return the printed annotation for `name`, or None if unannotated."""
  if name in self.annotations:
    return _print(self.annotations[name])
  return None
def _print_default(self, name):
  """Return the printed default value for `name`, or None if it has none."""
  if name not in self.defaults:
    return None
  values = self.defaults[name].data
  if len(values) == 1:
    return _print(values[0])
  # Multiple bindings print as a union.
  return "Union[%s]" % ", ".join(_print(v) for v in values)
def __repr__(self):
args = ", ".join(self._yield_arguments())
ret = self._print_annot("return")
return "def {name}({args}) -> {ret}".format(
name=self.name, args=args, ret=ret if ret else "Any")
def get_first_arg(self, callargs):
  """Return the value passed for the first positional param, if any."""
  if not self.param_names:
    return None
  return callargs.get(self.param_names[0])
class Args(collections.namedtuple(
    "Args", ["posargs", "namedargs", "starargs", "starstarargs"])):
  """Represents the parameters of a function call."""

  def __new__(cls, posargs, namedargs=None, starargs=None, starstarargs=None):
    """Create arguments for a function under analysis.

    Args:
      posargs: The positional arguments. A tuple of cfg.Variable.
      namedargs: The keyword arguments. A dictionary, mapping strings to
        cfg.Variable.
      starargs: The *args parameter, or None.
      starstarargs: The **kwargs parameter, or None.

    Returns:
      An Args instance.
    """
    assert isinstance(posargs, tuple), posargs
    # NOTE(review): this rebinds the class attribute on every instantiation;
    # it merely aliases the public name `replace` to namedtuple's _replace.
    cls.replace = cls._replace
    return super().__new__(
        cls,
        posargs=posargs,
        namedargs=namedargs or {},
        starargs=starargs,
        starstarargs=starstarargs)

  def has_namedargs(self):
    # namedargs is either a plain dict or an abstract value with a pyval.
    if isinstance(self.namedargs, dict):
      return bool(self.namedargs)
    else:
      return bool(self.namedargs.pyval)

  def has_non_namedargs(self):
    # True if there is anything besides keyword arguments.
    return bool(self.posargs or self.starargs or self.starstarargs)

  def is_empty(self):
    # True if the call passed no arguments at all.
    return not (self.has_namedargs() or self.has_non_namedargs())

  def starargs_as_tuple(self, node, ctx):
    """Return *args as a tuple of variables, or None if not a constant tuple."""
    try:
      args = self.starargs and abstract_utils.get_atomic_python_constant(
          self.starargs, tuple)
    except abstract_utils.ConversionError:
      args = None
    if not args:
      return args
    # Ensure every element has at least one binding.
    return tuple(
        var if var.bindings else ctx.convert.empty.to_variable(node)
        for var in args)

  def starstarargs_as_dict(self):
    """Return **args as a python dict."""
    # NOTE: We can't use get_atomic_python_constant here because starstarargs
    # could have could_contain_anything set.
    if not self.starstarargs or len(self.starstarargs.data) != 1:
      return None
    kwdict, = self.starstarargs.data
    if not _isinstance(kwdict, "Dict"):
      return None
    return kwdict.pyval

  def _expand_typed_star(self, ctx, node, star, count):
    """Convert *xs: Sequence[T] -> [T, T, ...]."""
    if not count:
      return []
    p = abstract_utils.merged_type_parameter(node, star, abstract_utils.T)
    if not p.bindings:
      # TODO(b/159052609): This shouldn't happen. For some reason,
      # namedtuple instances don't have any bindings in T; see
      # tests/test_unpack:TestUnpack.test_unpack_namedtuple.
      return [ctx.new_unsolvable(node) for _ in range(count)]
    return [p.AssignToNewVariable(node) for _ in range(count)]

  def _unpack_and_match_args(self, node, ctx, match_signature, starargs_tuple):
    """Match args against a signature with unpacking.

    Args:
      node: The current CFG node.
      ctx: The abstract context.
      match_signature: The signature to unpack against.
      starargs_tuple: The tuple of variables from starargs_as_tuple.

    Returns:
      A (new posargs tuple, new starargs variable or None) pair.
    """
    posargs = self.posargs
    namedargs = self.namedargs
    # As we have the function signature we will attempt to adjust the
    # starargs into the missing posargs.
    pre = []
    post = []
    stars = collections.deque(starargs_tuple)
    # Peel definite (non-splat) args off both ends of the starargs tuple.
    while stars and not abstract_utils.is_var_splat(stars[0]):
      pre.append(stars.popleft())
    while stars and not abstract_utils.is_var_splat(stars[-1]):
      post.append(stars.pop())
    post.reverse()
    n_matched = len(posargs) + len(pre) + len(post)
    # Count leading params that must be filled positionally.
    required_posargs = 0
    for p in match_signature.param_names:
      if p in namedargs or p in match_signature.defaults:
        break
      required_posargs += 1
    posarg_delta = required_posargs - n_matched
    if stars and not post:
      star = stars[-1]
      if match_signature.varargs_name:
        # If the invocation ends with `*args`, return it to match against *args
        # in the function signature. For f(<k args>, *xs, ..., *ys), transform
        # to f(<k args>, *ys) since ys is an indefinite tuple anyway and will
        # match against all remaining posargs.
        return posargs + tuple(pre), abstract_utils.unwrap_splat(star)
      else:
        # If we do not have a `*args` in match_signature, just expand the
        # terminal splat to as many args as needed and then drop it.
        mid = self._expand_typed_star(ctx, node, star, posarg_delta)
        return posargs + tuple(pre + mid), None
    elif posarg_delta <= len(stars):
      # We have too many args; don't do *xs expansion. Go back to matching from
      # the start and treat every entry in starargs_tuple as length 1.
      n_params = len(match_signature.param_names)
      all_args = posargs + starargs_tuple
      if not match_signature.varargs_name:
        # If the function sig has no *args, return everything in posargs
        pos = _splats_to_any(all_args, ctx)
        return pos, None
      # Don't unwrap splats here because f(*xs, y) is not the same as f(xs, y).
      # TODO(mdemello): Ideally, since we are matching call f(*xs, y) against
      # sig f(x, y) we should raise an error here.
      pos = _splats_to_any(all_args[:n_params], ctx)
      star = []
      for var in all_args[n_params:]:
        if abstract_utils.is_var_splat(var):
          star.append(
              abstract_utils.merged_type_parameter(node, var, abstract_utils.T))
        else:
          star.append(var)
      if star:
        return pos, ctx.convert.tuple_to_value(star).to_variable(node)
      else:
        return pos, None
    elif stars:
      if len(stars) == 1:
        # Special case (<pre>, *xs) and (*xs, <post>) to fill in the type of xs
        # in every remaining arg.
        mid = self._expand_typed_star(ctx, node, stars[0], posarg_delta)
      else:
        # If we have (*xs, <k args>, *ys) remaining, and more than k+2 params to
        # match, don't try to match the intermediate params to any range, just
        # match all k+2 to Any
        mid = [ctx.new_unsolvable(node) for _ in range(posarg_delta)]
      return posargs + tuple(pre + mid + post), None
    else:
      # We have **kwargs but no *args in the invocation
      return posargs + tuple(pre), None

  def simplify(self, node, ctx, match_signature=None):
    """Try to insert part of *args, **kwargs into posargs / namedargs."""
    # TODO(rechen): When we have type information about *args/**kwargs,
    # we need to check it before doing this simplification.
    posargs = self.posargs
    namedargs = self.namedargs
    starargs = self.starargs
    starstarargs = self.starstarargs
    # Unpack starstarargs into namedargs. We need to do this first so we can see
    # what posargs are still required.
    starstarargs_as_dict = self.starstarargs_as_dict()
    if starstarargs_as_dict is not None:
      # Unlike varargs below, we do not adjust starstarargs into namedargs when
      # the function signature has matching param_names because we have not
      # found a benefit in doing so.
      if self.namedargs is None:
        namedargs = starstarargs_as_dict
      else:
        namedargs.update(node, starstarargs_as_dict)
      # We have pulled out all the named args from the function call, so we need
      # to delete them from starstarargs. If the original call contained
      # **kwargs, starstarargs will have could_contain_anything set to True, so
      # preserve it as an abstract dict. If not, we just had named args packed
      # into starstarargs, so set starstarargs to None.
      kwdict = starstarargs.data[0]
      if _isinstance(kwdict, "Dict") and kwdict.could_contain_anything:
        cls = kwdict.cls
        if _isinstance(cls, "PyTDClass"):
          # If cls is not already parameterized with the key and value types, we
          # parameterize it now to preserve them.
          params = {
              name: ctx.convert.merge_classes(
                  kwdict.get_instance_type_parameter(name, node).data)
              for name in (abstract_utils.K, abstract_utils.V)
          }
          cls = _make("ParameterizedClass", ctx.convert.dict_type, params, ctx)
        starstarargs = cls.instantiate(node)
      else:
        starstarargs = None
    starargs_as_tuple = self.starargs_as_tuple(node, ctx)
    if starargs_as_tuple is not None:
      if match_signature:
        posargs, starargs = self._unpack_and_match_args(node, ctx,
                                                        match_signature,
                                                        starargs_as_tuple)
      elif (starargs_as_tuple and
            abstract_utils.is_var_splat(starargs_as_tuple[-1])):
        # If the last arg is an indefinite iterable keep it in starargs. Convert
        # any other splats to Any.
        # TODO(mdemello): If there are multiple splats should we just fall
        # through to the next case (setting them all to Any), and only hit this
        # case for a *single* splat in terminal position?
        posargs = self.posargs + _splats_to_any(starargs_as_tuple[:-1], ctx)
        starargs = abstract_utils.unwrap_splat(starargs_as_tuple[-1])
      else:
        # Don't try to unpack iterables in any other position since we don't
        # have a signature to match. Just set all splats to Any.
        posargs = self.posargs + _splats_to_any(starargs_as_tuple, ctx)
        starargs = None
    return Args(posargs, namedargs, starargs, starstarargs)

  def get_variables(self):
    """Return every variable referenced by this Args object."""
    variables = list(self.posargs) + list(self.namedargs.values())
    if self.starargs is not None:
      variables.append(self.starargs)
    if self.starstarargs is not None:
      variables.append(self.starstarargs)
    return variables

  def replace_posarg(self, pos, val):
    """Return a copy with positional arg `pos` replaced by `val`."""
    new_posargs = self.posargs[:pos] + (val,) + self.posargs[pos + 1:]
    return self._replace(posargs=new_posargs)

  def replace_namedarg(self, name, val):
    """Return a copy with keyword arg `name` replaced by `val`."""
    new_namedargs = dict(self.namedargs)
    new_namedargs[name] = val
    return self._replace(namedargs=new_namedargs)

  def delete_namedarg(self, name):
    """Return a copy with keyword arg `name` removed."""
    new_namedargs = {k: v for k, v in self.namedargs.items() if k != name}
    return self._replace(namedargs=new_namedargs)
class ReturnValueMixin:
  """Mixin for exceptions that hold a return node and variable."""

  def __init__(self):
    super().__init__()
    self.return_node = None
    self.return_variable = None

  def set_return(self, node, var):
    """Record the CFG node and variable produced by the failed call."""
    self.return_node = node
    self.return_variable = var

  def get_return(self, state):
    """Return (state moved to the recorded node, recorded variable)."""
    new_state = state.change_cfg_node(self.return_node)
    return new_state, self.return_variable
# These names are chosen to match pytype error classes.
# pylint: disable=g-bad-exception-name
class FailedFunctionCall(Exception, ReturnValueMixin):
  """Exception for failed function calls."""

  def __gt__(self, other):
    # Any concrete failure outranks "no error recorded yet".
    return other is None
class NotCallable(FailedFunctionCall):
  """For objects that don't have __call__."""

  def __init__(self, obj):
    super().__init__()
    # The abstract value that was called but is not callable.
    self.obj = obj
class UndefinedParameterError(FailedFunctionCall):
  """Function called with an undefined variable."""

  def __init__(self, name):
    super().__init__()
    # The name of the undefined variable.
    self.name = name
class DictKeyMissing(Exception, ReturnValueMixin):
  """When retrieving a key that does not exist in a dict."""

  def __init__(self, name):
    super().__init__()
    # The missing key.
    self.name = name

  def __gt__(self, other):
    # Any concrete failure outranks "no error recorded yet".
    return other is None
# A record of one failed call: the signature tried, the args actually passed
# (as (name, merged value) pairs), and the specific bad parameter, if any.
BadCall = collections.namedtuple("_", ["sig", "passed_args", "bad_param"])
class BadParam(
    collections.namedtuple("_", ["name", "expected", "error_details"])):
  """A parameter that failed to match, and the type it was expected to be."""

  def __new__(cls, name, expected, error_details=None):
    # The override exists solely to make error_details optional.
    return super().__new__(cls, name, expected, error_details)
class InvalidParameters(FailedFunctionCall):
  """Exception for functions called with an incorrect parameter combination."""

  def __init__(self, sig, passed_args, ctx, bad_param=None):
    super().__init__()
    self.name = sig.name
    # Collapse each passed arg's bindings into one value for error reporting.
    merged = [(arg_name, ctx.convert.merge_values(arg.data))
              for arg_name, arg, _ in sig.iter_args(passed_args)]
    self.bad_call = BadCall(sig=sig, passed_args=merged, bad_param=bad_param)
class WrongArgTypes(InvalidParameters):
  """For functions that were called with the wrong types."""

  def __gt__(self, other):
    if other is None:
      return True
    if not isinstance(other, WrongArgTypes):
      # WrongArgTypes should take precedence over other FailedFunctionCall
      # subclasses but not over unrelated errors like DictKeyMissing.
      return isinstance(other, FailedFunctionCall)

    def starcount(err):
      # The signature that has fewer *args/**kwargs tends to be more precise.
      sig = err.bad_call.sig
      return bool(sig.varargs_name) + bool(sig.kwargs_name)

    return starcount(self) < starcount(other)
class WrongArgCount(InvalidParameters):
  """Raised when a call passes the wrong number of arguments.

  E.g. if a function expecting 4 parameters is called with 3.
  """
class WrongKeywordArgs(InvalidParameters):
  """E.g. an arg "x" is passed to a function that doesn't have an "x" param."""

  def __init__(self, sig, passed_args, ctx, extra_keywords):
    super().__init__(sig, passed_args, ctx)
    # The keyword names that the signature does not accept.
    self.extra_keywords = tuple(extra_keywords)
class DuplicateKeyword(InvalidParameters):
  """E.g. an arg "x" is passed to a function as both a posarg and a kwarg."""

  def __init__(self, sig, passed_args, ctx, duplicate):
    super().__init__(sig, passed_args, ctx)
    # The name that was supplied twice.
    self.duplicate = duplicate
class MissingParameter(InvalidParameters):
  """E.g. a function requires parameter 'x' but 'x' isn't passed."""

  def __init__(self, sig, passed_args, ctx, missing_parameter):
    super().__init__(sig, passed_args, ctx)
    # The name of the required parameter that was not supplied.
    self.missing_parameter = missing_parameter
# pylint: enable=g-bad-exception-name
class Mutation(collections.namedtuple("_", ["instance", "name", "value"])):
  """A mutation of type parameter `name` of `instance` to `value`.

  Equality ignores binding order: two mutations are equal when they mutate
  the same instance attribute to the same *set* of values.
  """

  def __eq__(self, other):
    if not isinstance(other, Mutation):
      # Bug fix: previously a non-Mutation operand raised AttributeError when
      # accessing other.instance; returning NotImplemented lets Python fall
      # back to the reflected comparison instead.
      return NotImplemented
    return (self.instance == other.instance and
            self.name == other.name and
            frozenset(self.value.data) == frozenset(other.value.data))

  def __ne__(self, other):
    # Bug fix: without this override, tuple.__ne__ is inherited and compares
    # the fields directly, so `a != b` could be True while `a == b` is also
    # True. Keep __ne__ consistent with __eq__.
    result = self.__eq__(other)
    return result if result is NotImplemented else not result

  def __hash__(self):
    return hash((self.instance, self.name, frozenset(self.value.data)))
class PyTDSignature(utils.ContextWeakrefMixin):
  """A PyTD function type (signature).

  This represents instances of functions with specific arguments and return
  type.
  """

  def __init__(self, name, pytd_sig, ctx):
    super().__init__(ctx)
    self.name = name
    self.pytd_sig = pytd_sig
    # Abstract values for each declared parameter type.
    self.param_types = [
        self.ctx.convert.constant_to_value(
            p.type, subst=datatypes.AliasingDict(), node=self.ctx.root_node)
        for p in self.pytd_sig.params
    ]
    self.signature = Signature.from_pytd(ctx, name, pytd_sig)

  def _map_args(self, args, view):
    """Map the passed arguments to a name->binding dictionary.

    Args:
      args: The passed arguments.
      view: A variable->binding dictionary.

    Returns:
      A tuple of:
        a list of formal arguments, each a (name, abstract value) pair;
        a name->binding dictionary of the passed arguments.

    Raises:
      InvalidParameters: If the passed arguments don't match this signature.
    """
    formal_args = [(p.name, self.signature.annotations[p.name])
                   for p in self.pytd_sig.params]
    arg_dict = {}
    # positional args
    for name, arg in zip(self.signature.param_names, args.posargs):
      arg_dict[name] = view[arg]
    num_expected_posargs = len(self.signature.param_names)
    if len(args.posargs) > num_expected_posargs and not self.pytd_sig.starargs:
      raise WrongArgCount(self.signature, args, self.ctx)
    # Extra positional args are passed via the *args argument.
    varargs_type = self.signature.annotations.get(self.signature.varargs_name)
    if _isinstance(varargs_type, "ParameterizedClass"):
      for (i, vararg) in enumerate(args.posargs[num_expected_posargs:]):
        name = argname(num_expected_posargs + i)
        arg_dict[name] = view[vararg]
        # Each extra posarg must match the element type of *args.
        formal_args.append(
            (name, varargs_type.get_formal_type_parameter(abstract_utils.T)))
    # named args
    for name, arg in args.namedargs.items():
      if name in arg_dict:
        raise DuplicateKeyword(self.signature, args, self.ctx, name)
      arg_dict[name] = view[arg]
    extra_kwargs = set(args.namedargs) - {p.name for p in self.pytd_sig.params}
    if extra_kwargs and not self.pytd_sig.starstarargs:
      raise WrongKeywordArgs(self.signature, args, self.ctx, extra_kwargs)
    # Extra keyword args are passed via the **kwargs argument.
    kwargs_type = self.signature.annotations.get(self.signature.kwargs_name)
    if _isinstance(kwargs_type, "ParameterizedClass"):
      # We sort the kwargs so that matching always happens in the same order.
      for name in sorted(extra_kwargs):
        formal_args.append(
            (name, kwargs_type.get_formal_type_parameter(abstract_utils.V)))
    # packed args
    packed_args = [("starargs", self.signature.varargs_name),
                   ("starstarargs", self.signature.kwargs_name)]
    for arg_type, name in packed_args:
      actual = getattr(args, arg_type)
      pytd_val = getattr(self.pytd_sig, arg_type)
      if actual and pytd_val:
        arg_dict[name] = view[actual]
        # The annotation is Tuple or Dict, but the passed arg only has to be
        # Iterable or Mapping.
        typ = self.ctx.convert.widen_type(self.signature.annotations[name])
        formal_args.append((name, typ))
    return formal_args, arg_dict

  def _fill_in_missing_parameters(self, node, args, arg_dict):
    """Fill unmatched required params from *args/**kwargs, or raise."""
    for p in self.pytd_sig.params:
      if p.name not in arg_dict:
        if (not p.optional and args.starargs is None and
            args.starstarargs is None):
          raise MissingParameter(self.signature, args, self.ctx, p.name)
        # Assume the missing parameter is filled in by *args or **kwargs.
        # Unfortunately, we can't easily use *args or **kwargs to fill in
        # something more precise, since we need a Value, not a Variable.
        arg_dict[p.name] = self.ctx.convert.unsolvable.to_binding(node)

  def substitute_formal_args(self, node, args, view, alias_map):
    """Substitute matching args into this signature. Used by PyTDFunction."""
    formal_args, arg_dict = self._map_args(args, view)
    self._fill_in_missing_parameters(node, args, arg_dict)
    subst, bad_arg = self.ctx.matcher(node).compute_subst(
        formal_args, arg_dict, view, alias_map)
    if subst is None:
      # Matching failed: report the error against a signature that actually
      # contains the offending argument name.
      if self.signature.has_param(bad_arg.name):
        signature = self.signature
      else:
        signature = self.signature.insert_varargs_and_kwargs(arg_dict)
      raise WrongArgTypes(signature, args, self.ctx, bad_param=bad_arg)
    if log.isEnabledFor(logging.DEBUG):
      log.debug("Matched arguments against sig%s",
                pytd_utils.Print(self.pytd_sig))
    for nr, p in enumerate(self.pytd_sig.params):
      log.info("param %d) %s: %s <=> %s", nr, p.name, p.type, arg_dict[p.name])
    for name, var in sorted(subst.items()):
      log.debug("Using %s=%r %r", name, var, var.data)
    return arg_dict, subst

  def instantiate_return(self, node, subst, sources):
    """Instantiate the return type under the given substitution."""
    return_type = self.pytd_sig.return_type
    # Type parameter values, which are instantiated by the matcher, will end up
    # in the return value. Since the matcher does not call __init__, we need to
    # do that now. The one exception is that Type[X] does not instantiate X, so
    # we do not call X.__init__.
    if return_type.name != "builtins.type":
      for param in pytd_utils.GetTypeParameters(return_type):
        if param.full_name in subst:
          node = self.ctx.vm.call_init(node, subst[param.full_name])
    try:
      ret = self.ctx.convert.constant_to_var(
          abstract_utils.AsReturnValue(return_type),
          subst,
          node,
          source_sets=[sources])
    except self.ctx.convert.TypeParameterError:
      # The return type contains a type parameter without a substitution.
      subst = subst.copy()
      for t in pytd_utils.GetTypeParameters(return_type):
        if t.full_name not in subst:
          subst[t.full_name] = self.ctx.convert.empty.to_variable(node)
      return node, self.ctx.convert.constant_to_var(
          abstract_utils.AsReturnValue(return_type),
          subst,
          node,
          source_sets=[sources])
    if not ret.bindings and isinstance(return_type, pytd.TypeParameter):
      ret.AddBinding(self.ctx.convert.empty, [], node)
    return node, ret

  def call_with_args(self, node, func, arg_dict,
                     subst, ret_map, alias_map=None):
    """Call this signature. Used by PyTDFunction."""
    # Return values are cached per (return type, subst) in ret_map.
    t = (self.pytd_sig.return_type, subst)
    sources = [func] + list(arg_dict.values())
    if t not in ret_map:
      node, ret_map[t] = self.instantiate_return(node, subst, sources)
    else:
      # add the new sources
      for data in ret_map[t].data:
        ret_map[t].AddBinding(data, sources, node)
    mutations = self._get_mutation(node, arg_dict, subst, ret_map[t])
    self.ctx.vm.trace_call(
        node, func, (self,),
        tuple(arg_dict[p.name] for p in self.pytd_sig.params), {}, ret_map[t])
    return node, ret_map[t], mutations

  @classmethod
  def _collect_mutated_parameters(cls, typ, mutated_type):
    """Yield (template item, new parameter) pair groups for a mutation."""
    if (isinstance(typ, pytd.UnionType) and
        isinstance(mutated_type, pytd.UnionType)):
      if len(typ.type_list) != len(mutated_type.type_list):
        raise ValueError(
            "Type list lengths do not match:\nOld: %s\nNew: %s" %
            (typ.type_list, mutated_type.type_list))
      # Unions are mutated member-wise, in order.
      return itertools.chain.from_iterable(
          cls._collect_mutated_parameters(t1, t2)
          for t1, t2 in zip(typ.type_list, mutated_type.type_list))
    if typ == mutated_type and isinstance(typ, pytd.ClassType):
      return []  # no mutation needed
    if (not isinstance(typ, pytd.GenericType) or
        not isinstance(mutated_type, pytd.GenericType) or
        typ.base_type != mutated_type.base_type or
        not isinstance(typ.base_type, pytd.ClassType)):
      raise ValueError("Unsupported mutation:\n%r ->\n%r" %
                       (typ, mutated_type))
    return [zip(mutated_type.base_type.cls.template, mutated_type.parameters)]

  def _get_mutation(self, node, arg_dict, subst, retvar):
    """Mutation for changing the type parameters of mutable arguments.

    This will adjust the type parameters as needed for pytd functions like:
      def append_float(x: list[int]):
        x = list[int or float]
    This is called after all the signature matching has succeeded, and we
    know we're actually calling this function.

    Args:
      node: The current CFG node.
      arg_dict: A map of strings to pytd.Bindings instances.
      subst: Current type parameters.
      retvar: A variable of the return value.

    Returns:
      A list of Mutation instances.

    Raises:
      ValueError: If the pytd contains invalid information for mutated params.
    """
    # Handle mutable parameters using the information type parameters
    mutations = []
    # It's possible that the signature contains type parameters that are used
    # in mutations but are not filled in by the arguments, e.g. when starargs
    # and starstarargs have type parameters but are not in the args. Check that
    # subst has an entry for every type parameter, adding any that are missing.
    if any(f.mutated_type for f in self.pytd_sig.params):
      subst = subst.copy()
      for t in pytd_utils.GetTypeParameters(self.pytd_sig):
        if t.full_name not in subst:
          subst[t.full_name] = self.ctx.convert.empty.to_variable(node)
    for formal in self.pytd_sig.params:
      actual = arg_dict[formal.name]
      arg = actual.data
      if (formal.mutated_type is not None and _isinstance(arg, "SimpleValue")):
        try:
          all_names_actuals = self._collect_mutated_parameters(
              formal.type, formal.mutated_type)
        except ValueError as e:
          log.error("Old: %s", pytd_utils.Print(formal.type))
          log.error("New: %s", pytd_utils.Print(formal.mutated_type))
          log.error("Actual: %r", actual)
          raise ValueError("Mutable parameters setting a type to a "
                           "different base type is not allowed.") from e
        for names_actuals in all_names_actuals:
          for tparam, type_actual in names_actuals:
            log.info("Mutating %s to %s",
                     tparam.name,
                     pytd_utils.Print(type_actual))
            type_actual_val = self.ctx.convert.constant_to_var(
                abstract_utils.AsInstance(type_actual),
                subst,
                node,
                discard_concrete_values=True)
            mutations.append(Mutation(arg, tparam.full_name, type_actual_val))
    if self.name == "__new__":
      # This is a constructor, so check whether the constructed instance needs
      # to be mutated.
      for ret in retvar.data:
        if ret.cls.full_name != "builtins.type":
          for t in ret.cls.template:
            if t.full_name in subst:
              mutations.append(Mutation(ret, t.full_name, subst[t.full_name]))
    return mutations

  def get_positional_names(self):
    """Return the names of all non-keyword-only parameters."""
    return [p.name for p in self.pytd_sig.params
            if not p.kwonly]

  def set_defaults(self, defaults):
    """Set signature's default arguments. Requires rebuilding PyTD signature.

    Args:
      defaults: An iterable of function argument defaults.

    Returns:
      Self with an updated signature.
    """
    defaults = list(defaults)
    params = []
    # Walk the params right-to-left, marking one optional per default.
    for param in reversed(self.pytd_sig.params):
      if defaults:
        defaults.pop()  # Discard the default. Unless we want to update type?
        params.append(pytd.Parameter(
            name=param.name,
            type=param.type,
            kwonly=param.kwonly,
            optional=True,
            mutated_type=param.mutated_type
        ))
      else:
        params.append(pytd.Parameter(
            name=param.name,
            type=param.type,
            kwonly=param.kwonly,
            optional=False,  # Reset any previously-set defaults
            mutated_type=param.mutated_type
        ))
    new_sig = pytd.Signature(
        params=tuple(reversed(params)),
        starargs=self.pytd_sig.starargs,
        starstarargs=self.pytd_sig.starstarargs,
        return_type=self.pytd_sig.return_type,
        exceptions=self.pytd_sig.exceptions,
        template=self.pytd_sig.template
    )
    # Now update self
    self.pytd_sig = new_sig
    self.param_types = [
        self.ctx.convert.constant_to_value(
            p.type, subst=datatypes.AliasingDict(), node=self.ctx.root_node)
        for p in self.pytd_sig.params
    ]
    self.signature = Signature.from_pytd(self.ctx, self.name, self.pytd_sig)
    return self

  def __repr__(self):
    return pytd_utils.Print(self.pytd_sig)
def _splats_to_any(seq, ctx):
  """Replace every splatted variable in `seq` with an unsolvable."""
  result = []
  for var in seq:
    if abstract_utils.is_var_splat(var):
      result.append(ctx.new_unsolvable(ctx.root_node))
    else:
      result.append(var)
  return tuple(result)
def call_function(ctx,
                  node,
                  func_var,
                  args,
                  fallback_to_unsolvable=True,
                  allow_noreturn=False):
  """Call a function.

  Args:
    ctx: The abstract context.
    node: The current CFG node.
    func_var: A variable of the possible functions to call.
    args: The arguments to pass. See function.Args.
    fallback_to_unsolvable: If the function call fails, create an unknown.
    allow_noreturn: Whether typing.NoReturn is allowed in the return type.

  Returns:
    A tuple (CFGNode, Variable). The Variable is the return value.

  Raises:
    DictKeyMissing: if we retrieved a nonexistent key from a dict and
      fallback_to_unsolvable is False.
    FailedFunctionCall: if the call fails and fallback_to_unsolvable is False.
  """
  assert func_var.bindings
  result = ctx.program.NewVariable()
  nodes = []
  error = None
  has_noreturn = False
  # Try every possible function binding; collect successes and keep only the
  # highest-priority error (see the __gt__ implementations on the errors).
  for funcb in func_var.bindings:
    func = funcb.data
    one_result = None
    try:
      new_node, one_result = func.call(node, funcb, args)
    except (DictKeyMissing, FailedFunctionCall) as e:
      if e > error:
        error = e
    else:
      if ctx.convert.no_return in one_result.data:
        if allow_noreturn:
          # Make sure NoReturn was the only thing returned.
          assert len(one_result.data) == 1
          has_noreturn = True
        else:
          # Drop the NoReturn bindings; keep everything else.
          for b in one_result.bindings:
            if b.data != ctx.convert.no_return:
              result.PasteBinding(b)
      else:
        result.PasteVariable(one_result, new_node, {funcb})
      nodes.append(new_node)
  if nodes:
    # At least one binding succeeded.
    node = ctx.join_cfg_nodes(nodes)
    if not result.bindings:
      v = ctx.convert.no_return if has_noreturn else ctx.convert.unsolvable
      result.AddBinding(v, [], node)
  elif (isinstance(error, FailedFunctionCall) and
        all(abstract_utils.func_name_is_class_init(func.name)
            for func in func_var.data)):
    # If the function failed with a FailedFunctionCall exception, try calling
    # it again with fake arguments. This allows for calls to __init__ to
    # always succeed, ensuring pytype has a full view of the class and its
    # attributes. If the call still fails, call_with_fake_args will return
    # abstract.Unsolvable.
    node, result = ctx.vm.call_with_fake_args(node, func_var)
  elif ctx.options.precise_return and len(func_var.bindings) == 1:
    # Even though the call failed, we can derive a precise return type from
    # the (single) signature's declared return annotation.
    funcb, = func_var.bindings
    func = funcb.data
    if _isinstance(func, "BoundFunction"):
      func = func.underlying
    if _isinstance(func, "PyTDFunction"):
      node, result = func.signatures[0].instantiate_return(node, {}, [funcb])
    elif _isinstance(func, "InterpreterFunction"):
      sig = func.signature_functions()[0].signature
      ret = sig.annotations.get("return", ctx.convert.unsolvable)
      node, result = ctx.vm.init_class(node, ret)
    else:
      result = ctx.new_unsolvable(node)
  else:
    result = ctx.new_unsolvable(node)
  ctx.vm.trace_opcode(None, func_var.data[0].name.rpartition(".")[-1],
                      (func_var, result))
  if nodes:
    return node, result
  elif fallback_to_unsolvable:
    # DictKeyMissing is not reported as a function-call error.
    if not isinstance(error, DictKeyMissing):
      ctx.errorlog.invalid_function_call(ctx.vm.stack(func_var.data[0]), error)
    return node, result
  else:
    # We were called by something that does its own error handling.
    assert error
    error.set_return(node, result)
    raise error  # pylint: disable=raising-bad-type
def match_all_args(ctx, node, func, args):
  """Call match_args multiple times to find all type errors.

  Args:
    ctx: The abstract context.
    node: The current CFG node.
    func: An abstract function
    args: An Args object to match against func

  Returns:
    A tuple of (new_args, errors)
      where new_args = args with all incorrectly typed values set to Any
            errors = a list of [(type mismatch error, arg name, value)]

  Reraises any error that is not function.InvalidParameters
  """
  positional_names = func.get_positional_names()
  needs_checking = True
  errors = []
  # Repeatedly match, neutralizing one offending arg per iteration, until
  # matching succeeds; every error found along the way is recorded.
  while needs_checking:
    try:
      func.match_args(node, args)
    except FailedFunctionCall as e:
      if isinstance(e, WrongKeywordArgs):
        errors.append((e, e.extra_keywords[0], None))
        for i in e.extra_keywords:
          args = args.delete_namedarg(i)
      elif isinstance(e, DuplicateKeyword):
        errors.append((e, e.duplicate, None))
        args = args.delete_namedarg(e.duplicate)
      elif isinstance(e, MissingParameter):
        errors.append((e, e.missing_parameter, None))
        args = args.replace_namedarg(
            e.missing_parameter, ctx.new_unsolvable(node))
      elif isinstance(e, WrongArgTypes):
        arg_name = e.bad_call.bad_param.name
        for name, value in e.bad_call.passed_args:
          if name != arg_name:
            continue
          errors.append((e, name, value))
          # Replace the mismatched value with Any, positionally if possible.
          try:
            pos = positional_names.index(name)
          except ValueError:
            args = args.replace_namedarg(name, ctx.new_unsolvable(node))
          else:
            args = args.replace_posarg(pos, ctx.new_unsolvable(node))
          break
        else:
          raise AssertionError(
              "Mismatched parameter %s not found in passed_args" %
              arg_name) from e
      else:
        # This is not an InvalidParameters error.
        raise
    else:
      needs_checking = False
  return args, errors
|
#################
## Created by Engin Cukuroglu
#################
import os,sys,time
import multiprocessing
from multiprocessing import Queue, Process
import subprocess
import socket
from codesOfTools import multiprotSolutionReader, vdwInterfaceDictionaryCreator
def multiprotRunListWork(taskQueue_multiprotRunList, multiprotRunResultQueue, multiprotRunFileDirectory, multiprotRunForProcessesFileDirectory, multiprotInterfaceListFileDirectory, multiprotListSuffix, multiprotResultsForInterfaceFilesDirectory, multiprotResultsSuffix, interfaceFilesDirectory, interfaceSkeletonFilesDirectory, multiprotLogFileDirectory, i):
    """Worker-process loop: run MultiProt for each interface pulled off the queue.

    Consumes (interface, interfaceIndex, interfaceSize, skeletonSize,
    numberOfMultiprotRuns) tuples from taskQueue_multiprotRunList until a
    sentinel with interface == None arrives. For each interface it runs the
    MultiProt binary against every partner listed in the interface's list
    file, scores each structural alignment, appends the scores to the
    per-interface results file and pushes one tab-separated summary line
    onto multiprotRunResultQueue.

    Runs inside its own scratch directory (process_<i>) because MultiProt
    writes fixed-name output files (2_sol.res, 2_sets.res,
    log_multiprot.txt) into the current working directory.

    NOTE(review): this module appears to be Python-2-era code (int-vs-str
    comparisons and bytes/str mixing below would misbehave on Python 3);
    confirm the target interpreter.
    """
    # Private scratch directory for this worker; all '../../' paths below
    # are relative to it after the chdir.
    processWorkingDirectory = '%s/process_%s' %(multiprotRunForProcessesFileDirectory, i)
    if not os.path.exists(processWorkingDirectory):
        os.system('mkdir %s' %(processWorkingDirectory))
    os.chdir(processWorkingDirectory)
    while True:
        interface, interfaceIndex, interfaceSize, skeletonSize, numberOfMultiprotRuns = taskQueue_multiprotRunList.get()
        taskQueue_multiprotRunList.task_done()
        if interface == None:
            # Sentinel: no more work for this worker.
            break
        if numberOfMultiprotRuns > 0:
            # Results are rewritten via a temp file so previously stored
            # rows survive a crash mid-interface.
            interfaceMultiprotResultsFileDirectory = '../../%s/%s_%s' %(multiprotResultsForInterfaceFilesDirectory, interface, multiprotResultsSuffix)
            tempInterfaceMultiprotResultsFileDirectory = '../../%s/%s_%s_temp' %(multiprotResultsForInterfaceFilesDirectory, interface, multiprotResultsSuffix)
            interfaceMultiprotListFileDirectory = '../../%s/%s_%s' %(multiprotInterfaceListFileDirectory, interface, multiprotListSuffix)
            ##### start changes for yunus cluster #####
            # NOTE(review): this immediately overwrites the relative path
            # above with an absolute/unprefixed one — the previous
            # assignment is dead code kept for the other cluster layout.
            interfaceMultiprotListFileDirectory = '%s/%s_%s' %(multiprotInterfaceListFileDirectory, interface, multiprotListSuffix)
            ##### end changes for yunus cluster #####
            if os.path.exists(tempInterfaceMultiprotResultsFileDirectory):
                os.system('rm %s' %(tempInterfaceMultiprotResultsFileDirectory))
            # Map of partner interfaces that already have a stored result
            # (column 1 of each results row) so they are not re-run.
            interfaceMultiprotResultsDict = {}
            if os.path.exists(interfaceMultiprotResultsFileDirectory):
                interfaceMultiprotResultsFile = open(interfaceMultiprotResultsFileDirectory, 'r')
                tempInterfaceMultiprotResultsFile = open(tempInterfaceMultiprotResultsFileDirectory, 'w')
                for interfaceMultiprotResults in interfaceMultiprotResultsFile:
                    tempInterfaceMultiprotResultsFile.write(interfaceMultiprotResults)
                    splittedInterfaceMultiprotResults = interfaceMultiprotResults.strip().split('\t')
                    interfaceMultiprotResultsDict[splittedInterfaceMultiprotResults[1]] = 1
                interfaceMultiprotResultsFile.close()
                tempInterfaceMultiprotResultsFile.close()
            if os.path.exists(interfaceMultiprotListFileDirectory):
                interfaceMultiprotListFile = open(interfaceMultiprotListFileDirectory, 'r')
                tempInterfaceMultiprotResultsFile = open(tempInterfaceMultiprotResultsFileDirectory, 'a')
                multiprotErrorString = ''
                multiprotStoredRunsCounter = 0
                multiprotSuccessfulRunsCounter = 0
                multiprotUnsuccessfulRunsCounter = 0
                for interfaceMultiprotEntry in interfaceMultiprotListFile:
                    # List-file columns: [1]=partner interface,
                    # [3]=partner interface size, [5]=partner skeleton size.
                    splittedInterfaceMultiprotEntry = interfaceMultiprotEntry.strip().split('\t')
                    tempInterface = splittedInterfaceMultiprotEntry[1]
                    tempInterfaceSize = splittedInterfaceMultiprotEntry[3]
                    tempSkeletonSize = splittedInterfaceMultiprotEntry[5]
                    if not tempInterface in interfaceMultiprotResultsDict:
                        # Clear stale MultiProt outputs from a previous run.
                        if os.path.exists('2_sol.res'):
                            os.system('rm 2_sol.res')
                        if os.path.exists('2_sets.res'):
                            os.system('rm 2_sets.res')
                        if os.path.exists('log_multiprot.txt'):
                            os.system('rm log_multiprot.txt')
                        # Socketpairs with short timeouts are used instead of
                        # pipes so polling the child's stderr cannot block.
                        stderr = socket.socketpair()
                        stderr[0].settimeout(0.01)
                        stdout = socket.socketpair()
                        stdout[0].settimeout(0.01)
                        errMessage = ''
                        multiprotRunDirectory_forProcess = '../../%s' %(multiprotRunFileDirectory)
                        skeleton_1_pdbDirectory = '../../%s/%s.pdb' %(interfaceSkeletonFilesDirectory, interface)
                        skeleton_2_pdbDirectory = '../../%s/%s.pdb' %(interfaceSkeletonFilesDirectory, tempInterface)
                        proc = subprocess.Popen([multiprotRunDirectory_forProcess, skeleton_1_pdbDirectory, skeleton_2_pdbDirectory], stdout=stdout[1], stderr=stderr[1], close_fds=True)
                        err = u''
                        # Poll the child until it exits, draining stderr.
                        # NOTE(review): errtmp is never folded into err, so
                        # the >4096 kill guard and the `if err:` failure
                        # branch below can never trigger — a line like
                        # `err += errtmp` seems to be missing; confirm.
                        while True:
                            proc.poll()
                            try:
                                errtmp = stderr[0].recv(4096)
                            except socket.timeout as exc:
                                errtmp = ''
                            if len(err) > 4096:
                                # Kill children that spam stderr (runaway).
                                proc.kill()
                                proc.wait()
                            if proc.returncode != None:
                                returnCode = proc.returncode
                                break
                        if err:
                            # Record the stderr text; the run is counted as
                            # unsuccessful and reported via the error log.
                            multiprotErrorString = '%sError: %s and %s.\t%s\n' %(multiprotErrorString, interface, tempInterface, err.strip())
                            multiprotUnsuccessfulRunsCounter = multiprotUnsuccessfulRunsCounter + 1
                        else:
                            if os.path.exists('2_sol.res'):
                                # Parse the alignment and score how much of
                                # it falls inside the actual interfaces.
                                alignmentDict, referenceMonomer, alignedMonomer, rmsd, transMatrix = multiprotSolutionReader('2_sol.res', 0)
                                interface_1_pdbDirectory = '../../%s/%s.pdb' %(interfaceFilesDirectory, interface)
                                interface_2_pdbDirectory = '../../%s/%s.pdb' %(interfaceFilesDirectory, tempInterface)
                                interface_1_dict = vdwInterfaceDictionaryCreator(interface_1_pdbDirectory)
                                interface_2_dict = vdwInterfaceDictionaryCreator(interface_2_pdbDirectory)
                                interfaceMatchingSize = 0
                                skeletonMatchingSize = 0
                                # MultiProt may report either structure as the
                                # reference; pick the dictionary orientation
                                # accordingly.
                                if referenceMonomer == interface:
                                    for referenceDictKey in alignmentDict:
                                        skeletonMatchingSize = skeletonMatchingSize + 1
                                        if referenceDictKey in interface_1_dict:
                                            if alignmentDict[referenceDictKey] in interface_2_dict:
                                                interfaceMatchingSize = interfaceMatchingSize + 1
                                else:
                                    for referenceDictKey in alignmentDict:
                                        skeletonMatchingSize = skeletonMatchingSize + 1
                                        if referenceDictKey in interface_2_dict:
                                            if alignmentDict[referenceDictKey] in interface_1_dict:
                                                interfaceMatchingSize = interfaceMatchingSize + 1
                                # Ratios are normalized by the smaller partner.
                                # NOTE(review): tempInterfaceSize/tempSkeletonSize
                                # are still strings here, so these comparisons
                                # are int-vs-str — int(...) was presumably
                                # intended; confirm.
                                if interfaceSize < tempInterfaceSize:
                                    minInterfaceSize = interfaceSize
                                else:
                                    minInterfaceSize = tempInterfaceSize
                                if skeletonSize < tempSkeletonSize:
                                    minSkeletonSize = skeletonSize
                                else:
                                    minSkeletonSize = tempSkeletonSize
                                interfaceMatchingRatio = float(interfaceMatchingSize) / minInterfaceSize
                                skeletonMatchingRatio = float(skeletonMatchingSize) / minSkeletonSize
                                tempInterfaceMultiprotResultsFile.write('%s\t%d\t%d\t%.2f\t%.2f\t%.2f\t%s\n' %(interfaceMultiprotEntry.strip(), interfaceMatchingSize, skeletonMatchingSize, interfaceMatchingRatio, skeletonMatchingRatio, rmsd, transMatrix))
                                multiprotSuccessfulRunsCounter = multiprotSuccessfulRunsCounter + 1
                            else:
                                # Child produced no solution file.
                                multiprotUnsuccessfulRunsCounter = multiprotUnsuccessfulRunsCounter + 1
                        # Clean up MultiProt outputs for the next pair.
                        if os.path.exists('2_sol.res'):
                            os.system('rm 2_sol.res')
                        if os.path.exists('2_sets.res'):
                            os.system('rm 2_sets.res')
                        if os.path.exists('log_multiprot.txt'):
                            os.system('rm log_multiprot.txt')
                    else:
                        # Result already on disk from an earlier run.
                        multiprotStoredRunsCounter = multiprotStoredRunsCounter + 1
                interfaceMultiprotListFile.close()
                tempInterfaceMultiprotResultsFile.close()
                # Atomically promote the temp file to the real results file.
                os.system('mv %s %s' %(tempInterfaceMultiprotResultsFileDirectory, interfaceMultiprotResultsFileDirectory))
                if not multiprotErrorString == '':
                    # NOTE(review): this log file handle is never closed;
                    # the data may not be flushed until process exit.
                    interfaceMultiprotLogFileDirectory = '../../%s/%s_multiprotErrorLogs.txt' %(multiprotLogFileDirectory, interface)
                    interfaceMultiprotLogFile = open(interfaceMultiprotLogFileDirectory, 'w')
                    interfaceMultiprotLogFile.write(multiprotErrorString)
                # Status 0 = processed; columns: stored/successful/unsuccessful.
                multiprotRunResultString = '%s\t0\t%d\t%d\t%d\t%d\t%d\n' %(interface, interfaceIndex, numberOfMultiprotRuns, multiprotStoredRunsCounter, multiprotSuccessfulRunsCounter, multiprotUnsuccessfulRunsCounter)
                multiprotRunResultQueue.put(multiprotRunResultString)
            else:
                # Status 2 = list file missing.
                multiprotRunResultString = '%s\t2\t%d\t%d\tError: The %s does not exist\n' %(interface, interfaceIndex, numberOfMultiprotRuns, interfaceMultiprotListFileDirectory)
                multiprotRunResultQueue.put(multiprotRunResultString)
        else:
            # Nothing to run for this interface; report zero counters.
            multiprotRunResultString = '%s\t0\t%d\t%d\t0\t0\t0\n' %(interface, interfaceIndex, numberOfMultiprotRuns)
            multiprotRunResultQueue.put(multiprotRunResultString)
def mainMultiprotDriver(fullMultiprotStatisticsFileDirectory, interfaceStartIndex, interfaceEndIndex, multiprotRunFileDirectory, multiprotRunForProcessesFileDirectory, multiprotInterfaceListFileDirectory, multiprotListSuffix, multiprotResultsForInterfaceFilesDirectory, multiprotResultsSuffix, multiprotRunListLogFileDirectory, multiprotRunListResultFileDirectory, interfaceFilesDirectory, interfaceSkeletonFilesDirectory, multiprotLogFileDirectory, numberOfProcesses):
    """Driver for the MultiProt pipeline.

    Reads the statistics file, selects the interfaces whose index falls in
    [interfaceStartIndex, interfaceEndIndex] (a negative bound leaves that
    side open), fans the selected interfaces out to numberOfProcesses
    multiprotRunListWork worker processes and collects one summary line per
    interface into the result file.
    """
    print('\n* MULTIPROT DRIVER STARTED *\n')
    print('Time stamp: %s' %(time.asctime()))
    t1 = time.time()
    # Validate inputs and (re)create the scratch/output directories.
    if not os.path.exists(fullMultiprotStatisticsFileDirectory):
        sys.exit('\nThe %s does not exist.\n' %(fullMultiprotStatisticsFileDirectory))
    if not os.path.exists(multiprotResultsForInterfaceFilesDirectory):
        os.system('mkdir %s' %(multiprotResultsForInterfaceFilesDirectory))
    if os.path.exists(multiprotRunForProcessesFileDirectory):
        # Always start from a clean per-process scratch tree.
        os.system('rm -r %s' %(multiprotRunForProcessesFileDirectory))
        os.system('mkdir %s' %(multiprotRunForProcessesFileDirectory))
    else:
        os.system('mkdir %s' %(multiprotRunForProcessesFileDirectory))
    if not os.path.exists(multiprotLogFileDirectory):
        os.system('mkdir %s' %(multiprotLogFileDirectory))
    taskList = []
    taskDict = {}
    totalNumberOfMultiprotRuns = 0
    totalNumberOfInterfaces = 0
    fullMultiprotStatisticsFile = open(fullMultiprotStatisticsFileDirectory, 'r')
    multiprotRunListLogFile = open(multiprotRunListLogFileDirectory, 'w')
    for interfaceMultiprotInfo in fullMultiprotStatisticsFile:
        # Statistics columns: [0]=interface, [1]=index, [2]=interface size,
        # [3]=skeleton size, [4]=number of MultiProt runs.
        splittedInterfaceMultiprotInfo = interfaceMultiprotInfo.strip().split('\t')
        interfaceIndex = int(splittedInterfaceMultiprotInfo[1])
        # Window test: negative bounds mean "no bound" on that side.
        interfaceStatus = 0
        if interfaceStartIndex < 0:
            if interfaceEndIndex < 0:
                interfaceStatus = 1
            elif interfaceIndex <= interfaceEndIndex:
                interfaceStatus = 1
        elif interfaceIndex >= interfaceStartIndex:
            if interfaceEndIndex < 0:
                interfaceStatus = 1
            elif interfaceIndex <= interfaceEndIndex:
                interfaceStatus = 1
        if interfaceStatus == 1:
            interface = splittedInterfaceMultiprotInfo[0]
            # taskDict de-duplicates interfaces that appear more than once.
            if not interface in taskDict:
                taskDict[interface] = 1
                totalNumberOfMultiprotRuns = totalNumberOfMultiprotRuns + int(splittedInterfaceMultiprotInfo[4])
                totalNumberOfInterfaces = totalNumberOfInterfaces + 1
                taskList.append([interface, interfaceIndex, int(splittedInterfaceMultiprotInfo[2]), int(splittedInterfaceMultiprotInfo[3]), int(splittedInterfaceMultiprotInfo[4])])
                multiprotRunListLogFile.write(interfaceMultiprotInfo)
    fullMultiprotStatisticsFile.close()
    multiprotRunListLogFile.write('---\nTotal Number of Interfaces: %d\n' %(totalNumberOfInterfaces))
    multiprotRunListLogFile.write('Total Number of Multiprot Runs: %d\n---\n' %(totalNumberOfMultiprotRuns))
    multiprotRunListLogFile.close()
    # Fan out: one joinable task queue in, one result queue back.
    taskQueue_multiprotRunList = multiprocessing.JoinableQueue()
    multiprotRunResultQueue = multiprocessing.Queue()
    generateMultiprotRunListWorkers = [Process(target=multiprotRunListWork, args=(taskQueue_multiprotRunList, multiprotRunResultQueue, multiprotRunFileDirectory, multiprotRunForProcessesFileDirectory, multiprotInterfaceListFileDirectory, multiprotListSuffix, multiprotResultsForInterfaceFilesDirectory, multiprotResultsSuffix, interfaceFilesDirectory, interfaceSkeletonFilesDirectory, multiprotLogFileDirectory, i)) for i in range(numberOfProcesses)]
    for tempWorkers in generateMultiprotRunListWorkers:
        tempWorkers.start()
    for interfaceInfoList in taskList:
        taskQueue_multiprotRunList.put(interfaceInfoList)
    # One all-None sentinel per worker terminates their loops.
    for i in range(numberOfProcesses):
        taskQueue_multiprotRunList.put([None, None, None, None, None])
    taskQueue_multiprotRunList.join()
    # Exactly one result line is produced per selected interface.
    totalInterfaceResults = totalNumberOfInterfaces
    multiprotRunListResultFile = open(multiprotRunListResultFileDirectory, 'w')
    while totalInterfaceResults:
        interfaceMultiprotResultString = multiprotRunResultQueue.get()
        multiprotRunListResultFile.write(interfaceMultiprotResultString)
        totalInterfaceResults = totalInterfaceResults - 1
    t2 = time.time()
    print('\nElapsed time = %f seconds\n' %(t2-t1))
    print('Time stamp: %s' %(time.asctime()))
    print('\n* MULTIPROT DRIVER COMPLETED *\n')
|
import requests
class MyCrawler:
    """Minimal HTML fetcher built on ``requests``."""

    def __init__(self):
        pass

    @classmethod
    def fetchHtml(cls, url, timeout=None):
        """Fetch *url* and return the response body as text.

        Args:
            url: Absolute URL to request.
            timeout: Optional seconds to wait before aborting the request.
                Defaults to ``None`` (wait indefinitely), preserving the
                original behavior; pass a number to avoid a hung request
                blocking the caller forever.
        """
        return requests.get(url, timeout=timeout).text
# Used to tokenize tweets and/or process them in terms of politics
import re
class TweetTokenizer:
    # Splits raw tweet text into typed tokens (emoticons, HTML tags,
    # @-mentions, hashtags, URLs, numbers and words). Emoticons can be
    # protected from lowercasing.
    def __init__(self):
        # Emoticon pattern: eyes-first ( :-) ) or mouth-first ( (-: ).
        # Reused both for tokenizing and for exempting tokens from .lower().
        emoji_str = r"""
    (?:
      [>}]? # Eyebrows (optional)
      [:=;8xX] # Eyes
      [']? # Tears
      [oO\-^]? # Nose (optional)
      [D\)\]\(\[/\\oO0pPbB<>*3$sSL] # Mouth
    )|(?:
      [D\)\]\(\[/\\oO0pPbB<>*3$sSL] # Mouth
      [oO\-^]? # Nose (optional)
      [']? # Tears
      [:=;8xX] # Eyes
      [<{]? # Eyebrows (optional)
    )"""
        # Ordered token alternatives; earlier entries win.
        token_patterns = [
            emoji_str,
            r'<[^>]+>',                                                             # HTML tags
            r'(?:@[\w_]+)',                                                         # @-mentions
            r"(?:\#+[\w_\u00C0-\u00FF]+[\w\'_\-]*[\w_\u00C0-\u024F]+)",             # hashtags
            r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',  # URLs
            r'(?:(?:\d+,?)+(?:\.?\d+)?)',                                           # numbers
            r"(?:[a-z\u00C0-\u00FF][a-z\u00C0-\u00FF'\-_]+[a-z\u00C0-\u00FF])",     # words with - and '
            r'(?:[\w_\u00C0-\u00FF]+)',                                             # other words
            r'(?:\S)'                                                               # anything else
        ]
        combined = '|'.join(token_patterns)
        self.tokens_re = re.compile(r'(' + combined + ')', re.VERBOSE | re.IGNORECASE)
        self.emoticon_re = re.compile(r'^' + emoji_str + '$', re.VERBOSE | re.IGNORECASE)

    def tokenize(self, s, lowercase=False):
        # Return the list of tokens found in *s*. When *lowercase* is true,
        # every token except emoticons is lowercased.
        found = self.tokens_re.findall(s)
        if not lowercase:
            return found
        return [tok if self.emoticon_re.search(tok) else tok.lower() for tok in found]
class TweetPolitics:
    """Keyword-based analysis of (tokenized) tweets about the 2018 Mexican
    presidential debate: candidate mentions, win/lose claims, debate talk.

    All methods take *t* as an iterable of already-lowercased tokens.
    """

    def __init__(self):
        # Keyword sets per candidate plus win/lose/debate vocabularies.
        self.set_amlove = {r"amlo", r"andrés manuel lópez obrador", r"andrés manuel", r"@lopezobrador_", r"#amlo",
                           r"peje", r"morena", r"#morena"}
        self.set_ricky = {r"ricardo anaya", r"anaya", r"@ricardoanayac", r"#anaya", r"pan", r"#pan"}
        # BUG FIX: the original was missing the comma after r"#meade", so
        # Python concatenated the adjacent literals into "#meadepri" and the
        # set silently lacked both "#meade" and "pri".
        self.set_meade = {r"josé antonio meade", r"jose antonio meade", r"meade", r"@joseameadek", r"#meade",
                          r"pri", r"#pri"}
        self.set_bronco = {r"jaime rodriguez", r"jaime rodríguez", r"jaime rodriguez calderon",
                           r"jaime rodríguez calderón", r"@jaimerdznl", r"bronco", r"#bronco"}
        self.set_win = {r"ganó", r"gano", r"gana", r"ganar", r"victorioso", r"victoria", r"conquista", r"conquistó",
                        r"conquisto", r"triunfa", r"triunfó", "triunfo", r"vence", r"venció", r"vencio", r"domina",
                        r"dominó", r"domino"}
        self.set_lose = {r"pierde", r"perdió", r"perdio", r"perder", r"fracasa", r"fracasó", r"fracaso"}
        self.set_debate = {r"debate", r"#debate", r"#debateine", r"#debatepresidencial", r"#tercerdebate",
                           r"#tercerdebatepresidencial"}

    def only_one_candidate(self, t):
        """Return the short name of the single candidate mentioned in *t*,
        or '' when zero or more than one candidate is mentioned."""
        # XOR of candidates: exactly one mention flag must be set.
        c1 = self.mentions_cand(t, 1)
        c2 = self.mentions_cand(t, 2)
        c3 = self.mentions_cand(t, 3)
        c4 = self.mentions_cand(t, 4)
        if c1 + c2 + c3 + c4 == 1:
            return c1*"amlo" + c2*"ricky" + c3*"meade" + c4*"bronco"
        else:
            return ''

    def mentions_cand(self, t, c):
        """Return 1 if any token in *t* matches candidate *c*'s keyword set
        (1=AMLO, 2=Anaya, 3=Meade, 4=Bronco), else 0."""
        if c == 1:
            temp_set = self.set_amlove
        elif c == 2:
            temp_set = self.set_ricky
        elif c == 3:
            temp_set = self.set_meade
        elif c == 4:
            temp_set = self.set_bronco
        else:
            temp_set = ""
        for token in t:
            if token in temp_set:
                return 1
        # Return 0 otherwise
        return 0

    def did_he_win(self, t):
        """True if *t* claims a win and never mentions losing."""
        win = False
        for token in t:
            if token in self.set_win:
                win = True
            if token in self.set_lose:
                # Any losing keyword vetoes the win claim.
                return False
        return win

    def did_he_lose(self, t):
        """True if *t* claims a loss and never mentions winning."""
        lose = False
        for token in t:
            if token in self.set_lose:
                lose = True
            if token in self.set_win:
                # Any winning keyword vetoes the loss claim.
                return False
        return lose

    def mention_debate(self, t):
        """True if any token in *t* refers to the debate."""
        for token in t:
            if token in self.set_debate:
                return True
        return False
|
""" Python class to interact with the APIs. """
import sys
import json
import time
import jwt
from requests import Session
from deepmap_sdk import auth, users, tiles, maps
class DeepmapClient:
    """ Python Client for the Deepmap API.

    Authenticates once at construction time (exchanging the API token for a
    session JWT) and reuses a single requests.Session for every call.
    """

    def __init__(self, api_token, server_url='https://api.deepmap.com'):
        """Log in with *api_token* and prime the session headers with the JWT.

        Exits the process if the login request is rejected.
        """
        self.session = Session()
        url, payload, headers = auth.create_api_session(api_token, server_url)
        self.session.headers.update(headers)
        response = self.session.post(url, data=json.dumps(payload))
        if response.status_code != 200:
            sys.exit('Failed to login.')
        token = response.json()['token']
        headers['Authorization'] = 'Bearer ' + token
        self.session.headers.update(headers)
        # Only the expiry claim is needed, not a trust decision, so signature
        # verification is skipped. FIX: PyJWT 2.x removed the `verify=False`
        # kwarg; `options={"verify_signature": False}` is the supported
        # spelling (and also works on PyJWT 1.x).
        decoded_token = jwt.decode(token, algorithms=["ES256"],
                                   options={"verify_signature": False})
        self.expiration = decoded_token['exp']
        self.server_url = server_url

    def is_exp(self):
        """ Returns True if token is expired, False otherwise. """
        return time.time() >= self.expiration

    def get_exp(self):
        """ Returns the Unix time since epoch expiration time. """
        return self.expiration

    def list_maps(self):
        """ Returns a dictionary of the list of maps. """
        url = maps.list_maps(self.server_url)
        response = self.session.get(url)
        return response.json()

    def list_feature_tiles(self, map_id):
        """ Returns a dictionary of feature tiles for map designated by map_id. """
        url = tiles.list_feature_tiles(map_id, self.server_url)
        response = self.session.get(url)
        return response.json()

    def list_users(self):
        """ Returns a dictionary of the list of users. """
        url = users.list_users(self.server_url)
        response = self.session.get(url)
        return response.json()

    def download_feature_tile(self, tile_id):
        """ Downloads a feature tile designated by tile_id. Returns a binary string. """
        url = tiles.download_feature_tile(tile_id, self.server_url)
        response = self.session.get(url)
        if response.status_code == 200:
            return response.content
        # On failure, surface the server's JSON error body instead.
        return response.json()

    def get_user(self, user_id):
        """ Returns user information for user designated by user_id. """
        url = users.get_user(user_id, self.server_url)
        response = self.session.get(url)
        return response.json()

    def invite_user(self, email, admin=''):
        """ Invites new user to join.

        Args:
            email: email of the new user.
            admin: String, 'True' if new user is an admin. 'False' or '' otherwise.

        Returns the server's JSON response, or {} on failure.
        """
        url, payload = users.invite_user(email, admin, self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        if response.status_code != 200:
            print("Error. Could not invite user.")
            return {}
        return response.json()

    def edit_user(self, user_id, email='', admin=''):
        """ Edits an existing user's information.

        Args:
            user_id: id of the user to edit.
            email: email of the new user.
            admin: String, 'True' if user will be admin. 'False' or '' otherwise.
        """
        url, payload = users.edit_user(user_id, email, admin, self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        if response.status_code != 200:
            print("Error. Could not edit user.")
        else:
            print("User edited.")

    def delete_user(self, user_id):
        """ Deletes user designated by user_id. """
        url = users.delete_user(user_id, self.server_url)
        response = self.session.delete(url)
        if response.status_code != 200:
            # FIX: the original printed a copy-pasted "Could not invite user."
            print("Error. Could not delete user.")
        else:
            print("User deleted.")

    def create_api_token(self, description):
        """ Creates an API access token with the given description. """
        url, payload = auth.create_api_token(description, self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        return response.json()

    def create_vehicle_token(self, vehicle_id, description):
        """ Creates an vehicle access token with the given description and
        vehicle_id. """
        url, payload = auth.create_vehicle_token(vehicle_id, description,
                                                 self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        return response.json()

    def delete_api_token(self, token_id):
        """ Delete the API token with token_id as its id. """
        url = auth.delete_api_token(token_id, self.server_url)
        response = self.session.delete(url)
        if response.status_code != 200:
            print("Error. Could not delete API token.")
        else:
            print("API token deleted.")

    def delete_vehicle_token(self, token_id):
        """ Delete the vehicle token with token_id as its id. """
        url = auth.delete_vehicle_token(token_id, self.server_url)
        response = self.session.delete(url)
        if response.status_code != 200:
            print("Error. Could not delete vehicle token.")
        else:
            print("Vehicle token deleted.")

    def list_api_tokens(self):
        """ List all issued API tokens under the user's account. """
        url = auth.list_api_tokens(self.server_url)
        response = self.session.get(url)
        return response.json()

    def list_vehicle_tokens(self):
        """ List all issued vehicle tokens under the user's account. """
        url = auth.list_vehicle_tokens(self.server_url)
        response = self.session.get(url)
        return response.json()

    def create_api_session(self, api_token):
        """ Create an API session token (JWT) using a API access token. """
        url, payload, _ = auth.create_api_session(api_token, self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        return response.json()

    def create_vehicle_session(self, vehicle_token):
        """ Create a vehicle session token (JWT) using a API access token. """
        url, payload, _ = auth.create_vehicle_session(vehicle_token,
                                                      self.server_url)
        response = self.session.post(url, data=json.dumps(payload))
        return response.json()

    def __str__(self):
        return "url: {}\nexp: {}\n".format(self.server_url, self.expiration)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013, Roboterclub Aachen e.V.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Roboterclub Aachen e.V. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY ROBOTERCLUB AACHEN E.V. ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ROBOTERCLUB AACHEN E.V. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
#
# This file contains the base class for all device elements
# i.e. Drivers and Properties
#
class DeviceElementBase:
    """ DeviceElementBase
    Base class for Property as well as Driver class.
    Handles pin-id/size-id and pin-count properties.

    Parses an XML element's attributes: multi-values are split on '|',
    the literal 'none' is mapped to None, and '*count*' attributes such
    as '3', '3+' or '3-' are parsed into constraint dicts.
    """

    def __init__(self, device, node):
        self.type = node.tag
        self.device = device  # parent
        self.log = device.log
        # split multiple attribute values
        self.attributes = {}
        for key in node.attrib:
            self.attributes[key] = node.attrib[key].split('|')
        # replace 'none' with None
        # FIX: this replacement must run BEFORE count parsing. The original
        # ran it afterwards and iterated the parsed count *dict*, silently
        # replacing e.g. {'type': '+', 'count': 3} with its key list.
        for key in self.attributes:
            self.attributes[key] = [None if v == 'none' else v for v in self.attributes[key]]
        # parse integer counts (only single, still-string values)
        for key in self.attributes:
            values = self.attributes[key]
            if len(values) == 1 and 'count' in key and isinstance(values[0], str):
                self.attributes[key] = self._parseCount(values[0])

    def _parseCount(self, count):
        """Parse a count specifier: '3' -> exact, '3+' -> minimum, '3-' -> maximum.

        Anything else is returned wrapped in a list and treated as a plain value.
        """
        if count.isdigit():
            return {'type': '==', 'count': int(count)}
        if count[:-1].isdigit() and count[-1:] in ['+', '-']:
            return {'type': count[-1:], 'count': int(count[:-1])}
        return [count]

    def appliesTo(self, device_id, properties=None, matched=None):
        """
        checks if this property/driver applies to the device specified by the
        device string

        `properties` is the list of sibling property objects used to resolve
        non-device attributes; `matched` tracks already-visited property
        hashes to break circular references.
        (FIX: mutable default arguments replaced with None sentinels.)
        """
        if properties is None:
            properties = []
        if matched is None:
            matched = []
        for key in self.attributes:
            if 'device-' in key:
                # compare this attribute against the device identifier
                dev_key = key.replace('device-', '').replace('-', '_')
                props = device_id.properties
                if dev_key not in props:
                    self.log.error("DeviceElementBase: Attribute '%s' is not part of the DeviceIdentifier as '%s'."
                        " Maybe you misspelled it?" % (key, dev_key))
                    return False
                if props[dev_key] not in self.attributes[key]:
                    return False
            else:
                # other attributes reference sibling properties by type and
                # need to be evaluated recursively
                applicable = []
                for prop in [p for p in properties if key == p.type]:
                    # make sure we do not evaluate a circular reference
                    prop_hash = str(prop.type) + str(prop.value)
                    if prop_hash in matched[:-1]:
                        self.log.error("DeviceElementBase: Cannot resolve circular references!"
                            " '%s' depends on properties that reference this property." % prop.type)
                        return False
                    matched.append(prop_hash)
                    if prop.appliesTo(device_id, properties, matched):
                        applicable.append(prop)
                for prop in applicable:
                    attr = self.attributes[key]
                    if '-count' in key:
                        # integer count constraint parsed by _parseCount
                        if attr['type'] == '==':
                            if attr['count'] != prop.value:
                                return False
                        elif attr['type'] == '+':
                            if attr['count'] > prop.value:
                                return False
                        elif attr['type'] == '-':
                            if attr['count'] < prop.value:
                                return False
                    else:
                        if prop.value not in attr:
                            return False
        return True
|
print('Exercício Python #028 - Jogo da Adivinhação v.1.0')
import random
from time import sleep

# The computer picks a secret number from 1 to 5.
r = random.randint(1, 5)
print('-=-' * 20)
# The player tries to guess it.
u = int(input('Escreva um número de 1 a 5? '))
print('-=-' * 20)
print('Processando...')
sleep(1)  # brief pause for suspense
if u == r:
    print('Parabéns! Você acertou!')
else:
    print('Você errou! Talvez na proxima')
    # FIX: warn about any out-of-range guess. The original only checked
    # u >= 6 and silently accepted guesses below 1 (e.g. 0 or -3).
    if u < 1 or u > 5:
        print('É uma número de 1 a 5, amigo!')
print('-=-' * 20)
# Environment report: each dependency is imported immediately before its
# version is printed, so a missing package fails right after the last
# successfully reported line — making the failing import easy to attribute.
import os
import platform
print("System: {name} {system} {release}".format(name=os.name,
                                                 system=platform.system(),
                                                 release=platform.release()))
import sys
print("Python: " + sys.version)
import pytest
print("pytest: " + pytest.__version__)
import numpy
print("numpy: " + numpy.__version__)
import shapely
print("shapely: " + shapely.__version__)
import descartes
# descartes exposes no __version__ attribute; just confirm it imports.
print("descartes imported")
import skysight
print("skysight: " + skysight.__version__)
|
# CNO rate module generator
# Builds a Python right-hand-side module for the CNO-cycle reaction network
# from the listed reaction-rate files and writes it to cno_rhs.py.
from pyreaclib.networks import PythonNetwork

# Rate files (naming appears to follow reactants-process-products-source,
# e.g. c12-pg-n13-ls09 = C12(p,g)N13 from Li/Skibsted 2009-style tables).
files = ["c12-pg-n13-ls09",
         "c13-pg-n14-nacr",
         "n13--c13-wc12",
         "n13-pg-o14-lg06",
         "n14-pg-o15-im05",
         "n15-pa-c12-nacr",
         "o14--n14-wc12",
         "o15--n15-wc12"]

# Assemble the network and emit the generated RHS module.
cno_net = PythonNetwork(files)
cno_net.write_network("cno_rhs.py")
|
#!/usr/bin/env python3
#
# Copyright (c) 2019 Two Sigma Open Source, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import base64
import html
import re
import sys
import zlib
from datetime import datetime, timedelta
from http.server import BaseHTTPRequestHandler, HTTPServer
from os import path
from urllib.parse import parse_qs
from lxml import etree
from signxml import XMLSigner
# Command-line configuration (positional):
#   1: TCP port the stub IdP listens on
#   2: ACS endpoint expected inside incoming SAML AuthnRequests
#   3: URL the generated form posts the signed SAMLResponse back to
#   4: username asserted in generated SAML responses
port = sys.argv[1]
expected_acs_endpoint = sys.argv[2]
acs_redirect_url = sys.argv[3]
auth_user = sys.argv[4]
# Repository root: one level up from this script's directory.
idpserver_root_dir = path.join(path.dirname(path.abspath(__file__)), "..")
def readfile(file):
    """Return the text contents of *file*, resolved relative to the IdP root.

    FIX: uses a context manager so the file handle is closed promptly
    (the original leaked the handle until garbage collection).
    """
    with open(path.join(idpserver_root_dir, file)) as f:
        return f.read()
# HTML page template that auto-submits the signed SAMLResponse to the ACS.
saml_response_redirect_template = readfile("resources/saml-response-redirect-template.html")
def format_time(time):
    """Format a (naive, UTC) datetime as ISO-8601 with a trailing 'Z'."""
    truncated = time.replace(microsecond=0)  # SAML timestamps omit fractions
    return truncated.isoformat() + 'Z'
def make_saml_response():
    """Render, sign and base64-encode the SAML response template.

    Fills the time and user placeholders in the XML template, signs the
    document with the IdP key/certificate via signxml, and returns the
    signed XML base64-encoded, as required by the SAML POST binding.
    """
    signing_key = readfile("resources/privatekey.pem").encode('ascii')
    signing_cert = readfile("resources/idp.crt").encode('ascii')
    template = readfile("resources/saml-response-template.xml")
    placeholder_values = (
        ("issue-instant-field", format_time(datetime.utcnow())),
        ("session-not-on-or-after-field", format_time(datetime.utcnow() + timedelta(days=1))),
        ("not-on-or-after-field", format_time(datetime.utcnow() + timedelta(minutes=5))),
        ("auth-user-field", auth_user),
    )
    saml_response = template
    for placeholder, value in placeholder_values:
        saml_response = saml_response.replace(placeholder, value)
    document = etree.fromstring(saml_response)
    signed_document = XMLSigner().sign(document, key=signing_key, cert=signing_cert)
    return base64.b64encode(etree.tostring(signed_document)).decode()
class MyHandler(BaseHTTPRequestHandler):
    # Implements the IdP side of the SAML redirect binding: decode the
    # AuthnRequest from the query string, validate its ACS URL and answer
    # with an auto-submitting form carrying a freshly signed SAMLResponse.
    def do_GET(self):
        """Respond to a GET request."""
        # Liveness probe endpoint.
        if self.path == "/healthcheck":
            self.send_response(200)
            self.send_header("content-type", "text/html")
            self.end_headers()
            self.wfile.write(b"OK")
            return
        url_tokens = self.path.split("?")
        if not url_tokens or len(url_tokens) < 2:
            # No query string: silently drop the request (no response sent).
            return
        query_params = parse_qs(url_tokens[1])
        # NOTE(review): a missing SAMLRequest/RelayState parameter raises
        # KeyError here and surfaces as a 500 — presumably acceptable for a
        # test stub; confirm.
        saml_request = query_params["SAMLRequest"][0]
        relay_state = query_params["RelayState"][0]
        # Redirect-binding payloads are base64 over raw DEFLATE (wbits=-15).
        saml_request_b64_decoded = base64.b64decode(saml_request)
        saml_request_zlib_decoded = zlib.decompress(saml_request_b64_decoded, -15)
        acs_endpoint_match = re.search('AssertionConsumerServiceURL="([^"]+)"', str(saml_request_zlib_decoded))
        if acs_endpoint_match and acs_endpoint_match.group(1) == expected_acs_endpoint:
            self.send_response(200)
            self.send_header("content-type", "text/html")
            self.end_headers()
            # Fill the auto-submit form template with HTML-escaped values.
            response = saml_response_redirect_template \
                .replace("form-action-field", html.escape(acs_redirect_url)) \
                .replace("saml-response-field", html.escape(make_saml_response())) \
                .replace("relay-state-field", html.escape(relay_state))
            self.wfile.write(response.encode('ascii'))
        else:
            # Unexpected or absent ACS URL: reject the request.
            # NOTE(review): the message below reads "is SAML request" —
            # likely a typo for "in SAML request".
            self.send_response(400)
            self.send_header("content-type", "text/html")
            self.end_headers()
            self.wfile.write(b"Invalid AssertionConsumerServiceURL is SAML request. Expecting %s. SAML request: %s"
                             % (expected_acs_endpoint.encode('ascii'), saml_request_zlib_decoded))
        return
def run(server_class=HTTPServer, handler_class=MyHandler):
    """Start the stub IdP on the configured port and serve forever (blocks)."""
    bind_address = ('', int(port))  # all interfaces, port from argv
    server = server_class(bind_address, handler_class)
    server.serve_forever()

run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright(C) 2021 cGIfl300 <cgifl300@gmail.com>
from django.contrib.auth.models import User
from django.db import IntegrityError
from store.models import UserFollows
def follow(user_to_follow, request):
    """Make the requesting user follow *user_to_follow* (a username string).

    Returns True when a new UserFollows row was created, False when the
    relation already existed (unique-constraint IntegrityError).
    Raises IndexError if either username matches no existing User.
    """
    # Get actual user
    actual_user = User.objects.filter(username__iexact=request.user.username)[0]
    # Get user to follow
    user_to_follow = User.objects.filter(username__iexact=user_to_follow)[0]
    try:
        add_followed = UserFollows(
            user=actual_user, followed_user=user_to_follow
        )
        add_followed.save()
    except IntegrityError:
        # Relation already exists: report that nothing new was created.
        # (The original comment claimed "return True" — the code returns
        # False here; the comment was wrong, not the code.)
        return False
    return True
|
import sys

# One-line diagnostic: report the running interpreter's version string.
sys.stdout.write(sys.version + "\n")
# Copyright 2021 Kamil Sroka
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from toshiba_ac.http_api import ToshibaAcHttpApi
from toshiba_ac.amqp_api import ToshibaAcAmqpApi
from toshiba_ac.device import ToshibaAcDevice
from toshiba_ac.utils import async_sleep_until_next_multiply_of_minutes
import asyncio
import logging
logger = logging.getLogger(__name__)
class ToshibaAcDeviceManager:
    """Owns the cloud API connections and the set of Toshiba AC devices."""

    # Polling period (minutes) for device energy-consumption readings.
    FETCH_ENERGY_CONSUMPTION_PERIOD_MINUTES = 10

    def __init__(self, loop, username, password, device_id=None, sas_token=None):
        """Store credentials and prepare state; no I/O happens until connect()."""
        self.loop = loop  # asyncio event loop handed to managed devices
        self.username = username
        self.password = password
        self.http_api = None  # created lazily in connect()
        self.reg_info = None
        self.amqp_api = None  # created lazily in connect()
        # Client id: username plus a caller-supplied or fixed default suffix.
        self.device_id = self.username + '_' + (device_id or '3e6e4eb5f0e5aa46')
        self.sas_token = sas_token  # reused if given, otherwise fetched on connect
        self.devices = {}  # ac_unique_id -> ToshibaAcDevice
        self.periodic_fetch_energy_consumption_task = None
        self.lock = asyncio.Lock()  # serializes connect/shutdown/device setup
async def connect(self):
async with self.lock:
if not self.http_api:
try:
self.http_api = ToshibaAcHttpApi(self.username, self.password)
await self.http_api.connect()
if not self.sas_token:
self.sas_token = await self.http_api.register_client(self.device_id)
self.amqp_api = ToshibaAcAmqpApi(self.sas_token)
self.amqp_api.register_command_handler('CMD_FCU_FROM_AC', self.handle_cmd_fcu_from_ac)
self.amqp_api.register_command_handler('CMD_HEARTBEAT', self.handle_cmd_heartbeat)
await self.amqp_api.connect()
except:
await self.shutdown()
raise
return self.sas_token
async def shutdown(self):
async with self.lock:
if self.periodic_fetch_energy_consumption_task:
self.periodic_fetch_energy_consumption_task.cancel()
await asyncio.gather(*[device.shutdown() for device in self.devices.values()])
if self.amqp_api:
await self.amqp_api.shutdown()
self.amqp_api = None
if self.http_api:
await self.http_api.shutdown()
self.http_api = None
async def periodic_fetch_energy_consumption(self):
while True:
await async_sleep_until_next_multiply_of_minutes(self.FETCH_ENERGY_CONSUMPTION_PERIOD_MINUTES)
await self.fetch_energy_consumption()
async def fetch_energy_consumption(self):
consumptions = await self.http_api.get_devices_energy_consumption([ac_unique_id for ac_unique_id in self.devices.keys()])
logger.debug(f'Power consumption for devices: {consumptions}')
updates = []
for ac_unique_id, consumption in consumptions.items():
update = self.devices[ac_unique_id].handle_update_ac_energy_consumption(consumption)
updates.append(update)
await asyncio.gather(*updates)
async def get_devices(self):
async with self.lock:
if not self.devices:
devices_info = await self.http_api.get_devices()
logger.debug(f'Found devices: {devices_info}')
connects = []
for device_info in devices_info:
device = ToshibaAcDevice(
self.loop,
device_info.ac_name,
self.device_id,
device_info.ac_id,
device_info.ac_unique_id,
device_info.initial_ac_state,
device_info.firmware_version,
device_info.merit_feature,
device_info.ac_model_id,
self.amqp_api,
self.http_api
)
connects.append(device.connect())
logger.debug(f'Adding device {device!r}')
self.devices[device.ac_unique_id] = device
await asyncio.gather(*connects)
await self.fetch_energy_consumption()
if not self.periodic_fetch_energy_consumption_task:
self.periodic_fetch_energy_consumption_task = self.loop.create_task(self.periodic_fetch_energy_consumption())
return list(self.devices.values())
def handle_cmd_fcu_from_ac(self, source_id, message_id, target_id, payload, timestamp):
asyncio.run_coroutine_threadsafe(self.devices[source_id].handle_cmd_fcu_from_ac(payload), self.loop).result()
def handle_cmd_heartbeat(self, source_id, message_id, target_id, payload, timestamp):
asyncio.run_coroutine_threadsafe(self.devices[source_id].handle_cmd_heartbeat(payload), self.loop).result()
|
--- mesonbuild/mesonlib.py.orig	2019-11-28 17:37:44 UTC
+++ mesonbuild/mesonlib.py
@@ -672,7 +672,7 @@ def default_libdir():
         return 'lib/' + archpath
     except Exception:
         pass
-    if is_freebsd():
+    if is_freebsd() or is_dragonflybsd():
         return 'lib'
     if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
         return 'lib64'
@@ -697,7 +697,7 @@ def get_library_dirs() -> typing.List[st
     # problematic, please raise the issue on the mailing list.
     unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
-    if is_freebsd():
+    if is_freebsd() or is_dragonflybsd():
         return unixdirs
     # FIXME: this needs to be further genericized for aarch64 etc.
     machine = platform.machine()
|
# Classifying movie genres with k-Nearest Neighbors
import kNN

training_group, training_labels = kNN.createDataSet()
print(training_group)
print(training_labels)
# Classify the origin point against the toy training set using k = 3.
predicted_label = kNN.classify0([0, 0], training_group, training_labels, 3)
print(predicted_label)
|
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module containing a helper class for generating config report requests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from . import services_util
from apitools.base.py import encoding
class ConfigReporterValue(object):
  """Holds one side (old or new) of a config report and builds its value.

  Exactly one of four sources may be active at a time: a full service
  config dict, a swagger/OpenAPI file, an explicit config id, or the
  service's currently-active config id (the default).
  """

  SERVICE_CONFIG_TYPE_URL = 'type.googleapis.com/google.api.Service'
  CONFIG_REF_TYPE_URL = ('type.googleapis.com/'
                         'google.api.servicemanagement.v1.ConfigRef')
  CONFIG_SOURCE_TYPE_URL = ('type.googleapis.com/'
                            'google.api.servicemanagement.v1.ConfigSource')

  def __init__(self, service):
    self.messages = services_util.GetMessagesModule()
    self.service = service
    self._Reset(use_active_id=True)

  def _Reset(self, use_active_id):
    # Clear every source field; each setter re-populates exactly one.
    self.config = None
    self.swagger_path = None
    self.swagger_contents = None
    self.config_id = None
    self.config_use_active_id = use_active_id

  def SetConfig(self, config):
    """Use a full service config (dict) as this value's source."""
    self._Reset(use_active_id=False)
    self.config = config

  def SetSwagger(self, path, contents):
    """Use a swagger/OpenAPI file (path plus contents) as the source."""
    self._Reset(use_active_id=False)
    self.swagger_path = path
    self.swagger_contents = contents

  def SetConfigId(self, config_id):
    """Use an explicit service config id as the source."""
    self._Reset(use_active_id=False)
    self.config_id = config_id

  def SetConfigUseDefaultId(self):
    """Use the service's currently-active config id as the source."""
    self._Reset(use_active_id=True)

  def GetTypeUrl(self):
    """Return the @type URL matching the active source, or None if unset."""
    if self.config:
      return ConfigReporterValue.SERVICE_CONFIG_TYPE_URL
    if self.swagger_path and self.swagger_contents:
      return ConfigReporterValue.CONFIG_SOURCE_TYPE_URL
    if self.config_id or self.config_use_active_id:
      return ConfigReporterValue.CONFIG_REF_TYPE_URL
    return None

  def IsReadyForReport(self):
    """True once any of the sources has been provided for this value."""
    provided = (self.config, self.swagger_path, self.config_id)
    return self.config_use_active_id or any(
        item is not None for item in provided)

  def ConstructConfigValue(self, value_type):
    """Make a value to insert into the GenerateConfigReport request.

    Args:
      value_type: The type to encode the message into. Generally, either
        OldConfigValue or NewConfigValue.

    Returns:
      The encoded config value object of type value_type, or None when no
      source has been set.
    """
    if not self.IsReadyForReport():
      return None
    result = {}
    if self.config:
      result.update(self.config)
    elif self.swagger_path:
      config_file = self.messages.ConfigFile(
          filePath=self.swagger_path,
          fileContents=self.swagger_contents,
          # Always use YAML because JSON is a subset of YAML.
          fileType=(self.messages.ConfigFile.
                    FileTypeValueValuesEnum.OPEN_API_YAML))
      config_source_message = self.messages.ConfigSource(files=[config_file])
      result.update(encoding.MessageToDict(config_source_message))
    else:
      if self.config_id:
        resource = 'services/{0}/configs/{1}'.format(
            self.service, self.config_id)
      else:
        # config_use_active_id is guaranteed to be set here (the stale
        # comment used to say "new_config_use_active_id"), so look up the
        # active service config id(s).
        active_config_ids = services_util.GetActiveServiceConfigIdsForService(
            self.service)
        if active_config_ids:
          # For now, only one service config ID can be active, so use the
          # first one, if one is available.
          resource = 'services/{0}/configs/{1}'.format(
              self.service, active_config_ids[0])
        else:
          # Otherwise, just omit the service config ID and let the backend
          # attempt to get the latest service config ID. If none is found,
          # it will handle this case gracefully.
          resource = 'services/{0}'.format(self.service)
      result.update({'name': resource})
    result.update({'@type': self.GetTypeUrl()})
    return encoding.DictToMessage(result, value_type)
class ConfigReporter(object):
  """A container class to hold config report fields and methods."""

  def __init__(self, service):
    self.client = services_util.GetClientInstance()
    self.messages = services_util.GetMessagesModule()
    self.service = service
    self.old_config = ConfigReporterValue(service)
    self.new_config = ConfigReporterValue(service)

  def ConstructRequestMessage(self):
    """Build the GenerateConfigReportRequest from the old/new values."""
    request_cls = self.messages.GenerateConfigReportRequest
    return request_cls(
        oldConfig=self.old_config.ConstructConfigValue(
            request_cls.OldConfigValue),
        newConfig=self.new_config.ConstructConfigValue(
            request_cls.NewConfigValue))

  def RunReport(self):
    """Run the report; None for an empty result, [] when no change reports."""
    result = self.client.services.GenerateConfigReport(
        self.ConstructRequestMessage())
    if not result:
      return None
    return result.changeReports[0] if result.changeReports else []
|
# terrascript/template/d.py
import terrascript
# Marker classes exposing the Terraform "template" provider's data sources
# to terrascript; the class name maps to the data-source name and all
# behavior lives in terrascript.Data.
class template_file(terrascript.Data):
    pass
class template_cloudinit_config(terrascript.Data):
    pass
|
"""
The SystemC serializer serializes HDL objects to SystemC code.
"""
|
from robotchef.plugin import Chef
class Cook(Chef):
    """Chef plugin that announces it can cook whatever it is given."""

    def make(self, food=None):
        """Print what is being cooked.

        Bug fix: the old ``"I can cook " + food`` raised TypeError when
        called with the default ``food=None``; str.format handles any value.
        """
        print("I can cook {}".format(food))
|
import json
import io
from pathlib import Path
import xml.etree
import pyproj
import pytest
import rasterio.coords
import stac_vrt
# Bug fix: use __file__ (this module's path), not __name__ (its dotted
# module name), so HERE points at the directory containing this test file
# regardless of the current working directory.
HERE = Path(__file__).parent.absolute()
@pytest.fixture
def response():
    """The canned STAC search response used by the integration tests."""
    with (HERE / "tests/response.json").open() as fh:
        return json.load(fh)
def assert_vrt_equal(result, expected):
    """Assert that two parsed VRT XML trees are equivalent.

    Compares attributes and text for the root and each VRT element of
    interest; GeoTransform values are compared numerically (rounded)
    rather than textually.
    """
    for path in [
        ".",
        "SRS",
        "GeoTransform",
        "VRTRasterBand",
        "VRTRasterBand/ColorInterp",
        "VRTRasterBand/SimpleSource",
        "VRTRasterBand/SimpleSource/SourceFilename",
        "VRTRasterBand/SimpleSource/SourceBand",
        "VRTRasterBand/SimpleSource/SourceProperties",
        "VRTRasterBand/SimpleSource/SrcRect",
        "VRTRasterBand/SimpleSource/DstRect",
    ]:
        rchild = result.findall(path)
        echild = expected.findall(path)
        assert len(echild)
        if path != "VRTRasterBand/ColorInterp":
            # TODO: check on the expected.
            assert len(echild) == len(rchild)
        for a, b in zip(echild, rchild):
            assert a.attrib == b.attrib
            if path == "GeoTransform":
                # Bug fix: compare expected (a) against result (b). The old
                # code parsed a.text twice, so this check always passed.
                x = list(map(lambda v: round(float(v)), a.text.split(",")))
                y = list(map(lambda v: round(float(v)), b.text.split(",")))
                assert x == y
            else:
                assert a.text == b.text
def test_fixture(response):
assert set(response) == {"context", "features", "links", "type"}
def test_integration(response):
    """Full build_vrt run against the canned (partially patched) NAIP response."""
    stac_items = response["features"]
    # TODO: remove when fixed in NAIP data
    # Have to at least fix the CRS....
    for item in stac_items:
        item["properties"]["proj:epsg"] = 26917
    crs = pyproj.crs.CRS("epsg:26917")
    # TODO: remove when added to NAIP data
    res_x = res_y = 0.6
    # TODO: remove when added to NAIP data (left, bottom, right, top)
    bboxes = [
        rasterio.coords.BoundingBox(530802.0, 2979348.0, 537426.0, 2986692.0),
        rasterio.coords.BoundingBox(524604.0, 2979336.0, 531222.0, 2986674.0),
    ]
    # TODO: remove when added to NAIP data
    shapes = [(12240, 11040), (12230, 11030)]
    # TODO: Remove when added to STAC
    data_type = "Byte"
    # TODO: Remove when added to STAC
    block_width = block_height = 512
    # --------------------------
    # Now for the test.
    result = stac_vrt.build_vrt(
        stac_items,
        crs=crs,
        res_x=res_x,
        res_y=res_y,
        shapes=shapes,
        bboxes=bboxes,
        data_type=data_type,
        block_width=block_width,
        block_height=block_height,
    )
    expected_tree = xml.etree.ElementTree.parse(HERE / "tests/expected.vrt").getroot()
    result_tree = xml.etree.ElementTree.parse(io.StringIO(result)).getroot()
    assert_vrt_equal(result_tree, expected_tree)
def test_integration_fixed():
    """The generated VRT should be openable by rasterio."""
    with open(HERE / "tests/response-fixed.json") as f:
        resp = json.load(f)
    stac_items = resp["features"]
    vrt = stac_vrt.build_vrt(
        stac_items, data_type="Byte", block_width=512, block_height=512
    )
    # Close the dataset (the old code leaked it) and actually assert on the
    # transform instead of just evaluating the attribute.
    with rasterio.open(vrt) as ds:
        assert ds.transform is not None
def test_no_items():
    """An empty item list must be rejected up front."""
    empty_items = []
    with pytest.raises(ValueError, match="Must provide"):
        stac_vrt.build_vrt(empty_items)
def test_incorrect_bboxes():
    """Two bboxes for a single item must raise a length-mismatch error."""
    kwargs = dict(
        bboxes=[[1, 2, 3, 4], [5, 6, 7, 8]],
        crs=pyproj.crs.CRS("epsg:26917"),
        res_x=1,
        res_y=1,
    )
    with pytest.raises(ValueError, match="2 != 1"):
        stac_vrt.build_vrt([{"test": 1}], **kwargs)
def test_incorrect_shapes():
    """Two shapes for a single item must raise a length-mismatch error."""
    kwargs = dict(
        bboxes=[[1, 2, 3, 4]],
        shapes=[[1, 2], [3, 4]],
        crs=pyproj.crs.CRS("epsg:26917"),
        res_x=1,
        res_y=1,
    )
    with pytest.raises(ValueError, match="2 != 1"):
        stac_vrt.build_vrt([{"test": 1}], **kwargs)
def test_multiple_crs_raises():
    """Items with differing proj:epsg values must be rejected."""
    resp = json.loads((HERE / "tests/response-fixed.json").read_text())
    resp["features"][0]["properties"]["proj:epsg"] = 26918
    with pytest.raises(ValueError, match="same CRS"):
        stac_vrt.build_vrt(
            resp["features"], data_type="Byte", block_width=512, block_height=512
        )
def test_missing_crs_raises():
    """An item with no proj:epsg must raise a KeyError naming the key."""
    resp = json.loads((HERE / "tests/response-fixed.json").read_text())
    del resp["features"][0]["properties"]["proj:epsg"]
    with pytest.raises(KeyError, match="proj:epsg"):
        stac_vrt.build_vrt(
            resp["features"], data_type="Byte", block_width=512, block_height=512
        )
|
#!/usr/bin/env python3
import os
import re
import logging
import requests
log = logging.getLogger(__name__)
# Controller connection and output behavior are configured via environment
# variables.
baseurl = os.environ.get("UNIFI_BASEURL", "https://unifi:8443")
username = os.environ.get("UNIFI_USERNAME")
password = os.environ.get("UNIFI_PASSWORD")
site = os.environ.get("UNIFI_SITE", "default")
# When "True" (exact string), only emit configured clients (skip active ones).
fixed_only = os.environ.get("FIXED_ONLY", "False") == "True"
log_level = os.environ.get("LOG_LEVEL", "INFO")
# When "True", sanitize names instead of skipping ones with bad characters.
slug_names = os.environ.get("SLUG_NAMES", "False") == "True"
# Any run of characters outside [a-zA-Z0-9-] is invalid in a host name.
bad_chars_pattern = re.compile("[^a-zA-Z0-9-]+")
def get_configured_clients(session):
    """Return the controller's configured clients for the site (list of dicts)."""
    # Get configured clients
    # verify=False: UniFi controllers commonly run with self-signed certs.
    r = session.get(f"{baseurl}/api/s/{site}/list/user", verify=False)
    r.raise_for_status()
    return r.json()["data"]
def get_active_clients(session):
    """Return the currently-active clients for the site (list of dicts)."""
    # Get active clients
    # verify=False: UniFi controllers commonly run with self-signed certs.
    r = session.get(f"{baseurl}/api/s/{site}/stat/sta", verify=False)
    r.raise_for_status()
    return r.json()["data"]
def get_clients():
    """Log in to the controller and return usable clients sorted by name."""
    log.debug("Getting clients")
    session = requests.Session()
    # Log in to controller
    login_response = session.post(
        f"{baseurl}/api/login",
        json={"username": username, "password": password},
        verify=False,
    )
    login_response.raise_for_status()
    clients = get_configured_clients(session)
    if not fixed_only:
        clients.extend(get_active_clients(session))
    # Merge configured and active clients on mac (later entries win) and
    # drop clients without a usable name or IP.
    clients_by_mac = {}
    for client in clients:
        log.debug("Client: %s", client)
        name = client.get("name")
        # An IP currently in use beats a configured fixed IP.
        ip = client.get("ip") or client.get("fixed_ip")
        if not name or not ip:
            continue
        if slug_names:
            name = slug_name(name)
        elif re.search(bad_chars_pattern, name):
            log.warning("%s skipped due to invalid characters.", name)
            continue
        clients_by_mac[client["mac"]] = {"name": name, "ip": ip}
    return sorted(clients_by_mac.values(), key=lambda entry: entry["name"])
def slug_name(name):
    """Return *name* with every run of disallowed characters replaced by '-'."""
    slugged = bad_chars_pattern.sub("-", name)
    if slugged != name:
        log.info("%s was slugged to %s", name, slugged)
    return slugged
if __name__ == "__main__":
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s: %(message)s", level=log_level
    )
    try:
        # Emit one hosts-file style line per client on stdout.
        for c in get_clients():
            print(f"{c['ip']} {c['name']}")
            log.debug("Host entry: %s %s", c["ip"], c["name"])
    except requests.exceptions.ConnectionError:
        log.critical("Could not connect to unifi controller at %s", baseurl)
        log.debug("Exception information below", exc_info=True)
        exit(1)
    except requests.exceptions.HTTPError:
        # Bound to be a common error:
        if baseurl.startswith("http://"):
            log.error(
                "Error connecting to %s. You should probably connect using HTTPS instead of HTTP",
                baseurl
            )
        log.debug("Exception information below", exc_info=True)
        exit(1)
        # NOTE(review): unreachable -- exit(1) raises SystemExit first.
        raise
|
"""
* Copyright (C) Caleb Marshall - All Rights Reserved
* Written by Caleb Marshall <anythingtechpro@gmail.com>, April 23rd, 2017
 * Licensing information can be found in 'LICENSE', which is part of this source code package.
"""
import string
import random
import struct
class DataBuffer(object):
    """A growable byte buffer with a read cursor.

    Values are packed/unpacked with ``struct`` in network byte order ('!').
    NOTE(review): this module targets Python 2 (see xrange/string.uppercase
    later in the file); writeString/writeArray concatenate str padding onto
    the data, which only works when the buffer holds a py2 str -- confirm
    before using under Python 3.
    """

    def __init__(self, data=bytes(), offset=0):
        self._data = data
        self._offset = offset

    @property
    def data(self):
        # Full underlying buffer, including bytes already consumed.
        return self._data

    @property
    def offset(self):
        # Current read position within the buffer.
        return self._offset

    @property
    def remaining(self):
        # Bytes not yet consumed by read()/readFrom().
        return self._data[self._offset:]

    def write(self, data):
        # Append to the end of the buffer; no-op for empty input.
        if not len(data):
            return
        self._data += data

    def writeTo(self, fmt, *args):
        # Pack *args* with struct format *fmt* (network order) and append.
        self.write(struct.pack('!%s' % fmt, *args))

    def read(self, length):
        # Consume and return up to *length* bytes from the cursor.
        data = self._data[self._offset:][:length]
        self._offset += length
        return data

    def clear(self):
        # Drop all contents and rewind the cursor.
        self._data = bytes()
        self._offset = 0

    def readFrom(self, fmt):
        # Unpack struct format *fmt* at the cursor and advance past it.
        data = struct.unpack_from('!%s' % fmt, self._data, self._offset)
        self._offset += struct.calcsize('!%s' % fmt)
        return data

    def readByte(self):
        # Unsigned 8-bit integer.
        return self.readFrom('B')[0]

    def writeByte(self, value):
        self.writeTo('B', value)

    def readSByte(self):
        # Signed 8-bit integer.
        return self.readFrom('b')[0]

    def writeSByte(self, value):
        self.writeTo('b', value)

    def readShort(self):
        # Signed 16-bit integer.
        return self.readFrom('h')[0]

    def writeShort(self, value):
        self.writeTo('h', value)

    def readString(self, length=64):
        # Fixed-width string field; trailing padding is stripped.
        return self.read(length).strip()

    def writeString(self, string, length=64):
        # Pad with spaces (0x20) to the fixed field width.
        # (Parameter name shadows the stdlib ``string`` module.)
        self.write(string + str().join(['\x20'] * (length - len(string))))

    def readArray(self, length=1024):
        return bytes(self.read(length))

    def writeArray(self, array, length=1024):
        # Pad with NULs to the fixed field width (py2 str semantics).
        self.write(array + bytes().join(['\x00'] * (length - len(array))))
def clamp(value, minV, maxV):
    """Clamp *value* to [minV, maxV] (lower bound wins if bounds are inverted)."""
    upper_bounded = min(value, maxV)
    return max(minV, upper_bounded)
def joinWithSpaces(chars):
    """Join the items of *chars* with single spaces, no trailing space.

    Bug fix: the old loop compared ``chars.index(char)`` (first occurrence)
    against ``len(char)`` (length of the item, not the sequence), so it
    dropped the wrong separators and left a trailing space; the intent was
    a plain space-join.
    """
    return ' '.join(chars)
def generateRandomSalt(length=16):
    """Return a random alphanumeric salt of *length* characters.

    Uses string.ascii_uppercase/ascii_lowercase and range(), which exist on
    both Python 2 and 3, instead of the py2-only string.uppercase /
    string.lowercase / xrange.
    NOTE(review): ``random`` is not cryptographically secure; prefer the
    ``secrets`` module if these salts protect anything sensitive.
    """
    alphanumerics = string.digits + string.ascii_uppercase + string.ascii_lowercase
    return ''.join(random.choice(alphanumerics) for _ in range(length))
class Mouse(object):
    """Mouse button identifiers used in click events."""
    # 0 = left button, 1 = right button.
    LEFT_CLICK = 0
    RIGHT_CLICK = 1
class ChatColors(object):
    """Chat color escape codes ('&' followed by a hex digit 0-f).

    Bug fix: BLACK was a bare '&' with no digit, breaking the otherwise
    uniform '&0'..'&f' pattern used by every other color; black is '&0'.
    """
    BLACK = '&0'
    DARK_BLUE = '&1'
    DARK_GREEN = '&2'
    DARK_TEAL = '&3'
    DARK_RED = '&4'
    PURPLE = '&5'
    GOLD = '&6'
    GRAY = '&7'
    DARK_GRAY = '&8'
    BLUE = '&9'
    BRIGHT_GREEN = '&a'
    TEAL = '&b'
    RED = '&c'
    PINK = '&d'
    YELLOW = '&e'
    WHITE = '&f'
class PlayerRanks(object):
    """Player permission levels."""
    GUEST = 0
    ADMINISTRATOR = 1

    @classmethod
    def hasPermission(cls, entity, requiredRank):
        """Everyone passes a GUEST check; otherwise the rank must match exactly."""
        if requiredRank == cls.GUEST:
            return True
        return entity.rank == requiredRank
class BlockIds(object):
    """Numeric identifiers for world block types."""
    AIR = 0
    GRASS = 2
    DIRT = 3
    COBBLESTONE = 4
    WOOD_PLANKS = 5
    SAPLING = 6
    BEDROCK = 7
    FLOWING_WATER = 8
    STATIONARY_WATER = 9
    FLOWING_LAVA = 10
    STATIONARY_LAVA = 11
    SAND = 12
    GRAVEL = 13
    GOLD_ORE = 14
    IRON_ORE = 15
    COAL_ORE = 16

    @classmethod
    def hasBlockId(cls, blockId):
        """Return True if *blockId* (an attribute name) is a known block.

        Bug fix: the old truthiness test reported AIR (value 0) as missing
        and raised AttributeError for unknown names; checking for an int
        attribute handles both.
        """
        return isinstance(getattr(cls, blockId, None), int)
|
import logging
from django.conf import settings
logger = logging.getLogger(__name__)
if settings.SMS_PROVIDER == 'nexmo':
    # Real SMS backend: nexmo's Client provides send_message().
    from nexmo import Client as BaseClient
else:
    # Fallback/test double: records messages in memory instead of sending.
    class BaseClient:
        def __init__(self, **kwargs):
            self.messages = []
        def send_message(self, message):
            # Enforce the minimal message schema the nexmo API expects.
            assert 'to' in message
            assert 'from' in message
            assert 'text' in message
            self.messages.append(message)
        def reset(self):
            # Forget previously "sent" messages (test helper).
            self.messages = []
class Client(BaseClient):
    """SMS client that knows how to notify EMSTrack users."""

    def notify_user(self, user, message):
        """Send *message* to *user*'s mobile number, if one is on file."""
        mobile_number = user.userprofile.mobile_number
        if mobile_number:
            sms = {
                'from': settings.SMS_FROM,
                'to': mobile_number.as_e164,
                'text': 'EMSTrack:\n' + message,
            }
            # Bug fix: send through *this* instance rather than the
            # module-level ``client`` singleton defined below.
            self.send_message(sms)
            logger.debug('SMS sent: {}'.format(sms))
        else:
            logger.debug('SMS not sent: user {} does not have a mobile on file'.format(user))

    # Provide a no-op reset when the backend (e.g. nexmo) lacks one.
    if not hasattr(BaseClient, 'reset'):
        def reset(self):
            pass
# Module-level singleton used by code that just needs "the" SMS client,
# configured from Django settings at import time.
client = Client(key=settings.SMS_KEY,
                secret=settings.SMS_PASS)
|
import math
import time
import numpy as np
#accuracy, precision, recall and f1 score calculations
def scores(labels_test,predicted_labels,time):
    """Print accuracy, precision, recall and F1 for binary predictions.

    ``time`` is the elapsed seconds to report (NOTE: the parameter shadows
    the stdlib ``time`` module inside this function; name kept for
    interface compatibility).
    """
    TP = 0
    TN = 0
    FP = 0
    FN = 0
    for n in range(0,len(labels_test)):
        if predicted_labels[n] == 1 and labels_test[n] == 1:
            TP +=1
        if predicted_labels[n] == 0 and labels_test[n] == 0:
            TN +=1
        if predicted_labels[n] == 1 and labels_test[n] == 0:
            FP +=1
        if predicted_labels[n] == 0 and labels_test[n] == 1:
            FN +=1
    accuracy = (TP+TN)/(TP+FP+FN+TN)
    # Robustness fix: avoid ZeroDivisionError when a class is never
    # predicted or never present; report 0.0 for the degenerate cases.
    precision = TP/(TP+FP) if TP+FP else 0.0
    recall = TP/(TP+FN) if TP+FN else 0.0
    f1 = 2*(recall * precision) / (recall + precision) if recall + precision else 0.0
    print("accuracy = %.3f " % accuracy)
    print("precision = %.3f " % precision)
    print("recall = %.3f " % recall)
    print("f1 = %.3f " % f1)
    print("time passed = %.2f seconds\n" % (time))
#count how many spam and ham sms and calculates p(0) and p(1),
#also returns the training labels
def ham_spam_counter(label):
    """Read the label CSV (header row, then id,label lines).

    Returns (ham_count, spam_count, P(ham), P(spam), labels).
    """
    labels = []
    ham = 0
    spam = 0
    with open(label) as fh:
        for row_number, line in enumerate(fh):
            if row_number == 0:
                continue  # skip the header row
            value = int(line.split(",")[1].strip("\n"))
            labels.append(value)
            if value == 0:
                ham += 1
            elif value == 1:
                spam += 1
    total = spam + ham
    return ham, spam, ham / total, spam / total, labels
#calculate how many times words are found in spam and ham sms
def probs(data,labels):
    """Scan the feature CSV and accumulate per-word counts per class.

    data: path to the features CSV (first row = header, first column = id).
    labels: per-row class labels (0 = ham, 1 = spam) aligned with the rows.
    Returns (word_spam, word_ham, word_count, total_word).
    NOTE(review): the vocabulary size 3458 is hard-coded to this dataset's
    column count -- confirm before reusing on other data.
    """
    f1 = open(data)
    i = 0
    j = 0
    first_row = 0
    word_spam = [0] * 3458   # number of spam rows containing word j
    word_ham = [0] * 3458    # number of ham rows containing word j
    word_count = [0] * 3458  # total occurrences of word j over all rows
    total_word = 0
    for lines in f1:
        if first_row == 0:
            first_row +=1
        else:
            tokens = lines.split(",")
            j = 0
            first_column = 0
            for token in tokens:
                if first_column==0:
                    # skip the leading id column
                    first_column +=1
                else:
                    word_count[j] += int(token)
                    total_word += int(token)
                    if labels[i] == 0:
                        if int(token) != 0:
                            word_ham[j] += 1
                    elif labels[i] == 1:
                        if int(token) != 0:
                            word_spam[j] += 1
                    j+=1
            i+=1
    f1.close()
    return word_spam, word_ham, word_count, total_word
#calculate the probability of each P(xj|y=1) and P(xj|y=0)
def probs_calculate(word_spam,word_ham,spam,ham,P0,P1):
    """Convert per-word document counts into per-class word probabilities.

    Returns (P_word_ham, P_word_spam, log(P0), log(P1)).
    """
    spam_word_probs = np.array(word_spam) / spam
    ham_word_probs = np.array(word_ham) / ham
    return ham_word_probs, spam_word_probs, math.log(P0), math.log(P1)
#calculates the mutual information
def mutual_information(data,labels,word_count,P_word_spam,P1,P_word_ham,P0):
    """Accumulate a mutual-information-style score per word from the CSV.

    data: path to the features CSV (header row, id column first).
    Returns a list of 3458 scores, one per vocabulary word.
    NOTE(review): the score is re-zeroed whenever word_count[j] == 0 and
    otherwise accumulated per row; confirm this matches the intended MI
    definition before reusing.
    """
    f1 = open(data)
    i = 0
    j = 0
    first_row = 0
    mutual_info = [0] * 3458
    for lines in f1:
        if first_row == 0:
            first_row +=1
        else:
            tokens = lines.split(",")
            j = 0
            first_column = 0
            for token in tokens:
                if first_column==0:
                    # skip the leading id column
                    first_column +=1
                else:
                    if labels[i] == 1:
                        if word_count[j] == 0:
                            mutual_info[j]= 0
                        else:
                            conditional_e = P_word_spam[j]/((word_count[j])*P1)
                            # log2 only when the ratio is non-zero
                            if conditional_e:
                                mutual_info[j] += P_word_spam[j] *math.log(conditional_e,2)
                            else:
                                mutual_info[j] += P_word_spam[j]
                    elif labels[i] == 0 :
                        if word_count[j] == 0:
                            mutual_info[j]= 0
                        else:
                            conditional_e = P_word_ham[j]/((word_count[j])*P0)
                            if conditional_e:
                                mutual_info[j] += P_word_ham[j] * math.log(conditional_e,2)
                            else:
                                mutual_info[j] += P_word_ham[j]
                    j+=1
            i+=1
    f1.close()
    return mutual_info
#finds the features with highest mutual information
def best_features(mutual_info):
    """Return feature indices ordered by mutual information, highest first.

    Bug fix: the old version sorted ascending, so callers that take the
    first N indices got the *least* informative features.
    """
    return sorted(range(len(mutual_info)), key=lambda k: mutual_info[k], reverse=True)
#load test labels
def load_label(test_label):
    """Read the test-label CSV (header row, then id,label lines) into a list of ints."""
    labels_test = []
    with open(test_label) as fh:
        for row_number, line in enumerate(fh):
            if row_number == 0:
                continue  # skip the header row
            labels_test.append(int(line.split(",")[1].strip("\n")))
    return labels_test
#prediction without feature selection
def predict_with_all_features(test_data,P_ham,P_spam,P_word_ham,P_word_spam):
    """Classify every row of the test CSV with the full Bernoulli NB model.

    Returns (predicted_labels, elapsed_seconds). 0 = ham, 1 = spam;
    ties break toward ham (>=).
    NOTE(review): math.log(1-P) raises ValueError if any word probability
    is exactly 1.0 -- confirm the training data cannot produce that.
    """
    start = time.time()
    f3 = open(test_data)
    first_row = 0
    ham_count = 0
    spam_count = 0
    predicted_labels = []
    for lines in f3:
        if first_row == 0:
            first_row +=1
        else:
            tokens = lines.split(",")
            j = 0
            first_column = 0
            # start from the class log-priors
            is_Ham = P_ham
            is_Spam = P_spam
            for token in tokens:
                if first_column==0:
                    # skip the leading id column
                    first_column +=1
                else:
                    if (int(bool(int(token)))):
                        # word present: add log P(word|class); zero
                        # probabilities are silently skipped
                        if P_word_ham[j]!= 0:
                            is_Ham += math.log(P_word_ham[j])
                        if P_word_spam[j] != 0:
                            is_Spam += math.log(P_word_spam[j])
                    else:
                        # word absent: add log(1 - P(word|class))
                        is_Ham += math.log(1-P_word_ham[j])
                        is_Spam += math.log(1-P_word_spam[j])
                    j +=1
            if is_Ham >= is_Spam:
                ham_count +=1
                predicted_labels.append(0)
            else:
                spam_count +=1
                predicted_labels.append(1)
    f3.close()
    end=time.time()
    return predicted_labels,(end-start)
#predict with the n number of features using the mutual information
def predict_with_features(test_data,feature_size,P_ham,P_spam,P_word_ham,P_word_spam,best_features):
    """Classify test rows using only the top-ranked features.

    Returns (predicted_labels, elapsed_seconds).
    NOTE(review): two suspected bugs, kept as-is -- (1) ``count`` starts at
    1 and breaks on equality, so only feature_size-1 features are used;
    (2) the first_column guard skips the *first selected feature* for each
    row (tokens are indexed directly by j, so there is no id column to
    skip here). Confirm intent before relying on exact scores.
    """
    start= time.time()
    f3 = open(test_data)
    first_row = 0
    ham_count = 0
    spam_count = 0
    predicted_labels = []
    for lines in f3:
        if first_row == 0:
            first_row +=1
        else:
            tokens = lines.split(",")
            count = 1
            first_column = 0
            # start from the class log-priors
            is_Ham = P_ham
            is_Spam = P_spam
            for j in best_features:
                if count == feature_size:
                    break
                if first_column==0:
                    first_column +=1
                else:
                    if (int(bool(int(tokens[j])))):
                        if P_word_ham[j]!= 0:
                            is_Ham += math.log(P_word_ham[j])
                        if P_word_spam[j] != 0:
                            is_Spam += math.log(P_word_spam[j])
                    else:
                        is_Ham += math.log(1-P_word_ham[j])
                        is_Spam += math.log(1-P_word_spam[j])
                    count +=1
            if is_Ham >= is_Spam:
                ham_count +=1
                predicted_labels.append(0)
            else:
                spam_count +=1
                predicted_labels.append(1)
    f3.close()
    end = time.time()
    return predicted_labels, (end-start)
# Driver: train the naive Bayes model from the CSVs, score the test set
# with all features, then with growing feature subsets ranked by mutual
# information.
data = "sms_train_features.csv"
label = "sms_train_labels.csv"
test_data = "sms_test_features.csv"
test_label = "sms_test_labels.csv"
ham, spam, P0, P1, labels = ham_spam_counter(label)
word_spam, word_ham, word_count, total_word = probs(data,labels)
P_word_ham,P_word_spam,P_ham,P_spam = probs_calculate(word_spam, word_ham, spam, ham, P0, P1)
mutual_info = mutual_information(data,labels,word_count,P_word_spam,P1,P_word_ham,P0)
# NOTE(review): rebinding the name hides the best_features() function from
# this point on.
best_features = best_features(mutual_info)
labels_test = load_label(test_label)
predicted_labels, time1 = predict_with_all_features(test_data,P_ham,P_spam,P_word_ham,P_word_spam)
scores(labels_test,predicted_labels, time1)
predicted_labels0 , time1 = predict_with_features(test_data,100,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels0, time1)
predicted_labels1 , time1 = predict_with_features(test_data,200,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels1, time1)
predicted_labels2 , time1 = predict_with_features(test_data,300,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels2, time1)
predicted_labels3 , time1 = predict_with_features(test_data,400,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels3, time1)
predicted_labels4 , time1 = predict_with_features(test_data,500,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels4, time1)
predicted_labels5 , time1 = predict_with_features(test_data,600,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels5, time1)
"""
predicted_labels6 , time1 = predict_with_features(test_data,700,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels6, time1)
predicted_labels7 , time1 = predict_with_features(test_data,800,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels7, time1)
predicted_labels8 , time1 = predict_with_features(test_data,900,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels8, time1)
predicted_labels9 , time1 = predict_with_features(test_data,1000,P_ham,P_spam,P_word_ham,P_word_spam,best_features)
scores(labels_test,predicted_labels9, time1)
"""
|
from django.conf.urls import url
from structure.views import *
from structure import views
from django.conf import settings
from django.views.generic import TemplateView
from django.views.decorators.cache import cache_page
# URL routes for the structure app. Fix applied below: the ``outfile``
# patterns used an unescaped '.' (r'\w+.\w{3}'), which matched any
# character between name and extension; it is now a literal dot.
urlpatterns = [
    url(r'^$', cache_page(60*60*24)(StructureBrowser.as_view()), name='structure_browser'),
    url(r'^g_protein_structure_browser$', cache_page(60*60*24)(GProteinStructureBrowser.as_view()), name='g_protein_structure_browser'),
    url(r'^browser$', RedirectBrowser, name='redirect_browser'),
    url(r'^selection_convert$', ConvertStructuresToProteins, name='convert'),
    url(r'^selection_convert_model$', ConvertStructureModelsToProteins, name='convert_mod'),
    url(r'^selection_convert_signprot_model$', ConvertStructureComplexSignprotToProteins, name='convert_signprot'),
    url(r'^hommod_download$', HommodDownload, name='hommod_download'),
    url(r'^complexmod_download$', ComplexmodDownload, name='complexmod_download'),
    url(r'^template_browser', TemplateBrowser.as_view(), name='structure_browser'),
    url(r'^template_selection', TemplateTargetSelection.as_view(), name='structure_browser'),
    url(r'^template_segment_selection', TemplateSegmentSelection.as_view(), name='structure_browser'),
    url(r'^gprot_statistics$', cache_page(60*60*24)(StructureStatistics.as_view(origin='gprot')), name='structure_statistics'),
    url(r'^statistics$', cache_page(60*60*24)(StructureStatistics.as_view()), name='structure_statistics'),
    url(r'^homology_models$', cache_page(60*60*24)(ServeHomologyModels.as_view()), name='homology_models'),
    url(r'^complex_models$', cache_page(60*60*24)(ServeComplexModels.as_view()), name='complex_models'),
    url(r'^model_statistics$', cache_page(60*60*24)(ServeModelStatistics.as_view()), name='model_statistics'),
    # url(r'^pdb_download_index$', PDBClean.as_view(), name='pdb_download'),
    url(r'pdb_segment_selection', PDBSegmentSelection.as_view(), name='pdb_download'),
    url(r'^pdb_download$', PDBClean.as_view(), name='pdb_download'),
    url(r'^pdb_download_custom$', PDBClean.as_view(), name='pdb_download_custom'),
    url(r'^pdb_download/(?P<substructure>\w+)$', PDBDownload.as_view(), name='pdb_download'),
    url(r'^generic_numbering_index', GenericNumberingIndex.as_view(), name='generic_numbering'),
    url(r'^generic_numbering_results$', GenericNumberingResults.as_view(), name='generic_numbering'),
    url(r'^generic_numbering_results/(?P<substructure>\w+)$', GenericNumberingDownload.as_view(), name='generic_numbering'),
    url(r'^generic_numbering_selection', GenericNumberingSelection.as_view(), name='generic_numbering'),
    url(r'^superposition_workflow_index$', SuperpositionWorkflowIndex.as_view(), name='superposition_workflow'),
    url(r'^superposition_workflow_index/(?P<clear>\w{4})$', SuperpositionWorkflowIndex.as_view(), name='superposition_workflow'),
    url(r'^superposition_workflow_selection', SuperpositionWorkflowSelection.as_view(), name='superposition_workflow'),
    url(r'^superposition_workflow_results$', SuperpositionWorkflowResults.as_view(), name='superposition_workflow'),
    url(r'^superposition_workflow_results/(?P<substructure>\w+)$', SuperpositionWorkflowDownload.as_view(), name='superposition_workflow'),
    # url(r'^fragment_superposition_index', FragmentSuperpositionIndex.as_view(), name='fragment_superposition'),
    # url(r'^fragment_superposition_results', FragmentSuperpositionResults.as_view(), name='fragment_superposition'),
    url(r'^output/(?P<outfile>\w+\.\w{3})/(?P<replacement_tag>\w+)$', ServePdbOutfile, name='structural_tools_result'),
    url(r'^zipoutput/(?P<outfile>\w+\.\w{3})/', ServeZipOutfile, name='structural_tools_result'),
    url(r'^showtrees', RenderTrees, name='render'),
    url(r'^webform$', views.webform, name='webform'),
    url(r'^webformdata$', views.webformdata, name='webformdata'),
    url(r'^construct$', views.webform_two, name='webform_two'),
    url(r'^construct/(?P<slug>[\w_]+)$', views.webform_two, name='webform_two'),
    url(r'^webform/(?P<slug>[\w_]+)$', views.webform_download, name='webform_download'),
    url(r'^(?P<pdbname>\w+)$', cache_page(60*60*24*7)(StructureDetails), name='structure_details'),
    url(r'^pdb/(?P<pdbname>\w+)$', cache_page(60*60*24*7)(ServePdbDiagram), name='structure_serve_pdb'),
    url(r'^homology_models/(?P<modelname>\w+)_(?P<state>\w+)$', cache_page(60*60*24*7)(HomologyModelDetails), name='homology_model_details'),
    url(r'^homology_models/(?P<modelname>\w+)_(?P<state>\w+)_(?P<fullness>\w+)/download_pdb$', SingleModelDownload, name='single_model_download'),
    url(r'^refined/(?P<modelname>\w+)_(?P<fullness>\w+)/download_pdb$', SingleModelDownload, name='single_model_download'),
    # url(r'^homology_models/(?P<modelname>\w+)_(?P<state>\w+)/download_csv$', SingleModelDownload, {'csv':True}, name='single_model_download'),
    url(r'^refined/(?P<pdbname>\w+)$', cache_page(60*60*24*7)(RefinedModelDetails), name="refined_model_details"),
    url(r'^complex_models/(?P<modelname>\w+)-(?P<signprot>\w+)/download_pdb$', SingleComplexModelDownload, name='single_complex_model_download'),
    url(r'^refined/(?P<modelname>\w+)-(?P<signprot>\w+)/download_pdb$', SingleComplexModelDownload, name='single_complex_model_download'),
    # url(r'^complex_models/(?P<modelname>\w+)-(?P<signprot>\w+)/download_csv$', SingleComplexModelDownload, {'csv':True}, name='single_complex_model_download'),
    url(r'^homology_models/view/(?P<modelname>\w+)_(?P<state>\w+)$', ServeHomModDiagram, name='hommod_serve_view'),
    url(r'^complex_models/(?P<modelname>\w+)-(?P<signprot>\w+)$', cache_page(60*60*24*7)(ComplexModelDetails), name='complex_model_details'),
    url(r'^complex_models/view/(?P<modelname>\w+)-(?P<signprot>\w+)$', ServeComplexModDiagram, name='complexmod_serve_view'),
    url(r'^pdb/(?P<pdbname>\w+)/ligand/(?P<ligand>.+)$', ServePdbLigandDiagram, name='structure_serve_pdb_ligand'),
]
|
# -*- coding: utf-8 -*-
import scrapy
from ..items import CrudeoilpricingItem
class MyspdSpider(scrapy.Spider):
name = 'myspd'
allowed_domains = ['https://www.macrotrends.net/2516/wti-crude-oil-prices-10-year-daily-chart']
start_urls = ['https://www.macrotrends.net/2516/wti-crude-oil-prices-10-year-daily-chart']
def parse(self, response):
headers = response.xpath("//th/text()").getall()
data = response.xpath("//td/text()").getall()
obj = CrudeoilpricingItem()
obj['header'] = headers
obj['data'] = data
formattedData = format_data(obj)
return formattedData
def format_data(self, obj):
|
from __future__ import print_function
from __future__ import division
from . import _C
import torch
import torch.nn.functional as F
from fuzzytools.strings import xstr
from . import exceptions as ex
import numpy as np
import pandas as pd
###################################################################################################################################################
def _check_batch(batch_loss):
assert len(batch_loss.shape)==1 # (b)
assert len(batch_loss)>0
def _check(batch_loss, batch_weights):
	# Validate the loss tensor; weights, when present, must match its shape.
	_check_batch(batch_loss)
	if batch_weights is not None:
		assert batch_loss.shape == batch_weights.shape
###################################################################################################################################################
class BatchLoss():
	"""Holds the per-sample losses (and optional per-sample weights) of one batch.

	batch_loss: 1-D tensor of shape (n,), one loss value per sample.
	batch_weights: tensor of shape (n,) or None (None -> uniform 1/n weighting).
	Named sub-losses of shape (n,) can be attached for logging/inspection.
	"""
	def __init__(self, batch_loss, batch_weights):
		_check(batch_loss, batch_weights)
		self.batch_loss = batch_loss # (n,)
		self.batch_weights = batch_weights # (n,) or None
		self.reset()

	def reset(self):
		# Drop all registered sub-losses.
		self.batch_sublosses = {}

	def __len__(self):
		return len(self.batch_loss)

	def add_subloss(self, batch_subloss_name, batch_subloss):
		_check_batch(batch_subloss)
		self.batch_sublosses[batch_subloss_name] = batch_subloss # (n,)

	def get_loss_item(self,
		get_tensor=False,
		):
		"""Reduce the batch to a scalar loss; raises NanLossError on NaN/inf."""
		# Uniform 1/n weighting when no explicit weights were supplied.
		batch_weights = 1/len(self) if self.batch_weights is None else self.batch_weights
		loss_item = torch.sum(self.batch_loss*batch_weights) # (n,)>()
		# NaN is not finite, so a single isfinite test covers both NaN and +/-inf
		# (previously isnan and ~isfinite were checked separately).
		if not torch.all(torch.isfinite(loss_item)):
			raise ex.NanLossError()
		if get_tensor:
			return loss_item # () tensor, keeps the autograd graph
		else:
			return loss_item.detach().item() # python float

	def backward(self):
		# Backpropagate through the weighted scalar loss.
		loss_item = self.get_loss_item(get_tensor=True)
		loss_item.backward() # gradient calculation

	def get_subloss_item(self, batch_subloss_name,
		get_tensor=False,
		):
		"""Reduce one named sub-loss to a scalar (no finiteness check)."""
		batch_weights = 1/len(self) if self.batch_weights is None else self.batch_weights
		loss_item = torch.sum(self.batch_sublosses[batch_subloss_name]*batch_weights) # (n,)>()
		if get_tensor:
			return loss_item # () tensor
		else:
			return loss_item.detach().item() # python float

	def get_sublosses_names(self):
		return list(self.batch_sublosses.keys())

	def __repr__(self):
		lv = f'{xstr(self.get_loss_item())}'
		batch_sublosses = list(self.batch_sublosses.keys())
		if len(batch_sublosses)==0:
			return f'{lv}'
		else:
			txt = '|'.join([f'{batch_subloss}={xstr(self.get_subloss_item(batch_subloss))}' for batch_subloss in batch_sublosses])
			return f'{lv} ({txt})'

	def __add__(self, other):
		"""Concatenate two batches; 0/None act as identity so sum() works."""
		if other is None or other==0:
			return self
		elif type(self)==BatchLoss and type(other)==BatchLoss:
			new_batch_loss = torch.cat([self.batch_loss, other.batch_loss], dim=0) # (n1+n2)
			# Weights survive only when both operands carry them.
			new_batch_weights = None if (self.batch_weights is None or other.batch_weights is None) else torch.cat([self.batch_weights, other.batch_weights], dim=0) # (n1+n2)
			new_loss = BatchLoss(new_batch_loss, new_batch_weights)
			# NOTE(review): assumes `other` defines the same sub-losses as `self`.
			for subloss_name in self.get_sublosses_names():
				new_batch_subloss = torch.cat([self.batch_sublosses[subloss_name], other.batch_sublosses[subloss_name]], dim=0) # (n1+n2)
				new_loss.add_subloss(subloss_name, new_batch_subloss)
			return new_loss
		else:
			raise Exception(f'{type(self)}; {type(other)}')

	def __radd__(self, other):
		return self+other

	def get_info(self):
		"""Return a plain dict summary: length, total loss and every sub-loss."""
		d = {
			'_len':len(self),
			'_loss':self.get_loss_item(),
			}
		for subloss_name in self.get_sublosses_names():
			d[subloss_name] = self.get_subloss_item(subloss_name)
		return d
###################################################################################################################################################
class FTLoss():
	"""Base class for losses.

	Subclasses implement compute_loss(tdict, **kwargs) returning either a (n,)
	loss tensor, or a dict containing a '_loss' tensor plus named (n,)
	sub-loss tensors.
	"""
	def __init__(self, name, weight_key,
		**kwargs):
		self.name = name
		# Key into tdict holding per-sample weights; None -> unweighted.
		self.weight_key = weight_key

	def _get_weights(self, tdict,
		**kwargs):
		"""Return the (n,) weight tensor from tdict, or None when unweighted."""
		if self.weight_key is None:
			return None
		else:
			batch_weights = tdict[self.weight_key] # (n,)
			return batch_weights

	def __call__(self, tdict,
		**kwargs):
		"""Run compute_loss and wrap its result in a BatchLoss."""
		batch_weights = self._get_weights(tdict, **kwargs)
		loss_dict = self.compute_loss(tdict, **kwargs)
		# isinstance (rather than exact type equality) also accepts subclasses
		# such as OrderedDict or torch.nn.Parameter.
		if isinstance(loss_dict, dict):
			assert '_loss' in loss_dict.keys()
			loss_obj = BatchLoss(loss_dict['_loss'], batch_weights)
			for key in loss_dict.keys():
				if key=='_loss':
					continue
				loss_obj.add_subloss(key, loss_dict[key]) # (n,)
			return loss_obj
		elif isinstance(loss_dict, torch.Tensor):
			return BatchLoss(loss_dict, batch_weights) # (n,)
		else:
			raise Exception(f'type={type(loss_dict)}')
import pygal
from datetime import datetime, date
from pygal.style import Style
# Transparent-background pygal style shared by every chart builder below.
invis_style = Style(background = 'transparent')
def generate(libraries, metric, chart_type):
    """Build the chart for (metric, chart_type) over libraries.

    'default' maps to each metric's preferred chart. Returns None for an
    unknown metric or an unsupported chart_type.
    """
    dispatch = {
        'popularity': {
            'default': generate_bar_chart_popularity,
            'bar_raw': generate_bar_chart_popularity,
            'pie': generate_pie_chart_popularity,
            'gauge': generate_solid_gauge_chart_popularity,
        },
        'release-frequency': {
            'default': generate_bar_chart_release_frequency,
            'bar_avg': generate_bar_chart_release_frequency,
            'box': generate_box_chart_release_frequency,
            'line': generate_line_chart_release_frequency,
        },
        'last-modification-date': {
            'default': generate_bar_chart_last_modification,
            'bar_days': generate_bar_chart_last_modification,
        },
        'performance': {
            'default': generate_box_chart_performance,
            'box': generate_box_chart_performance,
            'gauge': generate_solid_gauge_chart_performance,
        },
        'security': {
            'default': generate_box_chart_security,
            'box': generate_box_chart_security,
            'gauge': generate_solid_gauge_chart_security,
        },
        'issue-response-time': {
            'default': generate_xy_chart_issue_response_time,
            'xy': generate_xy_chart_issue_response_time,
            'box': generate_box_chart_issue_response_time,
        },
        'issue-closing-time': {
            'default': generate_xy_chart_issue_closing_time,
            'xy': generate_xy_chart_issue_closing_time,
            'box': generate_box_chart_issue_closing_time,
        },
        'backwards-compatibility': {
            'default': generate_bar_chart_backwards_compatibility,
            'bar': generate_bar_chart_backwards_compatibility,
            'line': generate_line_chart_backwards_compatibility,
        },
        'last-discussed-on-so': {
            'default': generate_box_chart_last_discussed,
            'box': generate_box_chart_last_discussed,
            'scatter': generate_scatter_chart_last_discussed,
        },
    }
    maker = dispatch.get(metric, {}).get(chart_type)
    if maker is None:
        return None
    return maker(libraries)
# popularity
def generate_bar_chart_popularity(libraries):
    # Bar chart of library popularity, most popular first.
    bar_chart = pygal.Bar(title = 'Number Of Software Projects Making Use Of The Library' , x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    for lib in sorted(libraries, key=lambda entry: entry.popularity, reverse=True):
        bar_chart.add(lib.name, lib.popularity)
    return bar_chart
def generate_pie_chart_popularity(libraries):
    # Pie chart of library popularity, most popular first.
    pie_chart = pygal.Pie(title = 'Number Of Software Projects Making Use Of The Library', x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    for lib in sorted(libraries, key=lambda entry: entry.popularity, reverse=True):
        pie_chart.add(lib.name, lib.popularity)
    return pie_chart
def generate_solid_gauge_chart_popularity(libraries):
    # Solid gauge per library, each scaled against the highest popularity seen.
    gauge_chart = pygal.SolidGauge(title = 'Number Of Software Projects Making Use Of The Library', x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    ranked = sorted(libraries, key=lambda entry: entry.popularity, reverse=True)
    top_popularity = 0
    for lib in ranked:
        top_popularity = max(top_popularity, lib.popularity)
    for lib in ranked:
        gauge_chart.add(lib.name, [{'value':lib.popularity, 'max_value':top_popularity}])
    return gauge_chart
# release frequency
def generate_bar_chart_release_frequency(libraries):
    """Bar chart: average days between consecutive releases, shortest first.

    Libraries with fewer than two releases are skipped (no interval exists;
    previously this raised ZeroDivisionError).
    """
    bar_chart = pygal.Bar(title = 'Average Days Between Releases', x_title='', y_title='Days', x_label_rotation = -45, style = invis_style)
    release_tuples = []
    for library in libraries:
        release_times = []
        for i in range(len(library.release_frequency) - 1):
            f_date = date(library.release_frequency[i].year, library.release_frequency[i].month, library.release_frequency[i].day)
            l_date = date(library.release_frequency[i + 1].year, library.release_frequency[i + 1].month, library.release_frequency[i + 1].day)
            release_times.append((l_date - f_date).days)
        if not release_times:
            continue  # fewer than two releases -> no average to compute
        avg_release_time = sum(release_times) / len(release_times)
        release_tuples.append((avg_release_time, len(release_times), library.name))
    for release_tuple in sorted(release_tuples, key=lambda release: release[0]):
        bar_chart.add(release_tuple[2], release_tuple[0])
    return bar_chart
def generate_box_chart_release_frequency(libraries):
    """Box chart: day-gaps between consecutive releases, per library.

    Libraries with fewer than two releases are skipped (no interval exists;
    previously this raised ZeroDivisionError).
    """
    box_chart = pygal.Box(title = 'Days Between Releases', x_title='Total Releases', y_title='Days', x_label_rotation = -45, style = invis_style)
    all_release_times = []
    release_tuples = []
    for library in libraries:
        release_times = []
        for i in range(len(library.release_frequency) - 1):
            f_date = date(library.release_frequency[i].year, library.release_frequency[i].month, library.release_frequency[i].day)
            l_date = date(library.release_frequency[i + 1].year, library.release_frequency[i + 1].month, library.release_frequency[i + 1].day)
            release_times.append((l_date - f_date).days)
        if not release_times:
            continue  # fewer than two releases -> no average to compute
        avg_release_time = sum(release_times) / len(release_times)
        release_tuples.append((release_times, avg_release_time, len(release_times), library.name))
    # Series ordered by average gap; x labels carry the release counts.
    for release_tuple in sorted(release_tuples, key=lambda release: release[1]):
        box_chart.add(release_tuple[3], release_tuple[0])
        all_release_times.append(release_tuple[3] + ': ' + str(release_tuple[2]))
    box_chart.x_labels = all_release_times
    return box_chart
def generate_line_chart_release_frequency(libraries):
    # Line chart: day-gap between each pair of consecutive releases.
    line_chart = pygal.Line(title = 'Average Days Between Releases', x_title='Release', y_title='Days', x_label_rotation = -45, style = invis_style)
    for library in libraries:
        dates = [date(d.year, d.month, d.day) for d in library.release_frequency]
        gaps = [(later - earlier).days for earlier, later in zip(dates, dates[1:])]
        line_chart.add(library.name, gaps)
    return line_chart
# last modification date
def generate_bar_chart_last_modification(libraries):
    # Bar chart: days elapsed since each library's latest release, fewest first.
    bar_chart = pygal.Bar(title = 'Days Since Last Release', x_title='', y_title='Days', x_label_rotation = -45, style = invis_style)
    now = datetime.now()
    today = date(now.year, now.month, now.day)
    ages = []
    for library in libraries:
        modified = date(library.last_modification_date.year, library.last_modification_date.month, library.last_modification_date.day)
        ages.append(((today - modified).days, library.name))
    for days, name in sorted(ages, key=lambda entry: entry[0]):
        bar_chart.add(name, days)
    return bar_chart
# performance
def generate_bar_chart_performance(libraries):
    """Bar chart: fraction of each library's issues flagged performance related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError).
    """
    bar_chart = pygal.Bar(range=(0,1), title = 'Percentage Of Total Performance Related Issues', x_title='', y_title='Percentage', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.performance_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name))
    for ratio, name in sorted(unsorted_list, key=lambda entry: entry[0]):
        bar_chart.add(name, ratio)
    return bar_chart
def generate_box_chart_performance(libraries):
    """Box chart: fraction of each library's issues flagged performance related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError). X labels carry the absolute issue counts.
    """
    box_chart = pygal.Box(range=(0,1), title = 'Percentage Of Total Performance Related Issues', x_title='Total Performance Issues', y_title='Percentage', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.performance_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name, count))
    labels = []
    for ratio, name, count in sorted(unsorted_list, key=lambda entry: entry[0]):
        box_chart.add(name, ratio)
        labels.append(name + ': ' + str(count))
    box_chart.x_labels = labels
    return box_chart
def generate_solid_gauge_chart_performance(libraries):
    """Solid gauge: fraction of each library's issues flagged performance related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError).
    """
    gauge_chart = pygal.SolidGauge(title = 'Percentage Of Total Performance Related Issues', x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.performance_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name))
    for ratio, name in sorted(unsorted_list, key=lambda entry: entry[0]):
        gauge_chart.add(name, [{'value':ratio, 'max_value':1}])
    return gauge_chart
# security
def generate_bar_chart_security(libraries):
    """Bar chart: fraction of each library's issues flagged security related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError).
    """
    bar_chart = pygal.Bar(range=(0,1), title = 'Percentage Of Total Security Related Issues', x_title='', y_title='Percentage', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.security_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name))
    for ratio, name in sorted(unsorted_list, key=lambda entry: entry[0]):
        bar_chart.add(name, ratio)
    return bar_chart
def generate_box_chart_security(libraries):
    """Box chart: fraction of each library's issues flagged security related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError). X labels carry the absolute issue counts.
    """
    box_chart = pygal.Box(range=(0,1), title = 'Percentage Of Total Security Related Issues', x_title='Total Security Issues', y_title='Percentage', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.security_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name, count))
    labels = []
    for ratio, name, count in sorted(unsorted_list, key=lambda entry: entry[0]):
        box_chart.add(name, ratio)
        labels.append(name + ': ' + str(count))
    box_chart.x_labels = labels
    return box_chart
def generate_solid_gauge_chart_security(libraries):
    """Solid gauge: fraction of each library's issues flagged security related.

    Libraries with no recorded issues are skipped (previously this raised
    ZeroDivisionError).
    """
    gauge_chart = pygal.SolidGauge(title = 'Percentage Of Total Security Related Issues', x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    unsorted_list = []
    for library in libraries:
        if not library.issue_data:
            continue
        count = sum(1 for issue in library.issue_data if issue.security_issue == 'Yes')
        unsorted_list.append((count / len(library.issue_data), library.name))
    for ratio, name in sorted(unsorted_list, key=lambda entry: entry[0]):
        gauge_chart.add(name, [{'value':ratio, 'max_value':1}])
    return gauge_chart
# issue closing time
def generate_xy_chart_issue_closing_time(libraries):
    """XY chart: average days to close an issue vs fraction never closed.

    Libraries with no issues, or with no closed issues, are skipped
    (previously this raised ZeroDivisionError).
    """
    xy_chart = pygal.XY(dots_size=4, range=(0,1), title = 'Average Time To Close An Issue vs Percentage of Issues Not Closed', x_title='Average Days To Close Issue', y_title='Percentage Of Issues Closed', x_label_rotation = -45, style = invis_style)
    for library in libraries:
        issue_count = len(library.issue_data)
        close_times = [issue.time_to_close for issue in library.issue_data if issue.issue_closing_date is not None]
        if issue_count == 0 or not close_times:
            continue  # nothing closed yet -> no average to plot
        avg = sum(close_times) / len(close_times)
        # Fraction of ALL issues that were never closed. Previously divided by
        # the closed-issue count, which is not a percentage and can exceed 1.
        per = (issue_count - len(close_times)) / issue_count
        xy_chart.add(library.name, [(avg, per)] )
    return xy_chart
def generate_box_chart_issue_closing_time(libraries):
    # Box chart of days-to-close for each library's closed issues.
    box_chart = pygal.Box(title = 'Days Taken To Close Issue', x_title='Total Issues Closed', y_title='Days', x_label_rotation = -45, style = invis_style)
    x_labels = []
    for library in libraries:
        closed = [issue.time_to_close for issue in library.issue_data if issue.issue_closing_date is not None]
        box_chart.add(library.name, closed)
        x_labels.append(library.name + ': ' + str(len(closed)))
    box_chart.x_labels = x_labels
    return box_chart
# issue response time
def generate_xy_chart_issue_response_time(libraries):
    """XY chart: average days to first response vs fraction never responded to.

    Libraries with no issues, or with no responded issues, are skipped
    (previously this raised ZeroDivisionError).
    """
    xy_chart = pygal.XY(dots_size=4, range=(0,1), title = 'Average Time To Respond To An Issue vs Percentage of Issues Not Responded To' , x_title='Average Days To Response', y_title='Percentage of Issues Not Responded To', x_label_rotation = -45, style = invis_style)
    for library in libraries:
        issue_count = len(library.issue_data)
        response_times = [issue.time_to_response for issue in library.issue_data if issue.date_of_first_comment is not None]
        if issue_count == 0 or not response_times:
            continue  # no responses yet -> no average to plot
        avg = sum(response_times) / len(response_times)
        # Fraction of ALL issues never responded to. Previously divided by the
        # responded-issue count, which is not a percentage and can exceed 1.
        per = (issue_count - len(response_times)) / issue_count
        xy_chart.add(library.name, [(avg, per)] )
    return xy_chart
def generate_box_chart_issue_response_time(libraries):
    """Box chart of days-to-first-response for each library's responded issues.

    Filters on date_of_first_comment; the previous version filtered on
    issue_closing_date, which counted closed-but-unanswered issues and matches
    the sibling closing-time chart rather than this one.
    """
    box_chart = pygal.Box(title = 'Days Taken To Respond To Issue', x_title='Total Issues Responsed To', y_title='Days', x_label_rotation = -45, style = invis_style)
    all_response_times = []
    for library in libraries:
        response_times = [issue.time_to_response for issue in library.issue_data if issue.date_of_first_comment is not None]
        box_chart.add(library.name, response_times )
        all_response_times.append(library.name + ': ' + str(len(response_times)))
    box_chart.x_labels = all_response_times
    return box_chart
# backwards compatability
def generate_bar_chart_backwards_compatibility(libraries):
    """Bar chart: average breaking changes per version, lowest first.

    Libraries with no recorded versions are skipped (previously this raised
    ZeroDivisionError).
    """
    bar_chart = pygal.Bar(title = 'Average Number Of Breaking Changes Per Version', x_title='', y_title='', x_label_rotation = -45, style = invis_style)
    averages = []
    for library in libraries:
        history = library.backward_compatibilty  # attribute name (sic) from the data model
        if not history:
            continue
        averages.append((sum(history) / len(history), library.name))
    for avg, name in sorted(averages, key=lambda entry: entry[0]):
        bar_chart.add(name, avg)
    return bar_chart
def generate_box_chart_backwards_compatibility(libraries):
    # Box chart: breaking-change counts per version, one series per library.
    box_chart = pygal.Box(title = 'Number Of Breaking Changes Per Version', x_title='Total Number Of Versions', y_title='', x_label_rotation = -45, style = invis_style)
    x_labels = []
    for library in libraries:
        history = library.backward_compatibilty
        box_chart.add(library.name, history)
        x_labels.append(library.name + ': ' + str(len(history)))
    box_chart.x_labels = x_labels
    return box_chart
def generate_line_chart_backwards_compatibility(libraries):
    # Line chart: breaking changes per version, one line per library.
    chart = pygal.Line(title = 'Number Of Breaking Changes Per Version', x_title='Version', y_title='Number Of Breaking Changes', x_label_rotation = -45, style = invis_style)
    for lib in libraries:
        chart.add(lib.name, lib.backward_compatibilty)
    return chart
# last discussed on stack overflow
def generate_bar_chart_last_discussed(libraries):
    # Bar chart: days since each library was last discussed on Stack Overflow.
    # Libraries that were never discussed (date is None) are left out.
    bar_chart = pygal.Bar(title = 'Days Since Last Discussed On Stack Overflow', x_title='', y_title='Days', x_label_rotation = -45, style = invis_style)
    now = datetime.now()
    today = date(now.year, now.month, now.day)
    ages = []
    for library in libraries:
        last = library.last_discussed_on_stack_overflow[0]
        if last is None:
            continue
        ages.append(((today - date(last.year, last.month, last.day)).days, library.name))
    for days, name in sorted(ages, key=lambda entry: entry[0]):
        bar_chart.add(name, days)
    return bar_chart
def generate_box_chart_last_discussed(libraries):
    """Box chart: days since each library was last discussed on Stack Overflow.

    Never-discussed libraries get an empty series. Values are wrapped in a
    list: pygal.Box expects a sequence per series, and the never-discussed
    branch already passed one while the discussed branch passed a bare scalar.
    """
    box_chart = pygal.Box(title = 'Days Since Last Discussed On Stack Overflow', x_title='Total Discussions', y_title='', x_label_rotation = -45, style = invis_style)
    now = datetime.now()
    today = date(now.year, now.month, now.day)
    unsorted_list = []
    for library in libraries:
        last = library.last_discussed_on_stack_overflow[0]
        discussions = library.last_discussed_on_stack_overflow[1]
        if last is not None:
            days_since = (today - date(last.year, last.month, last.day)).days
            unsorted_list.append(([days_since], days_since, library.name, discussions))
        else:
            unsorted_list.append(([], 0, library.name, discussions))
    labels = []
    for values, _sort_key, name, discussions in sorted(unsorted_list, key=lambda entry: entry[1]):
        box_chart.add(name, values)
        labels.append(name + ': ' + str(discussions))
    box_chart.x_labels = labels
    return box_chart
def generate_scatter_chart_last_discussed(libraries):
    # Scatter: (days since last SO discussion, total discussions) per library.
    # Never-discussed libraries are plotted at day 0.
    xy_chart = pygal.XY(dots_size=4, stroke = False, title = 'Days Since Last Discussed On Stack Overflow', x_title='Days', y_title='Total Discussions', x_label_rotation = -45, style = invis_style)
    now = datetime.now()
    today = date(now.year, now.month, now.day)
    points = []
    for library in libraries:
        last = library.last_discussed_on_stack_overflow[0]
        count = library.last_discussed_on_stack_overflow[1]
        days_since = (today - date(last.year, last.month, last.day)).days if last is not None else 0
        points.append((days_since, library.name, count))
    # NOTE(review): sorted by name (index 1), unlike the sibling charts which
    # sort by value -- possibly intentional for legend ordering; confirm.
    for days_since, name, count in sorted(points, key=lambda entry: entry[1]):
        xy_chart.add(name, [(days_since, count)])
    return xy_chart
# isinstance("10", str)
# isinstance(10, int)
# isinstance({"name":"Mrx"}, dict)
# isinstance(["name","Mrx"], list)
def isList(data):
    """True when data is a list (subclasses included)."""
    is_a_list = isinstance(data, list)
    return is_a_list
def isDict(data):
    """True when data is a dict (subclasses included)."""
    is_a_dict = isinstance(data, dict)
    return is_a_dict
def isString(data):
    """True when data is a str (subclasses included); bytes do not count."""
    is_a_string = isinstance(data, str)
    return is_a_string
def isInt(data):
    """True when data is an int. NOTE: bool is an int subclass, so True/False pass."""
    is_an_int = isinstance(data, int)
    return is_an_int
# Conditional probability density function in Python.
# Reference search: https://www.google.com/search?q=conditional+probability+density+function+python
|
import uuid
from django.conf import settings
from django.db import models
from pagseguro.signals import notificacao_recebida
from .managers import CartManager, PurchaseManager
def generate_code():
    """Return a fresh random UUID (used as the models' primary-key default)."""
    code = uuid.uuid4()
    return code
class BaseModel(models.Model):
    """Abstract base model: random-UUID primary key plus audit timestamps."""
    id = models.UUIDField(primary_key=True, editable=False, default=generate_code)
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    class Meta:
        abstract = True
class Event(BaseModel):
    """An event for which tickets can be sold."""
    title = models.CharField('título', max_length=128)
    description = models.TextField('descrição')
    def __str__(self):
        return self.title
    class Meta:
        ordering = ['title']
        verbose_name = 'evento'
        verbose_name_plural = 'eventos'
class Ticket(BaseModel):
    """A ticket type belonging to an event, with its unit price."""
    event = models.ForeignKey('Event', on_delete=models.CASCADE, verbose_name='evento', related_name='tickets')
    title = models.CharField('título', max_length=128)
    price = models.DecimalField('preço', max_digits=10, decimal_places=2)
    def __str__(self):
        return self.title
    class Meta:
        ordering = ['title']
        verbose_name = 'ticket'
        verbose_name_plural = 'tickets'
class Cart(BaseModel):
    """A user's shopping cart; `closed` marks it as checked out."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name='usuário', related_name='carts')
    closed = models.BooleanField('carrinho finalizado', db_index=True, default=False)
    objects = CartManager()
    def __str__(self):
        return str(self.id)
    class Meta:
        ordering = ['-created_at']
        verbose_name = 'carrinho de compra'
        verbose_name_plural = 'carrinhos de compra'
    @property
    def price(self):
        # Cart total; a generator avoids materializing an intermediate list.
        return sum(cart_item.price for cart_item in self.cart_items.all())
class CartItem(BaseModel):
    """A ticket line in a cart; a cart holds at most one line per ticket."""
    cart = models.ForeignKey('Cart', on_delete=models.CASCADE, verbose_name='carrinho', related_name='cart_items')
    ticket = models.ForeignKey('Ticket', on_delete=models.CASCADE, verbose_name='ticket', related_name='cart_items')
    quantity = models.SmallIntegerField('quantidade', default=1)
    # Stored separately from Ticket.price -- presumably a snapshot of the
    # ticket price at add time; confirm against the code that creates items.
    unit_price = models.DecimalField('preço unitário', max_digits=10, decimal_places=2)
    def __str__(self):
        return '{} - {} - {}'.format(self.cart, self.ticket, self.price)
    class Meta:
        ordering = ['id']
        verbose_name = 'item do carrinho de compra'
        verbose_name_plural = 'itens do carrinho de compra'
        unique_together = ('cart', 'ticket')
    @property
    def price(self):
        # Line total: quantity times the stored unit price.
        return self.quantity * self.unit_price
# Lifecycle states for Purchase.status: stored value, Portuguese display label.
PURCHASE_STATUS_CHOICES = (
    ('pending', 'Pendente'),
    ('paid', 'Pago'),
    ('canceled', 'Cancelado'),
)
class Purchase(BaseModel):
    """A checkout of a cart; status is updated from PagSeguro notifications."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name='usuário', related_name='purchases')
    cart = models.ForeignKey('Cart', on_delete=models.CASCADE, verbose_name='carrinho', related_name='purchases')
    # Stored separately from Cart.price -- presumably the total captured at
    # purchase time; confirm against PurchaseManager.
    price = models.DecimalField('preço', max_digits=10, decimal_places=2)
    status = models.CharField('status da compra', max_length=16, default='pending', choices=PURCHASE_STATUS_CHOICES)
    pagseguro_redirect_url = models.URLField('url do pagseguro', max_length=255, blank=True)
    objects = PurchaseManager()
    def __str__(self):
        return str(self.id)
    class Meta:
        ordering = ['-created_at']
        verbose_name = 'compra'
        verbose_name_plural = 'compras'
def update_purchase_status(sender, transaction, **kwargs):
    # Signal handler: delegate the status sync to the manager.
    Purchase.objects.update_purchase_status(transaction)
# Fired by the pagseguro app when a payment notification is received.
notificacao_recebida.connect(update_purchase_status)
|
import csv
import ast
from .gate import Gate
from .net import Net
from copy import deepcopy
class Board:
    """
    Handles loading of gates, nets and the 3D routing grid.

    Main methods for use in algorithms: add_net/remove_net place or lift a
    routed net, reset_grid restores the pristine grid state.
    """
    def __init__(self, print_csv, netlist_csv):
        """Load gates and nets from csv files and allocate the grid."""
        self.gates = {}           # gate_id -> Gate
        self.gate_locations = []  # (x, y, z) of every gate
        self.nets = []            # Net objects parsed from the netlist
        self.width = 0            # grown while loading gates
        self.length = 0           # grown while loading gates
        self.height = 7           # fixed number of layers
        self.cost = 0
        # load gates and nets
        self.load_gates(print_csv)
        self.load_nets(netlist_csv)
        # create grid
        self.grid = self.create_grid(self.width, self.length, self.height)
        # save start state of grid so it can be restored later
        self.grid_reserve = deepcopy(self.grid)

    def load_gates(self, filename):
        """Load gates from the print csv file (rows of id, x, y)."""
        try:
            with open(filename) as file:
                data = csv.reader(file)
                next(data)  # skip header row
                for line in data:
                    # gates always sit on the bottom layer (z = 0)
                    gate_id, x, y, z = int(line[0]), int(line[1]), int(line[2]), 0
                    self.gates[gate_id] = Gate(gate_id, (x, y, z))
                    self.gate_locations.append((x, y, z))
                    # grow the grid bounds to fit the outermost gate
                    if x > self.width:
                        self.width = x + 1
                    if y > self.length:
                        self.length = y + 1
        except OSError:
            # report which file is missing (message previously said "(unknown)")
            print(f"File {filename} not found")
            raise SystemExit

    def load_nets(self, filename):
        """Load nets from the netlist csv file (rows of gate_a, gate_b)."""
        try:
            with open(filename) as file:
                data = csv.reader(file)
                next(data)  # skip header row
                for i, line in enumerate(data):
                    # skip empty lines
                    if line:
                        gate_a, gate_b = self.gates[int(line[0])], self.gates[int(line[1])]
                        self.nets.append(Net(self, i, (gate_a, gate_b)))
                        # track how many nets attach to each gate
                        self.gates[gate_a.gate_id].n_connections += 1
                        self.gates[gate_b.gate_id].n_connections += 1
                # nets between busier gates get a higher priority number
                for net in self.nets:
                    net.priority_num = net.connect[0].n_connections + net.connect[1].n_connections
        except OSError:
            # report which file is missing (message previously said "(unknown)")
            print(f"File {filename} not found")
            raise SystemExit

    def create_grid(self, width, length, height):
        """Return an empty (width+1) x (length+1) x (height+1) grid of cell lists."""
        grid = [[[[] for z in range(height + 1)]
                for y in range(length + 1)]
                for x in range(width + 1)]
        # mark gate cells with a -1 sentinel so routing treats them as occupied
        for gate in self.gates:
            x, y, z = self.gates[gate].loc
            grid[x][y][z].append(-1)
        return grid

    def reset_grid(self):
        """Restore the grid to its saved start state."""
        self.grid = deepcopy(self.grid_reserve)

    def read_output(self, output_csv):
        """Read a previously produced output.csv back into this board."""
        with open(output_csv) as file:
            data = csv.reader(file)
            next(data)  # skip header row
            for line in data:
                # rows after the last net (the footer) do not start with '('
                if line[0][0] != '(':
                    break
                # search for the net whose gate pair matches this row
                match = False
                for net in self.nets:
                    if line[0] == str(net.connect).replace(" ", ""):
                        # save route and derived wire length
                        net.route = ast.literal_eval(line[1])
                        net.length = len(net.route) - 1
                        # place the net's wires on the grid
                        for x, y, z in net.route:
                            self.grid[x][y][z].append(net)
                        match = True
                        break
                if not match:
                    print("One or more nets in netlist and output.csv do not match")
                    raise SystemExit

    def add_net(self, net):
        """Place a routed net on the grid and record its wire length."""
        for x, y, z in net.route:
            self.grid[x][y][z].append(net)
        net.length = len(net.route) - 1

    def remove_net(self, net):
        """Lift a net off the grid and clear its route and length."""
        # remove each wire from grid
        for x, y, z in net.route:
            self.grid[x][y][z].remove(net)
        # reset net length to 0
        net.length = 0
        net.route = []
|
#!/usr/bin/env python3
"""
Update Wikifeat couchdb databases from 0.3a to 0.4.0a
Note: Requires python3
Changes:
1. Added by name search to user query design document
"""
import json
import common
import sys
user_ddoc = 'user_queries'

# New view: emit each user under their username plus, when distinct, their
# last and first names, so users can be looked up by any of the three.
usersByName = dict()
usersByName['map'] = """
function(doc){
    if(doc.type===\"user\" && doc.userPublic){
        emit(doc.name, {name: doc.name, roles: doc.roles, userPublic: doc.userPublic});
        if(doc.userPublic.lastName && doc.name !== doc.userPublic.lastName){
            emit(doc.userPublic.lastName, {name: doc.name, roles: doc.roles, userPublic: doc.userPublic});
        }
        if(doc.userPublic.firstName && doc.name !== doc.userPublic.firstName && doc.userPublic.lastName !== doc.userPublic.firstName){
            emit(doc.userPublic.firstName, {name: doc.name, roles: doc.roles, userPublic: doc.userPublic});
        }
    }
}
"""
usersByName['reduce'] = "_count"

# Connect to CouchDB with admin credentials taken from the command line.
args = common.parse_args()
conn = common.get_connection(args.use_ssl, args.couch_server, args.couch_port)
credentials = common.get_credentials(args.adminuser, args.adminpass)
get_headers = common.get_headers(credentials)
put_headers = common.put_headers(credentials)
conn.connect()

# Update the _users design document: fetch, add the new view, PUT it back.
ddoc_uri = '/_users/_design/' + user_ddoc
conn.request("GET", ddoc_uri, headers=get_headers)
resp = conn.getresponse()
if resp.getcode() != 200:
    print("Fetch of user design doc failed")
    sys.exit(1)
ddoc = common.decode_response(resp)
ddoc['views']['usersByName'] = usersByName
req_body = json.dumps(ddoc)
conn.request("PUT", ddoc_uri, body=req_body, headers=put_headers)
resp = conn.getresponse()
resp_body = common.decode_response(resp)
if resp.getcode() == 201:
    print("Update of user design doc successful")
else:
    print("Update of the user design doc failed.")
    # bug fix: previously the script exited 0 even when the PUT failed,
    # so callers/automation could not detect the failure
    sys.exit(1)
|
from .base_manager import BaseManager
from .one_a import ManagerOneA
from .one_b import ManagerOneB
from .two import ManagerTwo
from .three import ManagerThree
|
import cv2
import numpy as np
import argparse, sys, os
##from GUIdriver import *
import pandas as pd
def endprogram():
    """Print a termination notice and exit the script."""
    print("\nProgram terminated!")
    sys.exit()
# Load the selected image and shrink it to 1/5 size for on-screen display.
# NOTE(review): ImageFile is not defined anywhere in this file — it presumably
# came from the commented-out `from GUIdriver import *`; confirm before
# running this script standalone.
text = str(ImageFile)
print ("\n*********************\nImage : " + ImageFile + "\n*********************")
img = cv2.imread(text)
img = cv2.resize(img ,((int)(img.shape[1]/5),(int)(img.shape[0]/5)))
original = img.copy()      # untouched copy used later for masking / ROI
neworiginal = img.copy()   # copy used for drawing the leaf contour
cv2.imshow('original',img)

#--
# Count pixels that look white-ish (all three BGR channels above 110).
p = 0
for i in range(img.shape[0]):
    for j in range(img.shape[1]):
        B = img[i][j][0]
        G = img[i][j][1]
        R = img[i][j][2]
        if (B > 110 and G > 110 and R > 110):
            p += 1

#--
#total amount of pixel
totalpixels = img.shape[0]*img.shape[1]
per_white = 100 * p/totalpixels

'''
print 'percantage of white: ' + str(per_white) + '\n'
print 'total: ' + str(totalpixels) + '\n'
print 'white: ' + str(p) + '\n'
'''

# NOTE(review): i and j here still hold the final indices of the loop above,
# so only the single bottom-right-most pixel is recolored. This was probably
# meant to run inside the pixel loop for every white pixel — confirm intent.
if per_white > 10:
    img[i][j] = [200,200,200]
cv2.imshow('color change', img)
#Edge detection (mean shift)
# Smooth, then mean-shift filter to flatten colour regions before edge detection.
blur1 = cv2.GaussianBlur(img,(3,3),1)
newimg = np.zeros((img.shape[0], img.shape[1],3),np.uint8)
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER , 10 ,1.0)
img = cv2.pyrMeanShiftFiltering(blur1, 20, 30, newimg, 0, criteria)
cv2.imshow('means shift image',img)

# Heavier blur, then Canny edges of the flattened image.
blur = cv2.GaussianBlur(img,(11,11),1)
canny = cv2.Canny(blur, 160, 290)
canny = cv2.cvtColor(canny,cv2.COLOR_GRAY2BGR)

#countour
# Pick the contour with the most vertices and treat it as the leaf outline.
bordered = cv2.cvtColor(canny,cv2.COLOR_BGR2GRAY)
contours,hierarchy = cv2.findContours(bordered, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
maxC = 0
# NOTE(review): if findContours returns no contours, maxid is never assigned
# and the arcLength call below raises NameError — confirm inputs always
# produce at least one contour.
for x in range(len(contours)):
    if len(contours[x]) > maxC:
        maxC = len(contours[x])
        maxid = x

perimeter = cv2.arcLength(contours[maxid],True)   # leaf border length (px)
Tarea = cv2.contourArea(contours[maxid])          # total leaf area (px^2)
cv2.drawContours(neworiginal,contours[maxid],-1,(0,0,255))
cv2.imshow('Contour',neworiginal)
#cv2.imwrite('Contour complete leaf.jpg',neworiginal)
#Region of image
# Compute a bounding rectangle around the leaf contour and crop it out.
height, width, _ = canny.shape
min_x, min_y = width, height
max_x = max_y = 0
frame = canny.copy()

# Put image on frame
# NOTE(review): the body always uses contours[maxid], so `contour`/`hier` are
# unused, and zipping with `hierarchy` (an array, not a list of entries)
# effectively limits the iterations — confirm before restructuring.
for contour, hier in zip(contours, hierarchy):
    (x,y,w,h) = cv2.boundingRect(contours[maxid])
    min_x, max_x = min(x, min_x), max(x+w, max_x)
    min_y, max_y = min(y, min_y), max(y+h, max_y)
    if w > 80 and h > 80:
        # NOTE(review): roi/originalroi stay undefined if this never fires
        # and the min/max check below also fails — later code would NameError.
        roi = img[y:y+h , x:x+w]
        originalroi = original[y:y+h , x:x+w]

if (max_x - min_x > 0 and max_y - min_y > 0):
    roi = img[min_y:max_y , min_x:max_x]
    originalroi = original[min_y:max_y , min_x:max_x]

cv2.imshow('ROI', frame)
cv2.imshow('rectangle ROI', roi)
img = roi

#Manipulate colorspace
# Work in HLS so the leaf hue can be isolated; remap a specific colour to green.
imghls = cv2.cvtColor(roi, cv2.COLOR_BGR2HLS)
cv2.imshow('HLS', imghls)
imghls[np.where((imghls==[30,200,2]).all(axis=2))] = [0,200,0]
cv2.imshow('new HLS', imghls)

# Keep only the hue channel; replace zero hues with 35 to keep them
# out of the infection threshold below.
huehls = imghls[:,:,0]
cv2.imshow('img_hue hls',huehls)
huehls[np.where(huehls==[0])] = [35]
cv2.imshow('img_hue with my mask',huehls)
#Thresholding on hue image
# Pixels with hue below ~28 are taken as potentially infected leaf area.
ret, thresh = cv2.threshold(huehls,28,255,cv2.THRESH_BINARY_INV)
cv2.imshow('thresh', thresh)

#Masking thresholded image from original image
mask = cv2.bitwise_and(originalroi,originalroi,mask = thresh)
cv2.imshow('masked out img',mask)

#Borders for infected regions
contours,heirarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)

# Sum the area of every infected-region contour.
Infarea = 0
for x in range(len(contours)):
    cv2.drawContours(originalroi,contours[x],-1,(0,0,255))
    cv2.imshow('Contour masked',originalroi)

    #Calculating area of infected region
    Infarea += cv2.contourArea(contours[x])

# Fall back to the whole crop's pixel count when contour areas overshoot
# the earlier leaf-contour area.
if Infarea > Tarea:
    Tarea = img.shape[0]*img.shape[1]

# NOTE(review): "lenght" / "Percetage" typos below are in user-facing output;
# left byte-identical here.
print ('_________________________________________\n Leaf border lenght: %.2f' %(perimeter)
       + '\n_________________________________________')

print ('_________________________________________\n Leaf area: %.2f' %(Tarea)
       + '\n_________________________________________')

#Finding the percentage of infection in the leaf
print ('_________________________________________\n Size of infected area: %.2f' %(Infarea)
       + '\n_________________________________________')

try:
    per = 100 * Infarea/Tarea
except ZeroDivisionError:
    per = 0

print ('_________________________________________\n Infected Percetage: %.2f' %(per)
       + '\n_________________________________________')

cv2.imshow('orig',original)
#******************************************************************************
#Export dataset
print("\nDo you want to run the program?:")

# NOTE(review): ord('q' or 'Q') evaluates ('q' or 'Q') -> 'q', so only the
# lowercase key matches; the same applies to the 'y' and 'n' checks below.
n = cv2.waitKey(0) & 0xFF
if n == ord('q' or 'Q'):
    endprogram()

#import csv file library
import csv

directory = 'datasetlog'
filename = directory+'/Datasetunlabelledlog.csv'
imgid = "/".join(text.split('/')[-2:])   # last two path components identify the image

# Append this image's features (areas, perimeter) to the rolling CSV log;
# every branch of the loop breaks or exits, so it runs at most once.
while True:
    if n == ord('y'or'Y'):
        fieldnames = ['fold num', 'imgid', 'feature1', 'feature2', 'feature3']
        print ('Appending to ' + str(filename)+ '...')
        try:
            log = pd.read_csv(filename)
            # advance the 10-fold counter based on the last logged row
            logfn = int(log.tail(1)['fold num'])
            foldnum = (logfn+1)%10
            L = [str(foldnum), imgid, str(Tarea), str(Infarea), str(perimeter)]
            my_df = pd.DataFrame([L])
            my_df.to_csv(filename, mode='a', index=False, header=False)
            print ('\nFile ' + str(filename)+ ' updated!' )
        except IOError:
            # first run: create the log directory and the file with a header row
            if directory not in os.listdir():
                os.system('mkdir ' + directory)
            foldnum = 0
            L = [str(foldnum), imgid, str(Tarea), str(Infarea), str(perimeter)]
            my_df = pd.DataFrame([fieldnames, L])
            my_df.to_csv(filename, index=False, header=False)
            print ('\nFile ' + str(filename)+ ' updated!' )
        finally:
            # hand off to the classifier stage (runs on import), then exit
            import classifier
            endprogram()
    elif n == ord('n' or 'N') :
        print ('File not updated! \nSuccessfully terminated!')
        break
    else:
        print ('Invalid input!')
        break
|
#DejaVuSansCondensed.ttf
#DejaVuSansCondensed.pkl
#DejaVuSansCondensed.cw127.pkl
#The files listed above are required at runtime: they provide the Unicode-capable DejaVu font.
#If you do not want to use them, uncomment the encode/decode line in the loop, delete the add_font line, and replace 'DejaVu' in the set_font calls with 'Times'.
import os
import sys
from fpdf import FPDF
def TXTtoPDF(_in, _out):
    """Convert a UTF-8 text file into an A4 PDF.

    Lines of 30 characters or fewer are rendered centred in a larger font
    (treated as titles); longer lines are rendered left-aligned as
    paragraphs.

    :param _in: path of the input text file
    :param _out: path of the PDF file to write
    """
    file_in = os.path.abspath(_in)
    file_out = os.path.abspath(_out)
    pdf = FPDF(format='A4')
    pdf.add_page()
    # DejaVu is registered so unicode characters render (see header note)
    pdf.add_font('DejaVu', '', 'DejaVuSansCondensed.ttf', uni=True)
    # bug fix: the original opened the file and never closed it;
    # the with-statement guarantees the handle is released
    with open(file_in, 'r', encoding='utf-8', errors='ignore') as file:
        for text in file:
            text = u'' + text
            if len(text) <= 30:  # title
                pdf.set_font('DejaVu', '', size=15)
                pdf.multi_cell(w=200, h=10, txt=text, align='C')
            else:  # paragraph
                pdf.set_font('DejaVu', size=12)
                pdf.multi_cell(w=0, h=10, txt=text, align='L')
    pdf.output(file_out)
if __name__ == "__main__":
    # CLI usage: python thisfile.py <input.txt> <output.pdf>
    TXTtoPDF(sys.argv[1], sys.argv[2])
# Generated by Django 2.0.1 on 2018-02-14 19:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0.1): gives the accounts User model
    Portuguese verbose names and tightens several field definitions."""

    dependencies = [
        ('accounts', '0005_auto_20180202_1116'),
    ]

    operations = [
        # Portuguese display names for the model.
        migrations.AlterModelOptions(
            name='user',
            options={'verbose_name': 'Usuário', 'verbose_name_plural': 'Usuários'},
        ),
        # date_joined is now set automatically when the row is created.
        migrations.AlterField(
            model_name='user',
            name='date_joined',
            field=models.DateTimeField(auto_now_add=True, verbose_name='data de inclusão'),
        ),
        # email gains a uniqueness constraint.
        migrations.AlterField(
            model_name='user',
            name='email',
            field=models.EmailField(max_length=254, unique=True, verbose_name='email'),
        ),
        migrations.AlterField(
            model_name='user',
            name='first_name',
            field=models.CharField(max_length=60, verbose_name='nome'),
        ),
        migrations.AlterField(
            model_name='user',
            name='last_name',
            field=models.CharField(max_length=200, verbose_name='sobrenome'),
        ),
    ]
|
from wordcloud import WordCloud
from collections import Counter
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import numpy as np
import seaborn as sns
import missingno as msno
import os
# To Do: 1) 시각화 default setting 설정
def test():
    """Smoke test: print a notice that this visualize module is loaded."""
    print('\n', )
    print('domain > eda에 visualize입니다.')
# Resolve the bundled NanumGothic font next to this file; the FONT_PATH
# environment variable overrides it.
FILE = os.path.dirname(__file__)
FONT_PATH = os.environ.get('FONT_PATH', os.path.join(FILE, 'NanumGothic.ttf'))

############################## matplotlib : default font setting ################################
# reference: https://jehyunlee.github.io/2020/02/13/Python-DS-2-matplotlib_defaults_and_fonts/
# (diagnostic one-liners kept for debugging matplotlib font issues)
# print('버전: ', mpl.__version__)
# print('설치 위치: ', mpl.__file__)
# print('설정 위치: ', mpl.get_configdir())
# print('캐시 위치: ', mpl.get_cachedir())
# print('설정파일 위치: ', mpl.matplotlib_fname())
# [(f.name, f.fname) for f in fm.fontManager.ttflist if 'Nanum' in f.name]
# font_list = fm.findSystemFonts(fontpaths=None, fontext='ttf')
# print(font_list[:100])

# Register the Korean font as matplotlib's default family.
font_prop = fm.FontProperties(fname=FONT_PATH, size=10).get_name()
plt.rc('font', family=font_prop)
# NOTE(review): fm._rebuild() is private API and was removed in newer
# matplotlib releases — confirm the pinned matplotlib version supports it.
fm._rebuild()
mpl.rcParams['axes.unicode_minus'] = False  # keep minus signs renderable with this font
# print(font_prop)
#################################################################################################

# For now, confirm Korean glyph rendering with the setting below
# (overrides the family set above).
mpl.rcParams['font.family'] = "AppleGothic"
def get_cloud(df, target_column, conditional_value='', conditional_column='all'):
    """Render a word cloud of the values in *target_column* of *df*.

    When *conditional_column* is not 'all', only rows where that column
    equals *conditional_value* are counted.
    """
    if conditional_column == 'all':
        values = df[target_column].to_list()
    else:
        subset = df[df[conditional_column] == conditional_value]
        values = subset[target_column].to_list()
    frequencies = dict(Counter(values).most_common())
    plt.figure(figsize=(14, 10))  # canvas size
    cloud = WordCloud(font_path=FONT_PATH,
                      background_color='white').generate_from_frequencies(frequencies)
    plt.imshow(cloud, interpolation='lanczos')
    plt.axis('off')
    plt.show()
def get_missing_matrix(df):
    """Display a missingno matrix showing null positions in *df*."""
    msno.matrix(df)
    plt.show()
def get_missing_bar(df):
    """Display a missingno bar chart of non-null counts per column of *df*."""
    msno.bar(df)
    plt.show()
def get_top_counts(df, col, num):
    """Plot a horizontal countplot of the *num* most frequent values in *col*."""
    order = df[col].value_counts()[:num].index
    sns.countplot(y=col, data=df, order=order)
    plt.title(f'Top {num} counts', fontproperties=font_prop)
    plt.show()
def SentanceInspect(_column):
    """Print length statistics for a Series of sentences and plot histograms
    of per-sentence token counts and character (eumjeol) counts.

    :param _column: pandas Series of strings; its .name is used as the title
    """
    maxval = 0
    sentences = _column.tolist()  # bug fix: no longer shadows the builtin `list`
    tokenized_list = [r.split() for r in sentences]
    sentence_len_by_token = [len(t) for t in tokenized_list]
    # character counts ignoring spaces ("eumjeol" = Korean syllable blocks)
    sentence_len_by_eumjeol = [len(s.replace(' ', '')) for s in sentences]
    # track the longest sentence among the extremely long ones (> 15000 chars)
    for s in sentences:
        if len(s.replace(' ', '')) > 15000:
            if maxval < len(s):
                maxval = len(s)
            # print('len: ',len(s))
            # print('s: ',s)
    # emit one blank line per empty (token-less) sentence
    for t in tokenized_list:
        if len(t) == 0:
            print()
    print('maxval : ', maxval)
    plt.figure(figsize=(12, 5))
    plt.hist(sentence_len_by_token, bins=50, alpha=0.5, color="r", label="word")
    # bug fix: label read "aplt.yscallphabet" — a corrupted paste of "alphabet"
    plt.hist(sentence_len_by_eumjeol, bins=50, alpha=0.5, color="b", label="alphabet")
    # NOTE(review): `nonposy` is deprecated in newer matplotlib (use
    # `nonpositive`) — kept as-is; confirm the pinned matplotlib version.
    plt.yscale('log', nonposy='clip')
    plt.title(_column.name)
    plt.xlabel('red:token / blue:eumjeol length')
    plt.ylabel('number of sentences')
    plt.show()
    print('\n', )
    print('칼럼명 : {}'.format(_column.name))
    print('토큰 최대 길이 : {}'.format(np.max(sentence_len_by_token)))
    print('토큰 최소 길이 : {}'.format(np.min(sentence_len_by_token)))
    print('토큰 평균 길이 : {:.2f}'.format(np.mean(sentence_len_by_token)))
    print('토큰 길이 표준편차 : {:.2f}'.format(np.std(sentence_len_by_token)))
    print('토큰 중간 길이 : {}'.format(np.median(sentence_len_by_token)))
    print('제 1사분위 길이 : {}'.format(np.percentile(sentence_len_by_token, 25)))
    print('제 3사분위 길이 : {}'.format(np.percentile(sentence_len_by_token, 75)))
|
from .parse import parse_expression, ParseException
from .tokenize import tokenize, TokenizeException
from .node import Node, CalculationException
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_l3_interface
version_added: "2.4"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Manage L3 interfaces on VyOS network devices
description:
- This module provides declarative management of L3 interfaces
on VyOS network devices.
notes:
- Tested against VYOS 1.1.7
options:
name:
description:
- Name of the L3 interface.
ipv4:
description:
- IPv4 of the L3 interface.
ipv6:
description:
- IPv6 of the L3 interface.
aggregate:
description: List of L3 interfaces definitions
state:
description:
- State of the L3 interface configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: Set eth0 IPv4 address
vyos_l3_interface:
name: eth0
ipv4: 192.168.0.1/24
- name: Remove eth0 IPv4 address
vyos_l3_interface:
name: eth0
state: absent
- name: Set IP addresses on aggregate
vyos_l3_interface:
aggregate:
- { name: eth1, ipv4: 192.168.2.10/24 }
- { name: eth2, ipv4: 192.168.3.10/24, ipv6: "fd5d:12c9:2201:1::1/64" }
- name: Remove IP addresses on aggregate
vyos_l3_interface:
aggregate:
- { name: eth1, ipv4: 192.168.2.10/24 }
- { name: eth2, ipv4: 192.168.3.10/24, ipv6: "fd5d:12c9:2201:1::1/64" }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- set interfaces ethernet eth0 address '192.168.0.1/24'
"""
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import remove_default_spec
from ansible.module_utils.vyos import load_config, run_commands
from ansible.module_utils.vyos import vyos_argument_spec, check_args
def search_obj_in_list(name, lst):
    """Return the first dict in *lst* whose 'name' equals *name*, else None."""
    return next((entry for entry in lst if entry['name'] == name), None)


def map_obj_to_commands(updates, module):
    """Diff desired state against device state into VyOS set/delete commands.

    :param updates: tuple (want, have) of lists of interface dicts
    :param module: the AnsibleModule (unused here, kept for call parity)
    :return: list of VyOS configuration commands
    """
    commands = []
    want, have = updates

    for desired in want:
        name = desired['name']
        ipv4 = desired['ipv4']
        ipv6 = desired['ipv6']
        state = desired['state']

        current = search_obj_in_list(name, have)
        if not current:
            # interface unknown on the device: nothing to add or remove
            continue

        prefix = 'interfaces ethernet ' + name + ' address'
        if state == 'absent':
            if not ipv4 and not ipv6 and (current['ipv4'] or current['ipv6']):
                # no specific address requested: drop all addresses at once
                commands.append('delete ' + prefix)
            else:
                if ipv4 and current['ipv4']:
                    commands.append('delete ' + prefix + ' ' + ipv4)
                if ipv6 and current['ipv6']:
                    commands.append('delete ' + prefix + ' ' + ipv6)
        elif state == 'present':
            # only emit set commands for addresses that differ
            if ipv4 and ipv4 != current['ipv4']:
                commands.append('set ' + prefix + ' ' + ipv4)
            if ipv6 and ipv6 != current['ipv6']:
                commands.append('set ' + prefix + ' ' + ipv6)

    return commands
def map_config_to_obj(module):
    """Parse `show interfaces ethernet` output into interface dicts.

    :return: list of {'name', 'ipv4', 'ipv6'} dicts; indented
        continuation lines contribute an extra IPv6 address to the
        previous interface.
    """
    obj = []
    output = run_commands(module, ['show interfaces ethernet'])

    lines = output[0].splitlines()
    if len(lines) <= 3:
        return obj

    # the first three lines are the table header
    for line in lines[3:]:
        fields = line.split()
        if len(fields) > 1:
            name = fields[0]
            address = fields[1]
            if address == '-':
                address = None

            if address is not None and ':' not in address:
                obj.append({'name': name,
                            'ipv4': address,
                            'ipv6': None})
            else:
                obj.append({'name': name,
                            'ipv6': address,
                            'ipv4': None})
        else:
            # a bare-address line belongs to the previous interface
            obj[-1]['ipv6'] = fields[0]

    return obj
def map_params_to_obj(module):
    """Build the desired-state object list from module parameters.

    With an aggregate, each entry inherits any unset key from the
    top-level parameters (the aggregate entries are filled in place);
    otherwise a single entry is built from the top-level parameters.
    """
    aggregate = module.params.get('aggregate')
    if not aggregate:
        return [{
            'name': module.params['name'],
            'ipv4': module.params['ipv4'],
            'ipv6': module.params['ipv6'],
            'state': module.params['state'],
        }]

    obj = []
    for entry in aggregate:
        # fill unset keys from the top-level params (mutates the shared entry)
        for key in entry:
            if entry.get(key) is None:
                entry[key] = module.params[key]
        obj.append(entry.copy())
    return obj
def main():
    """ main entry point for module execution
    """
    # per-interface parameters (shared by top-level and aggregate entries)
    element_spec = dict(
        name=dict(),
        ipv4=dict(),
        ipv6=dict(),
        state=dict(default='present',
                   choices=['present', 'absent'])
    )

    # aggregate entries must each name their interface
    aggregate_spec = deepcopy(element_spec)
    aggregate_spec['name'] = dict(required=True)

    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)

    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec),
    )

    argument_spec.update(element_spec)
    argument_spec.update(vyos_argument_spec)

    # a task must supply either name or aggregate, and never both
    required_one_of = [['name', 'aggregate']]
    mutually_exclusive = [['name', 'aggregate']]

    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)

    warnings = list()
    check_args(module, warnings)

    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings

    # diff desired state (params) against device state to get the commands
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)

    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands

    # apply only outside check mode; report changed either way
    if commands:
        commit = not module.check_mode
        load_config(module, commands, commit=commit)
        result['changed'] = True

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_common_bandwidth_profile import TapiCommonBandwidthProfile # noqa: F401,E501
from tapi_server.models.tapi_common_capacity_value import TapiCommonCapacityValue # noqa: F401,E501
from tapi_server import util
class TapiCommonCapacity(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, bandwidth_profile=None, total_size=None):  # noqa: E501
        """TapiCommonCapacity - a model defined in OpenAPI

        :param bandwidth_profile: The bandwidth_profile of this TapiCommonCapacity.  # noqa: E501
        :type bandwidth_profile: TapiCommonBandwidthProfile
        :param total_size: The total_size of this TapiCommonCapacity.  # noqa: E501
        :type total_size: TapiCommonCapacityValue
        """
        # attribute name -> declared model type (used by the (de)serializer)
        self.openapi_types = {
            'bandwidth_profile': TapiCommonBandwidthProfile,
            'total_size': TapiCommonCapacityValue
        }

        # python attribute name -> JSON field name
        self.attribute_map = {
            'bandwidth_profile': 'bandwidth-profile',
            'total_size': 'total-size'
        }

        self._bandwidth_profile = bandwidth_profile
        self._total_size = total_size

    @classmethod
    def from_dict(cls, dikt) -> 'TapiCommonCapacity':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The tapi.common.Capacity of this TapiCommonCapacity.  # noqa: E501
        :rtype: TapiCommonCapacity
        """
        return util.deserialize_model(dikt, cls)

    @property
    def bandwidth_profile(self):
        """Gets the bandwidth_profile of this TapiCommonCapacity.

        :return: The bandwidth_profile of this TapiCommonCapacity.
        :rtype: TapiCommonBandwidthProfile
        """
        return self._bandwidth_profile

    @bandwidth_profile.setter
    def bandwidth_profile(self, bandwidth_profile):
        """Sets the bandwidth_profile of this TapiCommonCapacity.

        :param bandwidth_profile: The bandwidth_profile of this TapiCommonCapacity.
        :type bandwidth_profile: TapiCommonBandwidthProfile
        """
        self._bandwidth_profile = bandwidth_profile

    @property
    def total_size(self):
        """Gets the total_size of this TapiCommonCapacity.

        :return: The total_size of this TapiCommonCapacity.
        :rtype: TapiCommonCapacityValue
        """
        return self._total_size

    @total_size.setter
    def total_size(self, total_size):
        """Sets the total_size of this TapiCommonCapacity.

        :param total_size: The total_size of this TapiCommonCapacity.
        :type total_size: TapiCommonCapacityValue
        """
        self._total_size = total_size
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
# Bubble Sort 冒泡排序
# 冒泡排序只会操作相邻的两个数据。
# 自然界中, 气泡的密度比水小,在水中,越大的气泡受到的浮力也就越大, 就会先到达水面
# 冒泡排序只会操作相邻的两个数据。
# 每次冒泡操作都会**对相邻的两个元素进行比较,看是否满足大小关系要求**。如果不满足就让它俩互换。
# 一趟冒泡会让至少一个元素移动到它应该在的位置,重复 n 趟,就完成了 n 个数据的排序工作。
# #### 分析
# 1. 原地排序算法: 只涉及相邻数据的交换, 需要一个常量级的临时空间, 空间复杂度为O(1)
# 2. 稳定的: 相等的元素不会进行交换,所以等值的元素在排序前后不会改变顺序
# 3. 时间复杂度: 最好时只要进行一趟冒泡, O(n)
# 最坏情况要进行n趟冒泡,O(n^2)
# 平均情况作简单估算,
# 冒泡排序包含比较和交换两个操作, 比较只需读值不需要写内存, 所以我们考虑交换操作,交换一次, 逆序度就减一.
# 逆序度=满有序度-有序度, 所以逆序度一定小于满有序, 也就是n(n-1)/2,
# 逆序度为0时, 不需进行交换操作,取两者中间值n(n-1)/4
def bubbleSort(arr):
    """Bubble sort: repeatedly swap adjacent out-of-order pairs.

    Sorts *arr* in place and also returns it. A pass without any swap
    means the array is already sorted, so the loop ends early.
    """
    size = len(arr)
    if size <= 0:
        return arr
    for done in range(size):
        swapped = False
        # each pass bubbles the largest remaining element to the end
        for idx in range(size - done - 1):
            if arr[idx] > arr[idx + 1]:
                arr[idx], arr[idx + 1] = arr[idx + 1], arr[idx]
                swapped = True
        if not swapped:
            break
    return arr
# Insert Sort 插入排序
# 像纸牌游戏,得到新牌后插到合适的位置
# 将数组中的数据分为两个区间,已排序区间和未排序区间。初始已排序区间只有一个元素,就是数组的第一个元素。
# 插入算法的核心思想是**取未排序区间中的元素,在已排序区间中找到合适的插入位置将其插入**,并保证已排序区间数据一直有序。
# 重复这个过程,直到未排序区间中元素为空
# #### 分析
# 1. 原地排序算法: 只涉及数据的比较和移动, 不需要额外的临时空间, 空间复杂度为O(1)
# 2. 稳定的: 进行严格比较时,即相等的元素不进行交换,等值的元素在排序前后不会改变顺序
# 3. 时间复杂度: 最好时,比较一个数据就能确定插入的位置,只是进行一次从头到尾的遍历, O(n)
# 最坏情况,每次插入都相当于在数组的第一个位置插入新的数据,O(n^2)
# 平均情况作简单估算,
# 在有序数组中插入一个值的时间复杂度是O(n), 插入排序只是排序了n次, 也就是O(n^2)
# #### 实现
# 类似于纸牌游戏,得到新牌后插到合适的位置
# 假设a[0]是已排序区间,接着就近取出未排序区间中的第一个元素(a[1]), 寻找a[1]在已排序区间中的位置,进行插入,
# 接下来依次排序a[2],a[3]...a[n]
# range(3): 0 1 2
def InsertSort(arr):
    """Insertion sort: grow a sorted prefix by inserting each next element.

    Sorts *arr* in place and also returns it. Uses a strict comparison,
    so equal elements keep their relative order (stable).
    """
    for right in range(len(arr)):
        value = arr[right]
        pos = right
        # shift larger sorted elements one slot right, then drop value in
        while pos > 0 and value < arr[pos - 1]:
            arr[pos] = arr[pos - 1]
            pos = pos - 1
        arr[pos] = value
    return arr
# Shell Sort 希尔排序
# 按从大到小的间隔进行插入排序
# #### 分析
# 1. 原地排序算法: 只涉及数据的比较和移动, 不需要额外的临时空间, 空间复杂度为O(1)
# 2. 稳定的: 进行严格比较时,即相等的元素不进行交换 arr[j-gap] > temp,等值的元素在排序前后不会改变顺序
# 3. 时间复杂度: 例: 5,4,3,2,1
# 最好时,比较一个数据就能确定插入的位置,只是进行8*n次从头到尾的遍历, 不需要进行元素交换 O(n)
# 最坏情况, 希尔排序中gap为4时得到交换了1和5, 后续交换3次, 总共交换了4次
# 插入排序中, 要经过5次交换, 到达1的位置, 总共交换10次
def ShellInsertSort(arr, gap):
    """One gapped insertion-sort pass used by ShellSort.

    Sorts the gap-strided subsequences of *arr* in place; returns None.
    """
    for right in range(len(arr)):
        value = arr[right]
        pos = right
        # shift larger elements gap slots right, then drop value in
        while pos >= gap and arr[pos - gap] > value:
            arr[pos] = arr[pos - gap]
            pos -= gap
        arr[pos] = value


def ShellSort(arr):
    """Shell sort: gapped insertion-sort passes with shrinking gaps.

    Sorts *arr* in place and also returns it.
    """
    # Marcin Ciura's empirically-derived gap sequence (no proven formula).
    gaps = [701, 301, 132, 57, 23, 10, 4, 1]
    for gap in gaps:
        ShellInsertSort(arr, gap)
    return arr
# Selection Sort 选择排序
# 选择排序算法的实现思路有点类似插入排序,也分已排序区间和未排序区间。
# 但是**选择排序每次会从未排序区间中找到最小的元素,将其放到已排序区间的末尾**。
# #### 分析
# 1. 原地排序算法: 只涉及数据的比较和移动, 不需要额外的临时空间, 空间复杂度为O(1)
# 2. 稳定的: 进行严格比较时,即相等的元素不进行交换,等值的元素在排序前后不会改变顺序
# 3. 时间复杂度: 即使是有序的情况,每次也要遍历一遍未排序区间,复杂度为 n+n-1+...1 , O(n^2)
# 最坏情况,每次也只是需要遍历一遍未排序区间选择的最小元素,O(n^2)
# 平均情况作简单估算,
# 时间复杂度的上限和下限都是O(n^2), 所以平均情况也是O(n^2)
# #### 实现
# 每次选择出一个最值,下次排序只需要比较剩下的元素,需要排序的元素越来越少
def FindMin(arr):
    """Return the index of the smallest element of *arr*.

    Returns None for an empty (or falsy) list. Ties resolve to the
    first occurrence because the comparison is strict.
    """
    if not arr:
        return None
    best = 0
    for pos in range(1, len(arr)):
        if arr[pos] < arr[best]:
            best = pos
    return best


def SelectionSort(arr):
    """Selection sort: repeatedly pop the minimum of *arr* onto a new list.

    NOTE: consumes *arr* — the input list is emptied as a side effect.
    Returns the new, sorted list.
    """
    ordered = []
    for _ in range(len(arr)):
        ordered.append(arr.pop(FindMin(arr)))
    return ordered
if __name__ == '__main__':
    sample = [90, 0, -1, 22, 3, 2, 2, 1, 44, 55, 32, 9, 8, 7, 6, 5, 5, 3, 2]

    # time-stamp each run so rough durations can be eyeballed
    print(datetime.now().strftime('%H:%M:%S.%f'))
    print('bubbleSort:', bubbleSort(list(sample)))
    print(datetime.now().strftime('%H:%M:%S.%f'))
    print('SelectionSort:', SelectionSort(list(sample)))
    print(datetime.now().strftime('%H:%M:%S.%f'))
    print('InsertSort:', InsertSort(list(sample)))
    print(datetime.now().strftime('%H:%M:%S.%f'))
    print('ShellSort:', ShellSort(list(sample)))
    # print(datetime.now().strftime('%H:%M:%S.%f'))
    # print('ShellSort2:', ShellSort([5,4,3,2,1]))
    # print(datetime.now().strftime('%H:%M:%S.%f'))
    # print('InsertSort2:', InsertSort([5,4,3,2,1]))
    # print(datetime.now().strftime('%H:%M:%S.%f'))
|
import time
from js9 import j
JSBASE = j.application.jsbase_get_class()
class TIMER(JSBASE):
    # Simple stopwatch utility exposed at j.tools.timer.
    # NOTE: start/stop/clean/result keep their state in class attributes,
    # so only one measurement can be in flight at a time.
    def __init__(self):
        self.__jslocation__ = "j.tools.timer"
        JSBASE.__init__(self)

    @staticmethod
    def execute_until(callback, timeout=60, interval=0.2):
        """
        Check periodically if the callback function returns a truthy value.

        :param callback: Callback function
        :type callback: callable
        :param timeout: Amount of time to keep checking, in seconds
        :type timeout: int
        :param interval: Pause time in between calls to the callback
        :type interval: float
        :return: the callback's truthy result, or False when the timeout expires
        :rtype: bool (or whatever truthy value the callback returned)
        """
        start = time.time()
        while start + timeout > time.time():
            result = callback()
            if result:
                return result
            time.sleep(interval)
        return False

    @staticmethod
    def start(cat=""):
        # Begin a measurement; `cat` labels it in result().
        TIMER._cat = cat
        TIMER.clean()
        TIMER._start = time.time()

    @staticmethod
    def stop(nritems=0, log=True):
        # End the measurement; with nritems > 0, also compute items/sec.
        TIMER._stop = time.time()
        TIMER.duration = TIMER._stop - TIMER._start
        if nritems > 0:
            TIMER.nritems = float(nritems)
            if TIMER.duration > 0:
                TIMER.performance = float(nritems) / float(TIMER.duration)
        if log:
            TIMER.result()

    @staticmethod
    def clean():
        # Reset all measurement state to zero.
        TIMER._stop = 0.0
        TIMER._start = 0.0
        TIMER.duration = 0.0
        TIMER.performance = 0.0
        TIMER.nritems = 0.0

    @staticmethod
    def result():
        # Print the last measurement: duration, item count, items/sec.
        if TIMER._cat !="":
            print("\nDURATION FOR:%s"%TIMER._cat)
        print(("duration:%s" % TIMER.duration))
        print(("nritems:%s" % TIMER.nritems))
        print(("performance:%s/sec" % int(TIMER.performance)))

    def test(self):
        """
        js9 'j.tools.timer.test()'
        """
        # Times 20 sleeps of 0.1s, then logs the resulting throughput.
        j.tools.timer.start("something")
        for i in range(20):
            time.sleep(0.1)
        j.tools.timer.stop(20)
|
from msal import ConfidentialClientApplication
class AzureAdAppFactory:
    """Factory for MSAL confidential-client applications."""

    @classmethod
    def create(cls, **kwargs) -> ConfidentialClientApplication:
        """Build a ConfidentialClientApplication from the required keyword
        arguments ``client_id``, ``client_secret`` and ``authority``.
        """
        return ConfidentialClientApplication(
            kwargs["client_id"],
            kwargs["client_secret"],  # positional: the client credential
            kwargs["authority"])
|
import hashlib
import pytest
import random
from two1.crypto.ecdsa_base import Point
from two1.crypto import ecdsa_openssl
from two1.crypto import ecdsa_python
def make_low_s(curve, p, rec_id):
    """Normalize a point to its "low" y representative.

    When p.y is in the upper half of the curve order, mirror it to
    curve.n - p.y and flip the recovery id's parity bit; otherwise the
    point and recovery id are returned unchanged.
    """
    if p.y < (curve.n // 2):
        return p, rec_id
    return Point(p.x, curve.n - p.y), rec_id ^ 0x1
@pytest.mark.parametrize("curve,point_type", [
    (ecdsa_python.p256(), 'affine'),
    (ecdsa_python.p256(), 'jacobian'),
    (ecdsa_python.secp256k1(), 'affine'),
    (ecdsa_python.secp256k1(), 'jacobian')
])
def test_ecpoint(curve, point_type):
    """Check EC group laws: n*G is the point at infinity, and scalar
    multiplication distributes over addition ((a+b)G == aG + bG)."""
    # Test to see if n * G = point at infinity
    if point_type == 'affine':
        base_point = ecdsa_python.ECPointAffine(curve, curve.Gx, curve.Gy)
    elif point_type == 'jacobian':
        base_point = ecdsa_python.ECPointJacobian(curve, curve.Gx, curve.Gy, 1)
    else:
        # NOTE(review): falls through with base_point undefined, so the
        # multiplication below would raise NameError for an unknown type.
        print("Unsupported point_type %s!" % (point_type))

    res = base_point * curve.n
    assert res.infinity

    # Next part is a suggestion from:
    # http://crypto.stackexchange.com/a/787
    for i in range(100):
        a = random.randrange(1, curve.n)
        b = random.randrange(1, curve.n)
        c = (a + b) % curve.n

        P = base_point * a
        Q = base_point * b
        R = base_point * c

        P_plus_Q = P + Q
        Q_plus_P = Q + P

        # Jacobian coordinates are not unique (i.e. for every Z != 0
        # there is a different X, Y but when converted to affine yield
        # the same X', Y'), so we should convert to affine before
        # asserting
        if point_type == 'jacobian':
            P = P.to_affine()
            Q = Q.to_affine()
            R = R.to_affine()
            P_plus_Q = P_plus_Q.to_affine()
            Q_plus_P = Q_plus_P.to_affine()

        try:
            assert P_plus_Q == Q_plus_P
            assert P_plus_Q == R
            assert Q_plus_P == R
        except AssertionError:
            # dump the failing operands for post-mortem debugging
            print("a = %d" % (a))
            print("b = %d" % (b))
            print("c = %d" % (c))
            print("P = %s" % (P))
            print("Q = %s" % (Q))
            print("R = %s" % (R))
            print("P_plus_Q = %s" % (P_plus_Q))
            print("Q_plus_P = %s" % (Q_plus_P))
            # NOTE(review): pytest ignores return values, so returning False
            # does NOT fail the test — consider re-raising instead.
            return False

    return True
@pytest.mark.parametrize("curve", [
    ecdsa_python.p256(),
    ecdsa_openssl.p256()
])
def test_p256(curve):
    """Exercise P-256: point arithmetic against NSA test vectors for the pure
    Python backend, and RFC 6979 nonces / sign / verify / key recovery for both
    backends."""
    point_class = None
    if isinstance(curve, ecdsa_python.p256):
        point_class = ecdsa_python.ECPointAffine
        # Test the basic operations, test vectors taken from:
        # https://www.nsa.gov/ia/_files/nist-routines.pdf, Section 4.3
        S = ecdsa_python.ECPointJacobian(curve,
                                         0xde2444bebc8d36e682edd27e0f271508617519b3221a8fa0b77cab3989da97c9,
                                         0xc093ae7ff36e5380fc01a5aad1e66659702de80f53cec576b6350b243042a256,
                                         1)
        T = ecdsa_python.ECPointJacobian(curve,
                                         0x55a8b00f8da1d44e62f6b3b25316212e39540dc861c89575bb8cf92e35e0986b,
                                         0x5421c3209c2d6c704835d82ac4c3dd90f61a8a52598b9e7ab656e9d8c8b24316,
                                         1)

        # Addition
        R = (S + T).to_affine()
        assert R.x == 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e
        assert R.y == 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264

        # Subtraction
        R = (S - T).to_affine()
        assert R.x == 0xc09ce680b251bb1d2aad1dbf6129deab837419f8f1c73ea13e7dc64ad6be6021
        assert R.y == 0x1a815bf700bd88336b2f9bad4edab1723414a022fdf6c3f4ce30675fb1975ef3

        # Point doubling
        R = (S.double()).to_affine()
        assert R.x == 0x7669e6901606ee3ba1a8eef1e0024c33df6c22f3b17481b82a860ffcdb6127b0
        assert R.y == 0xfa878162187a54f6c39f6ee0072f33de389ef3eecd03023de10ca2c1db61d0c7

        # Scalar multiplication
        d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd
        R = (S * d).to_affine()
        assert R.x == 0x51d08d5f2d4278882946d88d83c97d11e62becc3cfc18bedacc89ba34eeca03f
        assert R.y == 0x75ee68eb8bf626aa5b673ab51f6e744e06f8fcf8a6c0cf3035beca956a7b41d5

        # Joint scalar multiplicaton
        e = 0xd37f628ece72a462f0145cbefe3f0b355ee8332d37acdd83a358016aea029db7
        R = (S * d + T * e).to_affine()
        assert R.x == 0xd867b4679221009234939221b8046245efcf58413daacbeff857b8588341f6b8
        assert R.y == 0xf2504055c03cede12d22720dad69c745106b6607ec7e50dd35d54bd80f615275
    else:
        point_class = ecdsa_openssl.ECPointAffine

    # First test nonce generation according to rfc6979
    private_key = 0xC9AFA9D845BA75166B5C215767B1D6934E50C3DB36E89B127B8A622B120F6721
    public_key_x = 0x60FED4BA255A9D31C961EB74C6356D68C049B8923B61FA6CE669622E60F29FB6
    public_key_y = 0x7903FE1008B8BC99A41AE9E95628BC64F2F1B20C2D7E9F5177A3C294D4462299

    pub_key = curve.public_key(private_key)
    assert pub_key.x == public_key_x
    assert pub_key.y == public_key_y

    message = b'sample'
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0xA6E3C57DD01ABE90086538398355DD4C3B17AA873382B0F24D6129493D8AAD60

    sig_pt, _ = curve.sign(message, private_key)
    # NOTE(review): this y_from_x result is discarded — presumably only an
    # exercise/no-crash check; confirm whether an assertion was intended.
    curve.y_from_x(sig_pt.x)
    assert sig_pt.x == 0xEFD48B2AACB6A8FD1140DD9CD45E81D69D2C877B56AAF991C34D0EA84EAF3716
    assert sig_pt.y == 0xF7CB1C942D657C41D436C7A1B6E29F65F3E900DBB9AFF4064DC4AB2F843ACDA8

    # at least one recovered candidate must match the known public key
    keys = curve.recover_public_key(message, sig_pt)
    assert len(keys) > 0
    matching_keys = 0
    for k, recid in keys:
        if k is not None and k.x == public_key_x and k.y == public_key_y:
            matching_keys += 1
    assert matching_keys > 0

    assert curve.verify(message, sig_pt, point_class(curve, public_key_x, public_key_y))

    # Taken from https://www.nsa.gov/ia/_files/ecdsa.pdf Appendix D.1.1
    private_key = 0x70a12c2db16845ed56ff68cfc21a472b3f04d7d6851bf6349f2d7d5b3452b38a
    public_key_x = 0x8101ece47464a6ead70cf69a6e2bd3d88691a3262d22cba4f7635eaff26680a8
    public_key_y = 0xd8a12ba61d599235f67d9cb4d58f1783d3ca43e78f0a5abaa624079936c0c3a9

    pub_key = curve.public_key(private_key)
    assert pub_key.x == public_key_x
    assert pub_key.y == public_key_y

    k = 0x580ec00d856434334cef3f71ecaed4965b12ae37fa47055b1965c7b134ee45d0
    modinv_k = 0x6a664fa115356d33f16331b54c4e7ce967965386c7dcbf2904604d0c132b4a74
    if isinstance(curve, ecdsa_python.p256):
        # Test modular inverse (for signing)
        assert curve.modinv(k, curve.n) == modinv_k

    message = b'This is only a test message. It is 48 bytes long'
    sig_pt, _ = curve._sign(message, private_key, True, k)
    assert sig_pt.x == 0x7214bc9647160bbd39ff2f80533f5dc6ddd70ddf86bb815661e805d5d4e6f27c
    assert sig_pt.y == 0x7d1ff961980f961bdaa3233b6209f4013317d3e3f9e1493592dbeaa1af2bc367

    assert curve.verify(message, sig_pt, point_class(curve, public_key_x, public_key_y))
@pytest.mark.parametrize("curve", [
    ecdsa_python.secp256k1(),
    ecdsa_openssl.secp256k1()
])
def test_secp256k1(curve):
    """Known-answer tests for secp256k1: Jacobian/affine point arithmetic,
    scalar multiplication, y-from-x recovery and RFC 6979 deterministic
    signatures normalized to low-s form."""
    # Don't test point operations for OpenSSL
    if isinstance(curve, ecdsa_python.secp256k1):
        private_key, pub_key_aff = curve.gen_key_pair()
        # Doubling and addition must agree between coordinate systems.
        pub_key_jac = ecdsa_python.ECPointJacobian.from_affine(pub_key_aff)
        pub_key_jac_2 = pub_key_jac * 2
        pub_key_aff_2 = pub_key_aff * 2
        assert pub_key_jac_2.to_affine() == pub_key_aff_2
        pub_key_aff_3 = pub_key_aff_2 + pub_key_aff
        pub_key_jac_3 = pub_key_jac_2 + pub_key_jac
        assert pub_key_jac_3.to_affine() == pub_key_aff_3
        # Scalar-multiplication known-answer vectors.
        k = 0xAA5E28D6A97A2479A65527F7290311A3624D4CC0FA1578598EE3C2613BF99522
        kG = (curve.base_point * k).to_affine()
        assert kG.x == 0x34F9460F0E4F08393D192B3C5133A6BA099AA0AD9FD54EBCCFACDFA239FF49C6
        assert kG.y == 0x0B71EA9BD730FD8923F6D25A7A91E7DD7728A960686CB5A901BB419E0F2CA232
        k = 0x7E2B897B8CEBC6361663AD410835639826D590F393D90A9538881735256DFAE3
        kG = (curve.base_point * k).to_affine()
        assert kG.x == 0xD74BF844B0862475103D96A611CF2D898447E288D34B360BC885CB8CE7C00575
        assert kG.y == 0x131C670D414C4546B88AC3FF664611B1C38CEB1C21D76369D7A7A0969D61D97D
        k = 0x6461E6DF0FE7DFD05329F41BF771B86578143D4DD1F7866FB4CA7E97C5FA945D
        kG = (curve.base_point * k).to_affine()
        assert kG.x == 0xE8AECC370AEDD953483719A116711963CE201AC3EB21D3F3257BB48668C6A72F
        assert kG.y == 0xC25CAF2F0EBA1DDB2F0F3F47866299EF907867B7D27E95B3873BF98397B24EE1
        k = 0x376A3A2CDCD12581EFFF13EE4AD44C4044B8A0524C42422A7E1E181E4DEECCEC
        kG = (curve.base_point * k).to_affine()
        assert kG.x == 0x14890E61FCD4B0BD92E5B36C81372CA6FED471EF3AA60A3E415EE4FE987DABA1
        assert kG.y == 0x297B858D9F752AB42D3BCA67EE0EB6DCD1C2B7B0DBE23397E66ADC272263F982
        k = 0x1B22644A7BE026548810C378D0B2994EEFA6D2B9881803CB02CEFF865287D1B9
        kG = (curve.base_point * k).to_affine()
        assert kG.x == 0xF73C65EAD01C5126F28F442D087689BFA08E12763E0CEC1D35B01751FD735ED3
        assert kG.y == 0xF449A8376906482A84ED01479BD18882B919C140D638307F0C0934BA12590BDE
    # test getting y from x
    x = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
    y1, _ = curve.y_from_x(x)
    assert y1 == 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
    # test the nonce generation, these test-vectors are taken from:
    # https://bitcointalk.org/index.php?topic=285142.25
    private_key = 0x1
    message = b"Satoshi Nakamoto"
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15
    sig_pt, rec_id = curve.sign(message, private_key)
    sig_pt, _ = make_low_s(curve, sig_pt, rec_id)
    sig_full = (sig_pt.x << curve.nlen) + sig_pt.y
    assert sig_full == 0x934b1ea10a4b3c1757e2b0c017d0b6143ce3c9a7e6a4a49860d7a6ab210ee3d82442ce9d2b916064108014783e923ec36b49743e2ffa1c4496f01a512aafd9e5 # nopep8
    private_key = 0x1
    message = b"All those moments will be lost in time, like tears in rain. Time to die..."
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3
    sig_pt, rec_id = curve.sign(message, private_key)
    sig_pt, _ = make_low_s(curve, sig_pt, rec_id)
    sig_full = (sig_pt.x << curve.nlen) + sig_pt.y
    assert sig_full == 0x8600dbd41e348fe5c9465ab92d23e3db8b98b873beecd930736488696438cb6b547fe64427496db33bf66019dacbf0039c04199abb0122918601db38a72cfc21 # nopep8
    private_key = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140
    message = b"Satoshi Nakamoto"
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90
    # FIX: capture rec_id from THIS signature. Previously the fresh rec_id was
    # discarded and the stale rec_id from the preceding case was fed into
    # make_low_s, unlike every other case in this test.
    sig_pt, rec_id = curve.sign(message, private_key)
    sig_pt, _ = make_low_s(curve, sig_pt, rec_id)
    sig_full = (sig_pt.x << curve.nlen) + sig_pt.y
    assert sig_full == 0xfd567d121db66e382991534ada77a6bd3106f0a1098c231e47993447cd6af2d06b39cd0eb1bc8603e159ef5c20a5c8ad685a45b06ce9bebed3f153d10d93bed5 # nopep8
    private_key = 0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181
    message = b"Alan Turing"
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1
    sig_pt, rec_id = curve.sign(message, private_key)
    sig_pt, _ = make_low_s(curve, sig_pt, rec_id)
    sig_full = (sig_pt.x << curve.nlen) + sig_pt.y
    assert sig_full == 0x7063ae83e7f62bbb171798131b4a0564b956930092b33b07b395615d9ec7e15c58dfcc1e00a35e1572f366ffe34ba0fc47db1e7189759b9fb233c5b05ab388ea # nopep8
    private_key = 0xe91671c46231f833a6406ccbea0e3e392c76c167bac1cb013f6f1013980455c2
    message = b"There is a computer disease that anybody who works with computers knows about. It's a very serious disease and it interferes completely with the work. The trouble with computers is that you 'play' with them!" # nopep8
    k = curve._nonce_rfc6979(private_key, hashlib.sha256(message).digest())
    assert k == 0x1F4B84C23A86A221D233F2521BE018D9318639D5B8BBD6374A8A59232D16AD3D
    sig_pt, rec_id = curve.sign(message, private_key)
    sig_pt, _ = make_low_s(curve, sig_pt, rec_id)
    sig_full = (sig_pt.x << curve.nlen) + sig_pt.y
    assert sig_full == 0xb552edd27580141f3b2a5463048cb7cd3e047b97c9f98076c32dbdf85a68718b279fa72dd19bfae05577e06c7c0c1900c371fcd5893f7e1d56a37d30174671f6 # nopep8
|
#!/usr/bin/env python
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import atexit
import collections
import json
import hashlib
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
from tools import util
# This script is run with `buck run`, but needs to shell out to buck; this is
# only possible if we avoid buckd.
BUCK_ENV = dict(os.environ)
BUCK_ENV['NO_BUCKD'] = '1'
HEADER = """\
include_defs('//lib/js.defs')
# AUTOGENERATED BY BOWER2BUCK
#
# This file should be merged with an existing BUCK file containing these rules.
#
# This comment SHOULD NOT be copied to the existing BUCK file, and you should
# leave alone any non-bower_component contents of the file.
#
# Generally, the following attributes SHOULD be copied from this file to the
# existing BUCK file:
# - package: the normalized package name
# - version: the exact version number
# - deps: direct dependencies of the package
# - sha1: a hash of the package contents
#
# The following fields SHOULD NOT be copied to the existing BUCK file:
# - semver: manually-specified semantic version, not included in autogenerated
# output.
#
# The following fields require SPECIAL HANDLING:
# - license: all licenses in this file are specified as TODO. You must replace
# this text with one of the existing licenses defined in lib/BUCK, or
# define a new one if necessary. Leave existing licenses alone.
"""
def usage():
    """Print a usage message to stderr and return exit status 1."""
    message = ('Usage: %s -o <outfile> [//path/to:bower_components_rule...]'
               % sys.argv[0])
    print(message, file=sys.stderr)
    return 1
class Rule(object):
    """A bower_component rule derived from one package's .bower.json file.

    NOTE: uses Python 2 dict APIs (iterkeys), matching the rest of this script.
    """

    def __init__(self, bower_json_path):
        with open(bower_json_path) as f:
            bower_json = json.load(f)
        self.name = bower_json['name']
        self.version = bower_json['version']
        self.deps = bower_json.get('dependencies', {})
        self.license = bower_json.get('license', 'NO LICENSE')
        # Hash the package directory contents so fetches are reproducible.
        self.sha1 = util.hash_bower_component(
            hashlib.sha1(), os.path.dirname(bower_json_path)).hexdigest()

    def to_rule(self, packages):
        """Render this package as a bower_component(...) rule string.

        Raises ValueError when the package name cannot be resolved.
        """
        if self.name not in packages:
            raise ValueError('No package name found for %s' % self.name)
        out = [
            'bower_component(',
            " name = '%s'," % self.name,
            " package = '%s'," % packages[self.name],
            " version = '%s'," % self.version,
        ]
        if self.deps:
            if len(self.deps) == 1:
                only_dep = next(self.deps.iterkeys())
                out.append(" deps = [':%s']," % only_dep)
            else:
                out.append(' deps = [')
                out.extend(" ':%s'," % d for d in sorted(self.deps.iterkeys()))
                out.append(' ],')
        out.append(" license = 'TODO: %s'," % self.license)
        out.append(" sha1 = '%s'," % self.sha1)
        out.append(')')
        return '\n'.join(out)
def build_bower_json(targets, buck_out):
    """Query buck for the bower dependencies of `targets` and write a merged
    bower.json into a fresh temporary directory.

    :param targets: list of buck target labels (e.g. '//polygerrit-ui/...').
    :param buck_out: path to the repository's buck-out directory.
    :return: path to the generated bower.json file.
    """
    bower_json = collections.OrderedDict()
    bower_json['name'] = 'bower2buck-output'
    bower_json['version'] = '0.0.0'
    bower_json['description'] = 'Auto-generated bower.json for dependency management'
    bower_json['private'] = True
    bower_json['dependencies'] = {}
    # Find every bower download target reachable from the requested targets.
    deps = subprocess.check_output(
        ['buck', 'query', '-v', '0',
         "filter('__download_bower', deps(%s))" % '+'.join(targets)],
        env=BUCK_ENV)
    # NOTE(review): assumes Python 2, where check_output returns str; under
    # Python 3 this would be bytes and str.replace would fail — confirm.
    deps = deps.replace('__download_bower', '__bower_version').split()
    subprocess.check_call(['buck', 'build'] + deps, env=BUCK_ENV)
    for dep in deps:
        # Convert '//path:name' into 'path/name' to locate buck-out output.
        dep = dep.replace(':', '/').lstrip('/')
        depout = os.path.basename(dep)
        version_json = os.path.join(buck_out, 'gen', dep, depout)
        with open(version_json) as f:
            bower_json['dependencies'].update(json.load(f))
    # Write the merged manifest to a temp dir that is cleaned up at exit.
    tmpdir = tempfile.mkdtemp()
    atexit.register(lambda: shutil.rmtree(tmpdir))
    ret = os.path.join(tmpdir, 'bower.json')
    with open(ret, 'w') as f:
        json.dump(bower_json, f, indent=2)
    return ret
def get_package_name(name, package_version):
    """Resolve a dependency's package name.

    A version spec like 'User/package#^1.0' embeds the real (lowercased)
    package name before the '#'; otherwise the short name is used as-is.
    """
    normalized = package_version.lower()
    if '#' not in normalized:
        return name
    return normalized.split('#', 1)[0]
def get_packages(path):
    """Map each dependency short name in a bower.json to its package name.

    :param path: path to a bower.json file.
    :return: dict of short name -> resolved package name.
    """
    with open(path) as f:
        manifest = json.load(f)
    deps = manifest.get('dependencies', {})
    return dict((short, get_package_name(short, version))
                for short, version in deps.iteritems())  # Python 2 dict API
def collect_rules(packages):
    """Run `bower install` and build a Rule for each installed .bower.json.

    :param packages: dict mutated in place with short name -> package name
        mappings discovered from each rule's dependencies.
    :return: dict mapping package name to Rule.
    """
    # TODO(dborowitz): Use run_npm_binary instead of system bower.
    rules = {}
    subprocess.check_call(['bower', 'install'])
    for dirpath, dirnames, filenames in os.walk('.', topdown=True):
        if '.bower.json' not in filenames:
            continue
        # Clearing dirnames stops os.walk from descending further here.
        del dirnames[:]
        rule = Rule(os.path.join(dirpath, '.bower.json'))
        rules[rule.name] = rule
        # Oddly, the package name referred to in the deps section of dependents,
        # e.g. 'PolymerElements/iron-ajax', is not found anywhere in this
        # bower.json, which only contains 'iron-ajax'. Build up a map of short name
        # to package name so we can resolve them later.
        # TODO(dborowitz): We can do better:
        # - Infer 'user/package' from GitHub URLs (i.e. a simple subset of Bower's package
        #   resolution logic).
        # - Resolve aliases using https://bower.herokuapp.com/packages/shortname
        #   (not currently biting us but it might in the future.)
        for n, v in rule.deps.iteritems():  # Python 2 dict API
            p = get_package_name(n, v)
            old = packages.get(n)
            if old is not None and old != p:
                raise ValueError('multiple packages named %s: %s != %s' % (n, p, old))
            packages[n] = p
    return rules
def find_buck_out():
    """Locate the repository's buck-out directory.

    Walks upward from the current working directory until a directory
    containing .buckconfig is found.

    :return: path to <repo root>/buck-out.
    :raises EnvironmentError: if no .buckconfig exists up to the filesystem
        root (previously this looped forever, since dirname('/') == '/').
    """
    cur = os.getcwd()
    while not os.path.isfile(os.path.join(cur, '.buckconfig')):
        parent = os.path.dirname(cur)
        if parent == cur:
            raise EnvironmentError(
                'no .buckconfig found in any directory above %s' % os.getcwd())
        cur = parent
    return os.path.join(cur, 'buck-out')
def main(args):
    """Entry point: parse options, generate a merged bower.json, and write
    autogenerated bower_component BUCK rules to the output file.

    :param args: command-line arguments (without the program name).
    :return: nonzero exit status on usage error, None on success.
    """
    opts = optparse.OptionParser()
    opts.add_option('-o', help='output file location')
    # FIX: parse the provided argument list instead of implicitly reading
    # sys.argv — previously the `args` parameter was silently ignored.
    opts, args = opts.parse_args(args)
    if not opts.o or not all(a.startswith('//') for a in args):
        return usage()
    outfile = os.path.abspath(opts.o)
    buck_out = find_buck_out()
    targets = args if args else ['//polygerrit-ui/...']
    bower_json_path = build_bower_json(targets, buck_out)
    # collect_rules runs `bower install` relative to the cwd.
    os.chdir(os.path.dirname(bower_json_path))
    packages = get_packages(bower_json_path)
    rules = collect_rules(packages)
    with open(outfile, 'w') as f:
        f.write(HEADER)
        for _, r in sorted(rules.iteritems()):  # Python 2 dict API
            f.write('\n\n%s' % r.to_rule(packages))
    print('Wrote bower_components rules to:\n %s' % outfile)
if __name__ == '__main__':
    # Propagate main()'s return value (usage errors return 1) to the shell;
    # previously the status was computed and then discarded.
    sys.exit(main(sys.argv[1:]))
|
from __future__ import division
import numpy as np
import tensorflow as tf
# components
from tensorflow.python.ops.nn import dropout as drop
from cnn import conv_layer as conv
from cnn import conv_relu_layer as conv_relu
from cnn import pooling_layer as pool
from cnn import fc_layer as fc
from cnn import fc_relu_layer as fc_relu
def vs_multilayer(input_batch, name, middle_layer_dim=1000, reuse=False, test=False):
    """Two-layer fully-connected scoring head (TF1 variable-scope style).

    :param input_batch: input tensor fed to the first FC layer
    :param name: variable scope holding the layer weights
    :param middle_layer_dim: width of the hidden ReLU layer
    :param reuse: if True, reuse variables already created in this scope
    :param test: if True, disable dropout (keep probability 1)
    :return: output tensor of the final 4-unit FC layer
    """
    with tf.variable_scope(name):
        if reuse:  # plain truth test instead of `== True`; dead else removed
            tf.get_variable_scope().reuse_variables()
        layer1 = fc_relu('layer1', input_batch, output_dim=middle_layer_dim)
        # Dropout only during training; keep everything at test time.
        keep_prob = 1 if test else 0.5
        layer1 = drop(layer1, keep_prob)
        outputs = fc('layer2', layer1, output_dim=4)
        return outputs
|
import unittest
from fastapi.testclient import TestClient
from core.api.controller import create_app
from core.api.request.judge_request import JudgeRequest
from core.api.request.eval_request import EvalRequest
from core.api.request.reading_request import ReadingRequest
class TestAPI(unittest.TestCase):
    """HTTP-level tests for the dajare judge/eval/reading endpoints."""

    DAJARE_JUDGE_PATH: str = '/judge'
    DAJARE_EVAL_PATH: str = '/eval'
    DAJARE_READING_PATH: str = '/reading'
    SAMPLE_STR: str = '布団が吹っ飛んだ'

    def setUp(self):
        # Fresh client around a freshly built app for every test.
        self.app = TestClient(create_app())

    def test_正_ダジャレを判定(self):
        """Judging a valid pun returns 200 with a boolean verdict."""
        payload = JudgeRequest(dajare=self.SAMPLE_STR)
        response = self.app.get(self.DAJARE_JUDGE_PATH, params=payload)
        self.assertEqual(200, response.status_code)
        self.assertIsInstance(response.json()["is_dajare"], bool)

    def test_異_判定APIのパラメータが不足(self):
        """Judging without parameters is rejected with 422."""
        response = self.app.get(self.DAJARE_JUDGE_PATH, params={})
        self.assertEqual(422, response.status_code)

    def test_正_ダジャレを評価(self):
        """Evaluating a valid pun returns 200 with a float score."""
        payload = EvalRequest(dajare=self.SAMPLE_STR)
        response = self.app.get(self.DAJARE_EVAL_PATH, params=payload)
        self.assertEqual(200, response.status_code)
        self.assertIsInstance(response.json()["score"], float)

    def test_異_評価APIのパラメータが不足(self):
        """Evaluating without parameters is rejected with 422."""
        response = self.app.get(self.DAJARE_EVAL_PATH, params={})
        self.assertEqual(422, response.status_code)

    def test_正_ダジャレを読みに変換(self):
        """Converting a valid pun to its reading returns 200 with a string."""
        payload = ReadingRequest(dajare=self.SAMPLE_STR)
        response = self.app.get(self.DAJARE_READING_PATH, params=payload)
        self.assertEqual(200, response.status_code)
        self.assertIsInstance(response.json()["reading"], str)

    def test_異_読み変換APIのパラメータが不足(self):
        """Reading conversion without parameters is rejected with 422."""
        response = self.app.get(self.DAJARE_READING_PATH, params={})
        self.assertEqual(422, response.status_code)
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import survey
class Baby(survey.Record):
    """One birth record from the babyboom data set."""
class Babies(survey.Table):
    """Represents the Babies table read from the babyboom fixed-width file."""

    def __init__(self):
        survey.Table.__init__(self)
        # Lines before the 'START DATA:' marker are header lines to skip.
        self.startflg = False

    def GetFilename(self):
        """Name of the data file this table is read from."""
        return 'babyboom.dat'

    def GetFields(self):
        """Fixed-width field layout: (name, first column, last column, cast)."""
        return [
            ('birthtime', 1, 8, int),
            ('gender', 9, 16, int),
            ('birthwgt', 17, 24, int),
            ('minutes', 25, 32, int)
        ]

    def Recode(self):
        """No recoding is needed for this table."""
        pass

    def ReadRecords(self, data_dir='.', n=None):
        """Read up to n Baby records from data_dir and recode them."""
        self.ReadFile(data_dir, self.GetFilename(), self.GetFields(), Baby, n)
        self.Recode()

    def MakeRecord(self, line, fields, constructor):
        """Parse one fixed-width line, or return None while still in the header."""
        if not self.startflg:
            # Flip the flag once the data marker appears; the marker line
            # itself is not a record either.
            self.startflg = 'START DATA:' in line
            return None
        record = constructor()
        for name, first, last, cast in fields:
            try:
                value = cast(line[first - 1:last])
            except ValueError:
                value = 'NA'
            setattr(record, name, value)
        return record

    def AddRecord(self, record):
        """Append parsed records; header lines produce None and are dropped."""
        if record is not None:
            self.records.append(record)
|
from abc import ABCMeta, abstractmethod
class Attack(metaclass=ABCMeta):
    """Abstract interface for adversarial attacks on graph models."""
    @abstractmethod
    def attack(self, model, adj, features, **kwargs):
        """
        Perform the attack.

        :param model: victim model under attack
        :param adj: adjacency matrix of the graph
        :param features: node feature matrix
        :param kwargs: implementation-specific options
        :return: implementation-defined
        """
class ModificationAttack(Attack):
    """Attack that perturbs the existing graph (modification-based)."""
    @abstractmethod
    def attack(self, **kwargs):
        """
        Perform the modification attack.

        :param kwargs: implementation-specific options
        :return: implementation-defined
        """
    @abstractmethod
    def modification(self):
        """
        Apply the perturbations to the graph.

        :return: implementation-defined
        """
class InjectionAttack(Attack):
    """Attack that injects new nodes into the graph (injection-based)."""
    @abstractmethod
    def attack(self, **kwargs):
        """
        Perform the injection attack.

        :param kwargs: implementation-specific options
        :return: implementation-defined
        """
    @abstractmethod
    def injection(self, **kwargs):
        """
        Inject new nodes/edges into the graph.

        :return: implementation-defined
        """
    @abstractmethod
    def update_features(self, **kwargs):
        """
        Set or optimize the features of the injected nodes.

        :return: implementation-defined
        """
|
"""Quality-time specific types."""
from typing import Any, Dict, List, NewType, Optional, Sequence, Union
# Mapping of attribute name to value for one collected entity.
Entity = Dict[str, Union[int, str]] # pylint: disable=invalid-name
Entities = List[Entity]
# Error text for the caller, or None when no error occurred.
ErrorMessage = Optional[str]
# Arbitrary JSON object describing a job.
Job = Dict[str, Any]
Jobs = List[Job]
Namespaces = Dict[str, str] # Namespace prefix to Namespace URI mapping
# Arbitrary JSON response payload.
Response = Dict[str, Any]
URL = NewType("URL", str)
# Subject attributes: strings or sequences of URLs.
Subject = Dict[str, Union[str, Sequence[URL]]]
Report = Dict[str, Sequence[Subject]]
# A measurement value; None when it could not be determined.
Value = Optional[str]
Parameter = Union[str, List[str]]
|
from combus.command import Command
from combus.command_bus import CommandBus
from combus.command_handler import CommandHandler
def test_it_will_handle_a_command():
    """A command dispatched through the bus reaches its registered handler."""

    class Bar(object):
        """Mutable collaborator the handler writes its result into."""

        def __init__(self):
            self._value = None

        @property
        def foo(self):
            return self._value

        @foo.setter
        def foo(self, value):
            self._value = value

    class FooHandler(CommandHandler):
        """Handler that copies the command payload onto the helper."""

        def __init__(self, helper: Bar):
            self.helper = helper

        def _handle(self, command: Command):
            self.helper.foo = command.foo

    class FooCommand(Command):
        def __init__(self, foo):
            self._payload = foo

        @property
        def foo(self):
            return self._payload

    collaborator = Bar()
    command_bus = CommandBus()
    command_bus.link_command_with_handler(
        FooCommand.__name__, FooHandler(helper=collaborator))
    command_bus.handle(FooCommand(foo="bar"))
    assert 'bar' == collaborator.foo
|
import glob
import itertools
import json
import os
import re
from functools import partial
from math import isclose
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from ConfigSpace.read_and_write import json as config_space_json_r_w
from scipy.stats import norm, spearmanr, kendalltau
from sklearn.metrics import mean_squared_error, r2_score
from tqdm import tqdm
# from nasbench301.surrogate_models.bananas.bananas import BANANASModel
# from nasbench301.surrogate_models.gnn.gnn import GNNSurrogateModel
from nasbench301.surrogate_models.gradient_boosting.lgboost import LGBModelTime
from nasbench301.surrogate_models.gradient_boosting.xgboost import XGBModel
# from nasbench301.surrogate_models.random_forrest.sklearn_forest import SklearnForest
# from nasbench301.surrogate_models.svr.nu_svr import NuSVR
# from nasbench301.surrogate_models.svr.svr import SVR
sns.set_style('whitegrid')
# model_dict = {
#
# # NOTE: RUNTIME MODELS SHOULD END WITH "_time"
#
# # Graph Convolutional Neural Networks
# 'gnn_gin': partial(GNNSurrogateModel, gnn_type='gnn_gin'),
# 'gnn_diff_pool': partial(GNNSurrogateModel, gnn_type='gnn_diff_pool'),
# 'gnn_deep_multisets': partial(GNNSurrogateModel, gnn_type='gnn_deep_multisets'),
# 'gnn_vs_gae': partial(GNNSurrogateModel, gnn_type='gnn_vs_gae'),
# 'gnn_vs_gae_classifier': partial(GNNSurrogateModel, gnn_type='gnn_vs_gae_classifier'),
# 'deeper_gnn': partial(GNNSurrogateModel, gnn_type='deeper_gnn'),
# 'bananas': BANANASModel,
#
# # Baseline methods
# #'random_forest': RandomForest,
# 'sklearn_forest': SklearnForest,
# 'xgb': XGBModel,
# 'xgb_time': XGBModelTime,
# 'lgb': LGBModel,
# 'lgb_time': LGBModelTime,
# #'ngb': NGBModel,
# 'svr': SVR,
# 'svr_nu': NuSVR,
# }
# Registry of the surrogate models currently in use, keyed by model name.
# Runtime-prediction models end with "_time"; the full registry (GNNs, SVRs,
# forests) is kept commented out above.
model_dict = {
    'xgb': XGBModel,
    'lgb_time': LGBModelTime,
}
def evaluate_metrics(y_true, y_pred, prediction_is_first_arg):
    """
    Create a dict with all evaluation metrics.

    :param y_true: ground-truth values
    :param y_pred: predicted values
    :param prediction_is_first_arg: if True, the arguments arrive as
        (y_pred, y_true) and are swapped before computing the metrics.
    :return: dict with mse/rmse/r2, Kendall tau (full and rounded to 2/1
        decimals), and Spearman correlation.
    """
    if prediction_is_first_arg:
        y_true, y_pred = y_pred, y_true
    metrics_dict = dict()
    metrics_dict["mse"] = mean_squared_error(y_true, y_pred)
    metrics_dict["rmse"] = np.sqrt(metrics_dict["mse"])
    metrics_dict["r2"] = r2_score(y_true, y_pred)
    # kendalltau returns (correlation, p_value); the p-values are not
    # reported, so discard them instead of rebinding an unused local.
    metrics_dict["kendall_tau"], _ = kendalltau(y_true, y_pred)
    metrics_dict["kendall_tau_2_dec"], _ = kendalltau(y_true, np.round(np.array(y_pred), decimals=2))
    metrics_dict["kendall_tau_1_dec"], _ = kendalltau(y_true, np.round(np.array(y_pred), decimals=1))
    metrics_dict["spearmanr"] = spearmanr(y_true, y_pred).correlation
    return metrics_dict
def get_model_configspace(model):
    """
    Retrieve the model_config.

    :param model: Name of the model for which you want the default config
    :return: ConfigSpace object parsed from the matching *_configspace.json
    :raises AssertionError: if zero or multiple config files match.
    """
    # Find matching config for the model name
    model_config_regex = re.compile(".*{}_configspace.json".format(model))
    matched_model_config_paths = list(
        filter(model_config_regex.match, glob.glob('surrogate_models/configs/model_configs/*/*')))
    # Make sure we only matched exactly one config
    assert len(matched_model_config_paths) == 1, 'Multiple or no configs matched with the requested model.'
    model_config_path = matched_model_config_paths[0]
    # Load the configspace object; a context manager closes the file handle
    # instead of leaking it as the previous open(...).read() did.
    with open(model_config_path, 'r') as f:
        model_configspace = config_space_json_r_w.read(f.read())
    return model_configspace
def convert_array_to_list(a):
    """Return `a` as a plain Python list when it is a numpy array;
    any other value is passed through unchanged."""
    return a.tolist() if isinstance(a, np.ndarray) else a
class ConfigLoader:
    """Loads results.json files and converts their hyperparameter dicts into
    ConfigSpace configuration instances, optionally rescaling fidelities,
    increasing parameter-free ops in a cell, or overriding hyperparameters."""

    def __init__(self, config_space_path):
        """
        :param config_space_path: path to the ConfigSpace JSON definition.
        """
        self.config_space = self.load_config_space(config_space_path)
        # The exponent to scale the fidelity with.
        # Used to move architectures across the fidelity budgets.
        # Default at None, hence the fidelity values are not changed.
        self.fidelity_exponent = None
        # Parameter-free op handling: if the type/ratio below stay None
        # (default), no parameter-free ops are added to the cell.
        self.parameter_free_op_increase_type = None
        self.ratio_parameter_free_op_in_cell = None
        # Manually adjust a certain set of hyperparameters.
        self.parameter_change_dict = None
        # Predefined per-step fidelity multipliers.
        self.fidelity_multiplier = {
            'SimpleLearningrateSchedulerSelector:cosine_annealing:T_max': 1.762734383267615,
            'NetworkSelectorDatasetInfo:darts:init_channels': 1.3572088082974532,
            'NetworkSelectorDatasetInfo:darts:layers': 1.2599210498948732
        }
        # Starting values of the fidelity parameters.
        self.fidelity_starts = {
            'SimpleLearningrateSchedulerSelector:cosine_annealing:T_max': 50,
            'NetworkSelectorDatasetInfo:darts:init_channels': 8,
            'NetworkSelectorDatasetInfo:darts:layers': 5
        }

    def __getitem__(self, path):
        """
        Load the results from results.json.

        :param path: Path to results.json
        :return: (config_space_instance, val_accuracy, test_accuracy, raw json dict)
        """
        # Context manager closes the handle (previously leaked by
        # json.load(open(path))).
        with open(path, 'r') as f:
            json_file = json.load(f)
        config_dict = json_file['optimized_hyperparamater_config']
        config_space_instance = self.query_config_dict(config_dict)
        val_accuracy = json_file['info'][0]['val_accuracy']
        test_accuracy = json_file['test_accuracy']
        return config_space_instance, val_accuracy, test_accuracy, json_file

    def get_runtime(self, path):
        """
        Load the runtime from results.json.

        :param path: Path to results.json
        :return: (config_space_instance, runtime)
        """
        with open(path, 'r') as f:  # close the handle instead of leaking it
            json_file = json.load(f)
        config_dict = json_file['optimized_hyperparamater_config']
        config_space_instance = self.query_config_dict(config_dict)
        runtime = json_file['runtime']
        return config_space_instance, runtime

    def query_config_dict(self, config_dict):
        """Apply the configured transformations to a hyperparameter dict and
        turn it into a ConfigSpace Configuration instance."""
        # Scale the fidelity hyperparameters if requested.
        if self.fidelity_exponent is not None:
            config_dict = self.scale_fidelity(config_dict)
        # Add selected parameter-free ops if a ratio is requested.
        if self.ratio_parameter_free_op_in_cell is not None:
            config_dict = self.add_selected_parameter_free_op(config_dict)
        # Override a selection of parameters if requested.
        if self.parameter_change_dict is not None:
            config_dict = self.change_parameter(config_dict)
        # Create the config space instance based on the config space.
        config_space_instance = \
            self.convert_config_dict_to_configspace_instance(self.config_space, config_dict=config_dict)
        return config_space_instance

    def add_selected_parameter_free_op(self, config_dict):
        """
        Override cell operations so that the selected parameter-free op makes
        up (at least) the configured ratio of the cell's operations.

        :param config_dict: hyperparameter dict, modified in place and returned
        :return: the (possibly) modified config_dict
        """
        assert self.parameter_free_op_increase_type in ['max_pool_3x3',
                                                        'avg_pool_3x3',
                                                        'skip_connect'], 'Unknown parameter-free op was selected.'
        # Per-cell dictionaries of edges that already carry the selected
        # parameter-free op vs. all other edges.
        cell_op_dict_sel_param_free = {'normal': {}, 'reduce': {}}
        cell_op_dict_non_sel_param_free = {'normal': {}, 'reduce': {}}
        # NOTE(review): only 'normal' edges are collected here, so the
        # 'reduce' entries stay empty and the reduce pass below is a no-op —
        # confirm this is intended.
        for cell_type in ['normal']:
            for edge in range(0, 14):
                key = 'NetworkSelectorDatasetInfo:darts:edge_{}_{}'.format(cell_type, edge)
                op = config_dict.get(key, None)
                if op is not None:
                    if op == self.parameter_free_op_increase_type:
                        cell_op_dict_sel_param_free[cell_type][key] = op
                    else:
                        cell_op_dict_non_sel_param_free[cell_type][key] = op
        # Select a random subset of the remaining ops to turn into the
        # selected parameter-free op.
        for cell_type in ['normal', 'reduce']:
            num_sel_param_free_ops = len(cell_op_dict_sel_param_free[cell_type].values())
            num_non_sel_param_free_ops = len(cell_op_dict_non_sel_param_free[cell_type].values())
            num_ops = num_sel_param_free_ops + num_non_sel_param_free_ops
            # FIX: np.int was removed in NumPy >= 1.24; the builtin int is the
            # documented replacement and behaves identically here.
            desired_num_sel_param_free_ops = np.round(num_ops * self.ratio_parameter_free_op_in_cell).astype(int)
            remaining_num_sel_param_free_op = desired_num_sel_param_free_ops - num_sel_param_free_ops
            if remaining_num_sel_param_free_op > 0:
                # There are still more selected parameter free operations to add to satisfy the ratio of
                # sel param free op. Therefore override some of the other operations to be parameter free op.
                sel_param_free_idx = np.random.choice(num_non_sel_param_free_ops, remaining_num_sel_param_free_op,
                                                      replace=False)
                for idx, (key, value) in enumerate(cell_op_dict_non_sel_param_free[cell_type].items()):
                    if idx in sel_param_free_idx:
                        config_dict[key] = self.parameter_free_op_increase_type
        return config_dict

    def scale_fidelity(self, config_dict):
        """
        Scale the fidelity of the current sample by
        multiplier ** fidelity_exponent for each fidelity hyperparameter.

        :param config_dict: hyperparameter dict, modified in place
        :return: the modified config_dict
        """
        for name, value in self.fidelity_multiplier.items():
            config_dict[name] = int(config_dict[name] * value ** self.fidelity_exponent)
        return config_dict

    def change_parameter(self, config_dict):
        """Override hyperparameters with the values in parameter_change_dict."""
        for name, value in self.parameter_change_dict.items():
            config_dict[name] = value
        return config_dict

    def convert_config_dict_to_configspace_instance(self, config_space, config_dict):
        """
        Convert a config dictionary to a ConfigSpace instance.

        :param config_space: the ConfigSpace object to validate against
        :param config_dict: hyperparameter name -> value mapping
        :return: CS.Configuration instance
        """
        def _replace_str_bool_with_python_bool(input_dict):
            # JSON results store booleans as the strings 'True'/'False'.
            for key, value in input_dict.items():
                if value == 'True':
                    input_dict[key] = True
                elif value == 'False':
                    input_dict[key] = False
            return input_dict

        config_dict = _replace_str_bool_with_python_bool(config_dict)
        config_instance = CS.Configuration(config_space, values=config_dict)
        return config_instance

    @staticmethod
    def load_config_space(path):
        """
        Load the ConfigSpace object.

        As certain hyperparameters are not denoted as optimizable but
        overridden later, they are manually overridden here too (replaced by
        wide integer/float ranges).

        :param path: path to the ConfigSpace JSON definition
        :return: ConfigSpace object
        """
        with open(os.path.join(path), 'r') as fh:
            json_string = fh.read()
        config_space = config_space_json_r_w.read(json_string)
        # Override the constant hyperparameters for num layers, T_max,
        # init_channels and eta_min with wide optimizable ranges.
        config_space._hyperparameters.pop('NetworkSelectorDatasetInfo:darts:layers', None)
        num_layers = CSH.UniformIntegerHyperparameter(name='NetworkSelectorDatasetInfo:darts:layers', lower=1,
                                                      upper=10000)
        config_space._hyperparameters.pop('SimpleLearningrateSchedulerSelector:cosine_annealing:T_max', None)
        t_max = CSH.UniformIntegerHyperparameter(name='SimpleLearningrateSchedulerSelector:cosine_annealing:T_max',
                                                 lower=1, upper=10000)
        config_space._hyperparameters.pop('NetworkSelectorDatasetInfo:darts:init_channels', None)
        init_channels = CSH.UniformIntegerHyperparameter(name='NetworkSelectorDatasetInfo:darts:init_channels', lower=1,
                                                         upper=10000)
        config_space._hyperparameters.pop('SimpleLearningrateSchedulerSelector:cosine_annealing:eta_min', None)
        eta_min_cosine = CSH.UniformFloatHyperparameter(
            name='SimpleLearningrateSchedulerSelector:cosine_annealing:eta_min', lower=0, upper=10000)
        config_space.add_hyperparameters([num_layers, t_max, init_channels, eta_min_cosine])
        return config_space

    def get_config_without_architecture(self, config_instance):
        """
        Remove the architecture parameters from the config.

        Currently this function retrieves the 5 parameters which are actually
        changed throughout the results: num_epochs, num_layers,
        num_init_channels (3 fidelities) + learning_rate, weight_decay.

        :param config_instance: CS.Configuration instance
        :return: list of the 5 non-architecture hyperparameter values
        """
        non_arch_hyperparameters_list = [
            config_instance._values['SimpleLearningrateSchedulerSelector:cosine_annealing:T_max'],
            config_instance._values['NetworkSelectorDatasetInfo:darts:init_channels'],
            config_instance._values['NetworkSelectorDatasetInfo:darts:layers'],
            config_instance._values['OptimizerSelector:sgd:learning_rate'],
            config_instance._values['OptimizerSelector:sgd:weight_decay']]
        return non_arch_hyperparameters_list
class ResultLoader:
    def __init__(self, root, filepath_regex, train_val_test_split, seed):
        """
        :param root: root directory containing the result files.
        :param filepath_regex: glob pattern (relative to root) for results.
        :param train_val_test_split: dict with the split 'type' and ratios.
        :param seed: RNG seed.
        """
        self.root = root
        self.filepath_regex = filepath_regex
        self.train_val_test_split = train_val_test_split
        # NOTE(review): this seeds numpy's *global* RNG, which affects every
        # other np.random user in the process — confirm this is intended.
        np.random.seed(seed)
def return_train_val_test(self):
"""
Get the result train/val/test split.
:return:
"""
if self.train_val_test_split['type'] == 'all_result_paths':
paths_split = self.all_result_paths()
elif self.train_val_test_split['type'] == 'filtered_result_paths':
paths_split = self.filtered_result_paths()
elif self.train_val_test_split['type'] == 'per_budget_equal_result_paths':
paths_split = self.per_budget_equal_result_paths()
elif self.train_val_test_split['type'] == 'per_subfolder_equal_ratio':
paths_split = self.per_subfolder_equal_ratio()
elif self.train_val_test_split['type'] == 'no_data':
paths_split = [], [], []
else:
raise ValueError('Unknown train/val/test split.')
train_paths, val_paths, test_paths = paths_split
return train_paths, val_paths, test_paths
    def filter_duplicate_dirs(self, paths_to_json):
        """
        Checks the configurations in the results.json files and returns paths
        such that none contains duplicate configurations.

        :param paths_to_json: List of dir/results.json
        :return: unique list of dir/results.json w.r.t. configuration
        """
        config_hashes = []
        for path_to_json in paths_to_json:
            with open(path_to_json, "r") as f:
                results = json.load(f)
            # NOTE(review): hash() of a str is salted per process
            # (PYTHONHASHSEED), so dedup is stable within a run but the
            # chosen representatives can differ across runs — confirm OK.
            config_hash = hash(results["optimized_hyperparamater_config"].__repr__())
            config_hashes.append(config_hash)
        # np.unique returns the index of the *first* occurrence of each hash,
        # so the earliest (sorted) path wins.
        _, unique_indices = np.unique(config_hashes, return_index=True)
        return list(np.array(paths_to_json)[unique_indices])
def get_splits(self, paths, ratios=None):
"""
Divide the paths into train/val/test splits.
:param paths:
:param ratios:
:return:
"""
if ratios is None:
train_ratio, val_ratio, test_ratio = self.train_val_test_split['train'], self.train_val_test_split['val'], \
self.train_val_test_split['test']
else:
train_ratio, val_ratio, test_ratio = ratios
assert isclose(train_ratio + val_ratio + test_ratio, 1.0,
abs_tol=1e-8), 'The train/val/test split should add up to 1.'
# Randomly shuffle the list
rng = np.random.RandomState(6)
rng.shuffle(paths)
# Extract the train/val/test splits
train_upper_idx = int(train_ratio * len(paths))
val_upper_idx = int((train_ratio + val_ratio) * len(paths))
train_paths = paths[:train_upper_idx]
val_paths = paths[train_upper_idx:val_upper_idx]
test_paths = paths[val_upper_idx:-1]
return train_paths, val_paths, test_paths
    def all_result_paths(self):
        """
        Return the paths of all results, deduplicated by configuration and
        divided into train/val/test splits.

        :return: (train_paths, val_paths, test_paths)
        """
        all_results_paths = glob.glob(os.path.join(self.root, self.filepath_regex))
        print("==> Found %i results paths. Filtering duplicates..." % len(all_results_paths))
        # Sort first so the seeded shuffle in get_splits is reproducible.
        all_results_paths.sort()
        all_results_paths_filtered = self.filter_duplicate_dirs(all_results_paths)
        print("==> Finished filtering. Found %i unique architectures, %i duplicates" % (len(all_results_paths_filtered), \
                                                                                        len(all_results_paths) - len(
            all_results_paths_filtered)))
        train_paths, val_paths, test_paths = self.get_splits(all_results_paths_filtered)
        return train_paths, val_paths, test_paths
    def per_subfolder_equal_ratio(self):
        """
        Split the files of each subfolder separately with the configured
        train/val/test ratios, then concatenate the per-folder splits so every
        subfolder contributes proportionally to each split.

        :return: (train_paths, val_paths, test_paths)
        """
        train_paths, val_paths, test_paths = [], [], []
        for subdir in os.listdir(os.path.join(self.root, self.filepath_regex)):
            subdir_path = os.path.join(self.root, self.filepath_regex, subdir)
            # For each subdir split according to the train_val_test_ratios
            files_in_subdir = glob.glob(os.path.join(subdir_path, '*'))
            # Sort so the seeded shuffle in get_splits is reproducible.
            files_in_subdir.sort()
            train, val, test = self.get_splits(files_in_subdir)
            # Accumulate the per-subfolder splits.
            train_paths.extend(train)
            val_paths.extend(val)
            test_paths.extend(test)
        return train_paths, val_paths, test_paths
    def filtered_result_paths(self):
        """
        Return only the result paths whose json content matches the configured
        result filters.

        The 'train_val_filter' selects the pool that is split into train/val
        (with ratios re-normalised to sum to 1), while 'test_filter' selects
        the test pool. The two pools must be disjoint.

        :return: (train_paths, val_paths, test_paths)
        """
        # Check result filters have been specified
        assert self.train_val_test_split.get('filters', None) is not None, 'Can\'t filter without a result filter.'
        # Train/val and test split should not be the same filter
        assert self.train_val_test_split['filters']['train_val_filter'] != self.train_val_test_split['filters'][
            'test_filter'], 'Train/Val filter should not be the same as the test filter.'
        all_results_paths = glob.glob(os.path.join(self.root, 'run_*/results_fidelity_*/results_*.json'))
        all_results_paths.sort()
        results_per_filter = {result_filter: [] for result_filter in self.train_val_test_split.get('filters').keys()}
        for result_path in tqdm(all_results_paths, desc='Filtering results'):
            result_json = json.load(open(result_path, 'r'))
            # Go through all elements to be filtered
            for result_filter_name, result_filter_path in self.train_val_test_split.get('filters').items():
                result_filter = json.load(open(result_filter_path, 'r'))
                results = []
                for filter_key, filter_details in result_filter.items():
                    # Retrieve the element to be checked (searched recursively
                    # through the nested result json)
                    filtered_value = list(find_key_value(filter_key, result_json))
                    if len(filtered_value):
                        if filter_details['type'] == "interval":
                            # Check if the configuration matches the filter interval
                            lower_filter_val, high_filter_val = filter_details['data']
                            if lower_filter_val <= filtered_value[0] <= high_filter_val:
                                results.append(result_path)
                            else:
                                continue
                        elif filter_details['type'] == "list":
                            # Check whether the value is in a list of pre-specified values
                            if filtered_value[0] in filter_details['data']:
                                results.append(result_path)
                            else:
                                continue
                        else:
                            # Unknown filter type: silently treated as non-matching.
                            pass
                # A result is kept only if *every* criterion of the filter matched.
                if len(results) == len(result_filter.keys()):
                    results_per_filter[result_filter_name].append(results[0])
        # Re-normalise the train/val ratios so they sum to 1; the test ratio is
        # unused here because the test set comes from its own filter.
        new_train_ratio = self.train_val_test_split['train'] / (
                self.train_val_test_split['train'] + self.train_val_test_split['val'])
        new_val_ratio = self.train_val_test_split['val'] / (
                self.train_val_test_split['train'] + self.train_val_test_split['val'])
        train_paths, val_paths, _ = self.get_splits(results_per_filter['train_val_filter'],
                                                    (new_train_ratio, new_val_ratio, 0.0))
        test_paths = results_per_filter['test_filter']
        assert len(set(results_per_filter['train_val_filter']).intersection(
            set(test_paths))) == 0, 'Train/val and test set are not disjoint.'
        return train_paths, val_paths, test_paths
def per_budget_equal_result_paths(self):
"""
Here train/val/test split is performed such that *per fidelity* the ratio of train/val/test is consistent.
:return: result_paths
"""
train_paths_dict, val_paths_dict, test_paths_dict = self.per_budget_data()
flat_list_from_list_of_lists = lambda list_of_list: list(itertools.chain.from_iterable(list_of_list))
train_paths, val_paths, test_paths = [flat_list_from_list_of_lists(dict.values()) for dict in
[train_paths_dict, val_paths_dict, test_paths_dict]]
rng = np.random.RandomState(6)
rng.shuffle(train_paths)
rng.shuffle(val_paths)
val_paths(test_paths)
return train_paths, val_paths, test_paths
def per_budget_data(self):
"""
Extract the train/val/test split for each budget
:return: Dictionaries containing the data for each fidelity
"""
train_paths_dict, val_paths_dict, test_paths_dict = {}, {}, {}
for fidelity_num in range(7):
results_in_fidelity = glob.glob(
os.path.join(self.root, 'run_*/results_fidelity_{}/results_*.json').format(fidelity_num))
results_in_fidelity.sort()
# Split the fidelity based on the train/val/test portions
train_paths_in_fidelity, val_paths_in_fidelity, test_paths_in_fidelity = self.get_splits(
results_in_fidelity)
train_paths_dict[fidelity_num] = train_paths_in_fidelity
val_paths_dict[fidelity_num] = val_paths_in_fidelity
test_paths_dict[fidelity_num] = test_paths_in_fidelity
return train_paths_dict, val_paths_dict, test_paths_dict
def find_key_value(key, dictionary):
    """
    Recursively search *dictionary* (including nested dicts and lists of
    dicts) and yield every value stored under *key*.
    Source: https://gist.github.com/douglasmiranda/5127251#file-gistfile1-py-L2

    :param key: key to look for
    :param dictionary: (possibly nested) dictionary to search
    :return: generator over all values found for *key*
    """
    for current_key, value in dictionary.items():
        if current_key == key:
            yield value
        elif isinstance(value, dict):
            # Descend into nested dictionaries.
            yield from find_key_value(key, value)
        elif isinstance(value, list):
            # Descend into every element of a list of dictionaries.
            for element in value:
                yield from find_key_value(key, element)
def scatter_plot(xs, ys, xlabel, ylabel, title):
    """
    Create a scatter plot of predicted vs. groundtruth performance with a
    red identity line for reference.

    :param xs: x-axis values (numpy array)
    :param ys: y-axis values (numpy array)
    :param xlabel: label for the x-axis
    :param ylabel: label for the y-axis
    :param title: figure title
    :return: the created matplotlib figure
    """
    fig = plt.figure(figsize=(4, 3))
    plt.tight_layout()
    plt.grid(True, which='both', ls='-', alpha=0.5)
    plt.scatter(xs, ys, alpha=0.8, s=4)
    # Identity line spanning the observed x-range (perfect predictions lie on it).
    diagonal = np.linspace(xs.min(), xs.max())
    plt.plot(diagonal, diagonal, 'r', alpha=0.5)
    plt.xlabel(xlabel=xlabel)
    plt.ylabel(ylabel=ylabel)
    plt.title(title)
    return fig
def plot_predictions(mu_train, mu_test, var_train, var_test, train_y, test_y,
                     log_dir, name='random forest', x1=0, x2=100, y1=0, y2=100):
    """
    Plot predicted vs. true performance for train and test sets side by side
    and save the figure as '<name>.jpg' in *log_dir*.

    When predictive variances are given, points are colored by the Gaussian
    log-likelihood of the true value under the prediction; otherwise all
    points are plain blue.

    :param mu_train: predicted means on the train set
    :param mu_test: predicted means on the test set
    :param var_train: predicted variances on the train set (or None)
    :param var_test: predicted variances on the test set (or None)
    :param train_y: ground-truth train targets
    :param test_y: ground-truth test targets
    :param log_dir: directory the figure is saved to
    :param name: model name used in the titles and output file name
    :param x1, x2, y1, y2: axis limits for the test subplot
    :return: the current matplotlib figure
    """
    f, ax = plt.subplots(1, 2, figsize=(15, 6))
    if var_train is not None:
        # BUG FIX: `np.float` was removed in NumPy 1.24; the builtin `float`
        # is the documented replacement and is what the alias meant.
        ll = norm.logpdf(np.array(train_y, dtype=float), loc=mu_train, scale=np.sqrt(var_train))
        c_map = 'viridis'
    else:
        ll = 'b'
        c_map = None
    im1 = ax[0].scatter(mu_train, train_y, c=ll, cmap=c_map)
    ax[0].set_xlabel('predicted', fontsize=15)
    ax[0].set_ylabel('true', fontsize=15)
    ax[0].set_title('{} (train)'.format(name), fontsize=15)
    ax[0].plot([0, 100], [0, 100], 'k--')
    if var_train is not None:
        f.colorbar(im1, ax=ax[0])
    if var_test is not None:
        ll = norm.logpdf(np.array(test_y, dtype=float), loc=mu_test, scale=np.sqrt(var_test))
        c_map = 'viridis'
    else:
        ll = 'b'
        c_map = None
    ax[1].set_xlim([x1, x2])
    ax[1].set_ylim([y1, y2])
    im1 = ax[1].scatter(mu_test, test_y, c=ll, cmap=c_map)
    ax[1].set_xlabel('predicted', fontsize=15)
    ax[1].set_ylabel('true', fontsize=15)
    ax[1].set_title('{} (test)'.format(name), fontsize=15)
    ax[1].plot([0, 100], [0, 100], 'k--')
    if var_test is not None:
        f.colorbar(im1, ax=ax[1])
    plt.tight_layout()
    plt.savefig(os.path.join(log_dir, '_'.join(name.split()) + '.jpg'))
    return plt.gcf()
class AvgrageMeter(object):
    """Running-average tracker: accumulates a weighted sum and count and
    exposes their ratio as ``avg``."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.avg, self.sum, self.cnt = 0, 0, 0

    def update(self, val, n=1):
        """Record value *val* observed *n* times and refresh the average."""
        self.sum += val * n
        self.cnt += n
        self.avg = self.sum / self.cnt
|
# (width, height, refresh rate in Hz) triples this driver can generate.
supported_resolutions = [
    (640, 350, 70),
    (640, 350, 85),
    (640, 400, 70),
    (640, 400, 85),
    (640, 480, 60),
    (640, 480, 73),
    (640, 480, 75),
    (640, 480, 85),
    (640, 480, 100),
    (720, 400, 85),
    (768, 576, 60),
    (768, 576, 72),
    (768, 576, 75),
    (768, 576, 85),
    (768, 576, 100),
    (800, 600, 56),
    (800, 600, 60),
    (800, 600, 72),
    (800, 600, 75),
    (800, 600, 85),
    (800, 600, 100),
    (1024, 768, 43),
    (1024, 768, 60),
    (1024, 768, 70),
    (1024, 768, 75),
    (1024, 768, 85),
    (1024, 768, 100),
    (1152, 864, 60),
    (1152, 864, 75),
    (1152, 864, 85),
    (1152, 864, 100),
    (1280, 720, 60),
    (1280, 800, 60),
    (1280, 960, 60),
    (1280, 960, 72),
    (1280, 960, 75),
    (1280, 960, 85),
    (1280, 960, 100),
    (1280, 1024, 60),
    (1280, 1024, 75),
    (1280, 1024, 85),
    (1280, 1024, 100),
    (1368, 768, 60),
    (1400, 1050, 60),
    (1400, 1050, 72),
    (1400, 1050, 75),
    (1400, 1050, 85),
    (1400, 1050, 100),
    (1440, 900, 60),
    (1600, 1200, 60),
    (1600, 1200, 65),
    (1600, 1200, 70),
    (1600, 1200, 75),
    (1600, 1200, 85),
    (1600, 1200, 100),
    (1680, 1050, 60),
    (1792, 1344, 60),
    (1792, 1344, 75),
    (1856, 1392, 60),
    (1856, 1392, 75),
    (1920, 1080, 60),
    (1920, 1200, 60),
    (1920, 1440, 60),
    (1920, 1440, 75),
    # custom resolutions tested on my monitors, but not officially supported
    (400, 600, 60),
    (200, 600, 60),
    (400, 600, 72),
]
# pixel clock values are in MHz, keyed by (width, height, refresh Hz)
pixel_clock_dict = {
    (640, 350, 70): 25.175,
    (640, 350, 85): 31.5,
    (640, 400, 70): 25.175,
    (640, 400, 85): 31.5,
    (640, 480, 60): 25.175,
    (640, 480, 73): 31.5,
    (640, 480, 75): 31.5,
    (640, 480, 85): 36,
    (640, 480, 100): 43.16,
    (720, 400, 85): 35.5,
    (768, 576, 60): 34.96,
    (768, 576, 72): 42.93,
    (768, 576, 75): 45.51,
    (768, 576, 85): 51.84,
    (768, 576, 100): 62.57,
    (800, 600, 56): 36,
    (800, 600, 60): 40,
    (800, 600, 72): 50,
    (800, 600, 75): 49.5,
    (800, 600, 85): 56.25,
    (800, 600, 100): 68.18,
    (1024, 768, 43): 44.9,
    (1024, 768, 60): 65,
    (1024, 768, 70): 75,
    (1024, 768, 75): 78.8,
    (1024, 768, 85): 94.5,
    (1024, 768, 100): 113.31,
    (1152, 864, 60): 81.62,
    (1152, 864, 75): 108,
    (1152, 864, 85): 119.65,
    (1152, 864, 100): 143.47,
    (1280, 720, 60): 74.25,
    (1280, 800, 60): 83.46,
    (1280, 960, 60): 102.1,
    (1280, 960, 72): 124.54,
    (1280, 960, 75): 129.86,
    (1280, 960, 85): 148.5,
    (1280, 960, 100): 178.99,
    (1280, 1024, 60): 108,
    (1280, 1024, 75): 135,
    (1280, 1024, 85): 157.5,
    (1280, 1024, 100): 190.96,
    (1368, 768, 60): 85.86,
    (1400, 1050, 60): 122.61,
    (1400, 1050, 72): 149.34,
    (1400, 1050, 75): 155.85,
    (1400, 1050, 85): 179.26,
    (1400, 1050, 100): 214.39,
    (1440, 900, 60): 106.47,
    (1600, 1200, 60): 162,
    (1600, 1200, 65): 175.5,
    (1600, 1200, 70): 189,
    (1600, 1200, 75): 202.5,
    (1600, 1200, 85): 229.5,
    (1600, 1200, 100): 280.64,
    (1680, 1050, 60): 147.14,
    (1792, 1344, 60): 204.8,
    (1792, 1344, 75): 261,
    (1856, 1392, 60): 218.3,
    (1856, 1392, 75): 288,
    (1920, 1080, 60): 148.5,
    (1920, 1200, 60): 193.16,
    (1920, 1440, 60): 234,
    (1920, 1440, 75): 297,
    # custom resolutions tested on my monitors, but not officially supported
    (400, 600, 60): 20,
    (200, 600, 60): 10,
    (400, 600, 72): 25,
}
# constants are, keyed by (width, height, refresh Hz):
# h_front_porch, h_sync_width, h_back_porch, h_polarity
# v_front_porch, v_sync_width, v_back_porch, v_polarity
screen_constants = {
    (640, 350, 70): (16, 96, 48, 1, 37, 2, 60, 0),
    (640, 350, 85): (32, 64, 96, 1, 32, 3, 60, 0),
    (640, 400, 70): (16, 96, 48, 0, 12, 2, 35, 1),
    (640, 400, 85): (32, 64, 96, 0, 1, 3, 41, 1),
    (640, 480, 60): (16, 96, 48, 0, 10, 2, 33, 0),
    (640, 480, 73): (24, 40, 128, 0, 9, 2, 29, 0),
    (640, 480, 75): (16, 64, 120, 0, 1, 3, 16, 0),
    (640, 480, 85): (56, 56, 80, 0, 1, 3, 25, 0),
    (640, 480, 100): (40, 64, 104, 0, 1, 3, 25, 1),
    (720, 400, 85): (36, 72, 108, 0, 1, 3, 42, 1),
    (768, 576, 60): (24, 80, 104, 0, 1, 3, 17, 1),
    (768, 576, 72): (32, 80, 112, 0, 1, 3, 21, 1),
    (768, 576, 75): (40, 80, 120, 0, 1, 3, 22, 1),
    (768, 576, 85): (40, 80, 120, 0, 1, 3, 25, 1),
    (768, 576, 100): (48, 80, 128, 0, 1, 3, 31, 1),
    (800, 600, 56): (24, 72, 128, 1, 1, 2, 22, 1),
    (800, 600, 60): (40, 128, 88, 1, 1, 4, 23, 1),
    (800, 600, 72): (56, 120, 64, 1, 37, 6, 23, 1),
    (800, 600, 75): (16, 80, 160, 1, 1, 3, 21, 1),
    (800, 600, 85): (32, 64, 152, 1, 1, 3, 27, 1),
    (800, 600, 100): (48, 88, 136, 0, 1, 3, 32, 1),
    (1024, 768, 43): (8, 176, 56, 1, 0, 8, 41, 1),
    (1024, 768, 60): (24, 136, 160, 0, 3, 6, 29, 0),
    (1024, 768, 70): (24, 136, 144, 0, 3, 6, 29, 0),
    (1024, 768, 75): (16, 96, 176, 1, 1, 3, 28, 1),
    (1024, 768, 85): (48, 96, 208, 1, 1, 3, 36, 1),
    (1024, 768, 100): (72, 112, 184, 0, 1, 3, 42, 1),
    (1152, 864, 60): (64, 120, 184, 0, 1, 3, 27, 1),
    (1152, 864, 75): (64, 128, 256, 1, 1, 3, 32, 1),
    (1152, 864, 85): (72, 128, 200, 0, 1, 3, 39, 1),
    (1152, 864, 100): (80, 128, 208, 0, 1, 3, 47, 1),
    (1280, 720, 60): (72, 80, 216, 1, 3, 5, 22, 1),
    (1280, 800, 60): (64, 136, 200, 0, 1, 3, 24, 1),
    (1280, 960, 60): (80, 136, 216, 0, 1, 3, 30, 1),
    (1280, 960, 72): (88, 136, 224, 0, 1, 3, 37, 1),
    (1280, 960, 75): (88, 136, 224, 0, 1, 3, 38, 1),
    (1280, 960, 85): (64, 160, 224, 1, 1, 3, 47, 1),
    (1280, 960, 100): (96, 144, 240, 0, 1, 3, 53, 1),
    (1280, 1024, 60): (48, 112, 248, 1, 1, 3, 38, 1),
    (1280, 1024, 75): (16, 144, 248, 1, 1, 3, 38, 1),
    (1280, 1024, 85): (64, 160, 224, 1, 1, 3, 44, 1),
    (1280, 1024, 100): (96, 144, 240, 0, 1, 3, 57, 1),
    (1368, 768, 60): (72, 144, 216, 0, 1, 3, 23, 1),
    (1400, 1050, 60): (88, 152, 240, 0, 1, 3, 33, 1),
    (1400, 1050, 72): (96, 152, 248, 0, 1, 3, 40, 1),
    (1400, 1050, 75): (96, 152, 248, 0, 1, 3, 42, 1),
    (1400, 1050, 85): (104, 152, 256, 0, 1, 3, 49, 1),
    (1400, 1050, 100): (112, 152, 264, 0, 1, 3, 58, 1),
    (1440, 900, 60): (80, 152, 232, 0, 1, 3, 28, 1),
    (1600, 1200, 60): (64, 192, 304, 1, 1, 3, 46, 1),
    (1600, 1200, 65): (64, 192, 304, 1, 1, 3, 46, 1),
    (1600, 1200, 70): (64, 192, 304, 1, 1, 3, 46, 1),
    (1600, 1200, 75): (64, 192, 304, 1, 1, 3, 46, 1),
    (1600, 1200, 85): (64, 192, 304, 1, 1, 3, 46, 1),
    (1600, 1200, 100): (128, 176, 304, 0, 1, 3, 67, 1),
    (1680, 1050, 60): (104, 184, 288, 0, 1, 3, 33, 1),
    (1792, 1344, 60): (128, 200, 328, 0, 1, 3, 46, 1),
    (1792, 1344, 75): (96, 216, 352, 0, 1, 3, 69, 1),
    (1856, 1392, 60): (96, 224, 352, 0, 1, 3, 43, 1),
    (1856, 1392, 75): (128, 224, 352, 0, 1, 3, 104, 1),
    (1920, 1080, 60): (88, 44, 148, 1, 4, 5, 36, 1),
    (1920, 1200, 60): (128, 208, 336, 0, 1, 3, 38, 1),
    (1920, 1440, 60): (128, 208, 344, 0, 1, 3, 56, 1),
    (1920, 1440, 75): (144, 224, 352, 0, 1, 3, 56, 1),
    # custom resolutions tested on my monitors, but not officially supported
    (400, 600, 60): (20, 64, 44, 1, 1, 4, 23, 1),
    (200, 600, 60): (10, 32, 22, 1, 1, 4, 23, 1),
    (100, 600, 60): (5, 16, 11, 1, 1, 4, 23, 1),
    (400, 600, 72): (28, 60, 32, 1, 37, 6, 23, 1),
}
# these parameters are necessary because the parameter calculator given by
# xilinx doesn't give optimal answers
# m and d parameters for xilinx single dcm
# (maps target pixel clock in MHz -> (multiplier, divisor) for a 50 MHz input)
single_mul_div_50 = {
    10: (2, 10),
    20: (2, 5),
    25: (2, 4),
    31.5: (17, 27),
    34.96: (7, 10),
    35.5: (22, 31),
    36: (18, 25),
    40: (4, 5),
    42.93: (6, 7),
    43.16: (19, 22),
    45.51: (10, 11),
    49.5: (2, 2),
    51.84: (28, 27),
    56.25: (9, 8),
    62.57: (5, 4),
    65: (13, 10),
    68.18: (15, 11),
    75: (3, 2),
    78.8: (30, 19),
    81.62: (31, 19),
    83.46: (5, 3),
    85.86: (12, 7),
    94.5: (17, 9),
    102.1: (31, 15),
    106.47: (32, 15),
    119.65: (12, 5),
    122.61: (27, 11),
    124.54: (5, 2),
    129.86: (13, 5),
    135: (27, 10),
    143.47: (23, 8),
    149.34: (3, 1),
    155.85: (28, 9),
    157.5: (22, 7),
    162: (13, 4),
    175.5: (7, 2),
    178.99: (25, 7),
    179.26: (25, 7),
    202.5: (4, 1),
    204.8: (29, 7),
    214.39: (30, 7),
    229.5: (23, 5),
    234: (14, 3),
    261: (26, 5),
    280.64: (28, 5),
    288: (23, 4),
    297: (6, 1),
}
# clkdv, m, and d parameters for xilinx chained dcm
# (clocks that a single DCM cannot synthesize from 50 MHz)
chained_mul_div_50 = {
    25.175: (3.5, 30, 17),
    44.9: (3.5, 7, 22),
    74.25: (3.5, 26, 5),
    108: (2.5, 27, 5),
    113.31: (7.5, 17, 1),
    147.14: (0.5, 25, 17),
    148.5: (3.5, 31, 3),
    189: (4.5, 17, 1),
    190.96: (5.5, 21, 1),
    193.16: (2.5, 29, 3),
    218.3: (5.5, 24, 1),
}
# dcm's can't always be exactly accurate. these are the actual output
# frequencies of the dcm's (requested MHz -> achieved MHz)
actual_clock_freq = {
    10: 10,
    20: 20,
    25: 25,
    25.175: 25.210084033613445,
    31.5: 31.48148148148148,
    34.96: 35.0,
    35.5: 35.483870967741936,
    36: 36.0,
    40: 40.0,
    42.93: 42.857142857142854,
    43.16: 43.18181818181818,
    44.9: 44.89795918367347,
    45.51: 45.45454545454545,
    49.5: 50.0,
    51.84: 51.851851851851855,
    56.25: 56.25,
    62.57: 62.5,
    65: 65,
    68.18: 68.18181818181819,
    74.25: 74.28571428571429,
    75: 75.0,
    78.8: 78.94736842105263,
    81.62: 81.57894736842105,
    83.46: 83.33333333333333,
    85.86: 85.71428571428571,
    94.5: 94.44444444444444,
    102.1: 103.33333333333333,
    106.47: 106.66666666666667,
    108: 108,
    113.31: 113.33333333333333,
    119.65: 120.0,
    122.61: 122.72727272727273,
    124.54: 125.0,
    129.86: 130.0,
    135: 135.0,
    143.47: 143.75,
    147.14: 147.05882352941177,
    148.5: 147.61904761904762,
    149.34: 150.0,
    155.85: 155.55555555555554,
    157.5: 157.14285714285714,
    162: 162.5,
    175.5: 175.0,
    178.99: 178.57142857142858,
    179.26: 178.57142857142858,
    189: 188.88888888888889,
    190.96: 190.9090909090909,
    193.16: 193.33333333333331,
    202.5: 200.0,
    204.8: 207.14285714285714,
    214.39: 214.28571428571428,
    218.3: 218.1818181818182,
    229.5: 230.0,
    234: 233.33333333333334,
    261: 260.0,
    280.64: 280.0,
    288: 287.5,
    297: 300.0,
}
|
__version__ = '0.1.2'
from mini_api.api import Server, HTTPStatus
|
import os
import logging
from util import kubernetes
from util import config
from temporal.workerfactory import WorkerFactory
from temporal.workflow import workflow_method, Workflow, WorkflowClient
logging.basicConfig(level=logging.INFO)
# Module-level singletons: cluster client and configuration lookups.
kube = kubernetes.Cluster()
paul_config = config.Configuration()
worker_config = paul_config.read_workflow_config("server_count")
temporal_config = paul_config.read_temporal_config()
# Fail fast at import time if mandatory configuration is missing.
TASK_QUEUE = worker_config.get("task_queue")
if TASK_QUEUE is None:
    raise Exception("Missing worker task_queue configuration!")
NAMESPACE = temporal_config.get("namespace")
if NAMESPACE is None:
    raise Exception("Missing temporal namespace configuration!")
def count_running_servers(game_type: str = None):
    """
    Count game-server pods across all cluster namespaces.

    :param game_type: optional game type; when None every pod labelled with
        the server role is counted, otherwise only pods of that type.
    :return: number of matching pods
    """
    if game_type is None:
        selector = f"gaming.turnbros.app/role=server"
    else:
        selector = f"gaming.turnbros.app/type={game_type}"
    server_list = kube.api.list_pod_for_all_namespaces(label_selector=selector)
    return len(server_list.items)
# NOTE(review): this class shadows the `Workflow` imported from
# temporal.workflow above — consider renaming it (callers below would need
# the same rename, so it is only flagged here).
class Workflow:
    @workflow_method(task_queue=TASK_QUEUE)
    async def execute(self, payload: dict):
        """
        Report how many game servers are currently running.

        :param payload: request payload; the "gametype" key (presumably set by
            the chat frontend — confirm) optionally restricts the count to one
            game type, with the empty string meaning "all types".
        :return: a human-readable summary string
        """
        game_type = payload.get("gametype")
        # Treat an empty string the same as "no filter".
        if game_type == '':
            game_type = None
        server_count = count_running_servers(game_type)
        if game_type is not None:
            if server_count > 0:
                return f"I found {server_count} running {game_type} servers"
            return f"Unfortunately I couldn't find any {game_type} servers..."
        if server_count > 0:
            return f"Sure thing! There are {server_count} running servers in total"
        return "Something may have gone wrong because I didn't find any servers..."
async def worker_main():
    """
    Connect to the Temporal service, register the Workflow implementation on
    the configured task queue and start polling for work.
    """
    # Simple check to see if we're outside k8s: KUBERNETES_SERVICE_HOST is
    # injected into every pod by kubernetes, so its absence means local dev.
    if os.getenv("KUBERNETES_SERVICE_HOST", False):
        temporal_host = temporal_config.get("host")
    else:
        temporal_host = "localhost"
    temporal_port = temporal_config.get("port")
    client = WorkflowClient.new_client(host=temporal_host, port=temporal_port, namespace=NAMESPACE)
    factory = WorkerFactory(client, NAMESPACE)
    worker = factory.new_worker(TASK_QUEUE)
    worker.register_workflow_implementation_type(Workflow)
    factory.start()
    logging.info("Worker started")
from collections import namedtuple
from datetime import date
from dateutil.relativedelta import relativedelta
from flask import redirect, url_for, flash, render_template
from flask.blueprints import Blueprint
from flask_login import current_user
from flask_login.utils import login_required
from loguru import logger
import pandas as pd
from strength_log.models import Post, User, GeneralSetting
# Blueprint grouping the training-volume views.
training_volume = Blueprint("training_volume", __name__)
# One training session's total volume for a given lift on a given date.
TrainingVolume = namedtuple("TrainingVolume", ["lift", "date", "volume"])
def _weekly_volumes(volumes, lift, label_string_format="%m-%d-%y"):
    """Resample per-session volumes into weekly sums for one lift.

    Returns the (empty) input list unchanged when there is no data, mirroring
    the previous behavior of skipping empty lifts.
    """
    if len(volumes) == 0:
        return volumes
    df = pd.DataFrame.from_records(volumes, columns=TrainingVolume._fields)
    df = df.resample("W", on="date").sum().reset_index()
    return [
        TrainingVolume(lift, row.date.strftime(label_string_format), row.volume)
        for _, row in df.iterrows()
    ]


@training_volume.route("/training-volume")
@login_required
def view_training_volume():
    """Render weekly training volume per main lift over the last six months."""
    user = User.query.get(current_user.id)
    # Check if the user is not authenticated from email, do not give access
    if not user.authenticated:
        flash("Account must be authenticated to access Training Volume.", "danger")
        return redirect(url_for("main.index"))
    six_months_ago = date.today() - relativedelta(months=6)
    logger.debug(six_months_ago)
    posts = Post.query.filter(
        Post.author == current_user, Post.timestamp >= six_months_ago
    )
    # Bucket each session's total volume (sum of reps * weight) by main lift.
    # Unknown lifts fall into the deadlift bucket, as before.
    buckets = {"squat": [], "bench": [], "press": [], "deadlift": []}
    for post in posts:
        volume = sum(
            single_set.get("reps") * single_set.get("weight")
            for single_set in post.sets
        )
        bucket = post.main_lift if post.main_lift in buckets else "deadlift"
        buckets[bucket].append(TrainingVolume(post.main_lift, post.timestamp, volume))
    # Aggregate every bucket into weekly sums (the copy-pasted per-lift blocks
    # were factored into _weekly_volumes).
    for lift in buckets:
        buckets[lift] = _weekly_volumes(buckets[lift], lift)
    settings = GeneralSetting.query.filter_by(user_id=user.id).first()
    unit = settings.unit if settings else "lbs"
    return render_template(
        "training-volume.html",
        title="Weekly Training Volume",
        squat_volume=buckets["squat"],
        bench_volume=buckets["bench"],
        deadlift_volume=buckets["deadlift"],
        press_volume=buckets["press"],
        unit=unit,
    )
|
"""Tests
>>> arr = ['G', 'B', 'R', 'R', 'B', 'R', 'G']
>>> sort_rgb(arr)
>>> print(arr)
['R', 'R', 'R', 'G', 'G', 'B', 'B']
>>> arr = ['R', 'B', 'G']
>>> sort_rgb(arr)
>>> print(arr)
['R', 'G', 'B']
"""
from typing import List
def sort_rgb(arr: List[str]):
    """Sort *arr* of 'R'/'G'/'B' characters in place into R, G, B order.

    Two partitioning passes: first all 'R's to the front, then all 'G's
    immediately after them, leaving the 'B's at the end.
    """
    boundary = _sort_key(arr, 'R', 0)
    _sort_key(arr, 'G', boundary)


def _sort_key(arr, key, offset):
    """Move every *key* element to the front of arr[offset:] in place.

    Returns the index one past the last *key* element.
    """
    lo, hi = offset, len(arr) - 1
    while lo < hi:
        if arr[lo] == key:
            # Already in place; grow the key prefix.
            lo += 1
        elif arr[hi] == key:
            # Swap the stray key to the front of the unsorted region.
            arr[lo], arr[hi] = arr[hi], arr[lo]
        else:
            hi -= 1
    return lo
if __name__ == "__main__":
    # Run the examples in the module docstring as doctests.
    import doctest
    doctest.testmod()
|
import argparse
import os
import cv2
from . import models
from . import profile
def plot_correction(img, disk_attr, args, plotter):
    """
    Re-extract the intensity profile from a corrected image and plot it, so
    the flatness (linearity) of the correction can be assessed.

    :param img: corrected solar image
    :param disk_attr: (x, y, r) of the detected solar disk
    :param args: parsed CLI arguments (uses args['slices'])
    :param plotter: plotting helper providing plot_profile/plot_model
    """
    raw_stack = profile.extract_stack(img, disk_attr, args['slices'])
    cleaned_stack = profile.clean_stack(raw_stack)
    corrected_profile = profile.compress_stack(cleaned_stack)
    model = models.Linear()
    model.fit(corrected_profile)
    print("Linearity of correction: {}".format(model.coefs_str()))
    # TODO: implement kwargs forwarding for plot_profile?
    plotter.plot_profile(corrected_profile, zorder=1, color='brown',
                         label="Corrected profile")
    plotter.plot_model("Corrected", model, zorder=1, color='cyan')
def overlay_mec(img, disk_attr, color=(0, 255, 0)):
    """
    Draw the minimum enclosing circle, its center marker and a text label
    with the disk attributes onto *img* (modified in place).

    :param img: BGR image to annotate
    :param disk_attr: (x, y, r) of the detected solar disk
    :param color: BGR annotation color
    :return: the annotated image
    """
    x, y, r = disk_attr
    thickness = int(round(r/200))  # Reasonable thickness for different sizes.
    cv2.circle(img, (x, y), r, color, thickness)
    # Filled square marking the circle center.
    cv2.rectangle(img=img, pt1=(x-thickness, y-thickness),
                  pt2=(x+thickness, y+thickness), color=color, thickness=-1)
    label = 'x: {}, y: {}, r: {}'.format(*disk_attr)
    cv2.putText(img,
                text=label,
                org=(20, img.shape[0]-20),
                fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                fontScale=thickness/2,
                color=color,
                thickness=thickness)
    return img
def _pos_int(arg):
    """
    argparse type callable: parse *arg* as a strictly positive integer.

    :param arg: command-line string
    :return: the parsed positive int
    :raises argparse.ArgumentTypeError: if *arg* is not an integer or <= 0
    """
    try:
        val = int(arg)
    except ValueError:
        raise argparse.ArgumentTypeError("{} could not be interpreted as an "
                                         "integer.".format(arg))
    else:
        if val <= 0:
            # BUG FIX: argparse.ArgumentError expects an argparse *action* as
            # its first argument and crashed here with an AttributeError;
            # type callables must raise ArgumentTypeError instead.
            raise argparse.ArgumentTypeError("must be a positive integer.")
        return val
def _uint8(arg):
    """
    argparse type callable: parse *arg* as an unsigned 8-bit integer (0-255).

    :param arg: command-line string
    :return: the parsed int in [0, 255]
    :raises argparse.ArgumentTypeError: if *arg* is not an integer or out of range
    """
    try:
        val = int(arg)
    except ValueError:
        raise argparse.ArgumentTypeError("{} could not be interpreted as an "
                                         "integer.".format(arg))
    else:
        if val < 0 or val > 255:
            # BUG FIX: argparse.ArgumentError expects an argparse *action* as
            # its first argument and crashed here with an AttributeError;
            # type callables must raise ArgumentTypeError instead.
            raise argparse.ArgumentTypeError("must be within the range (0, 255)")
        return val
def _str2bool(arg):
    """
    argparse type callable: interpret common truthy/falsy spellings as bool.

    :raises argparse.ArgumentTypeError: if *arg* is not a recognised spelling
    """
    lowered = arg.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def parse_input(config):
    """
    Build and run the CLI argument parser, using *config* for defaults and
    allowed choices.

    :param config: dict providing default values and choice lists
    :return: dict of parsed arguments
    :raises argparse.ArgumentTypeError: if the image path is not an existing file
    """
    ap = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="Model and correct for limb darkening in a solar image.")
    ap.add_argument("-i", "--image",
                    required=True,
                    help="Path to a jpg or png solar image file.")
    ap.add_argument("-o", "--operation",
                    choices=config["operations"],
                    default=config["operations"][0],
                    help="Operation to perform.")
    ap.add_argument("-s", "--slices",
                    type=_pos_int,
                    default=config["slices"],
                    help="Number of slices to average to create the intensity "
                         "profile.")
    ap.add_argument("-t", "--threshold",
                    type=_uint8,
                    default=config["threshold"],
                    help="Brightness threshold for the solar disk (uint8).")
    ap.add_argument("-b", "--bias",
                    type=_uint8,
                    default=config["bias"],
                    help="Brightness bias for the correction (uint8).")
    ap.add_argument("-d", "--debug",
                    type=_str2bool,
                    nargs="?",
                    const=True,
                    default=config["debug"],
                    help="Provide intermediary output to the debug directory.")
    ap.add_argument("-m", "--model",
                    choices=config["models"],
                    default=config["models"][0],
                    help="How to model the limb darkening.")
    ap.add_argument("-p", "--model_parameter",
                    help="Model parameters, e.g. degree for polynomial.")
    ap.add_argument("-r", "--reference_model",
                    choices=config["reference_models"],
                    const=None,
                    help="Whether and which reference model to plot.")
    ap.add_argument("-P", "--plot_correction",
                    type=_str2bool,
                    nargs="?",
                    const=True,
                    default=config["plot_correction"],
                    help="Feed correction back to assess flatness.")
    ap.add_argument("-I", "--interactive_plot",
                    type=_str2bool,
                    nargs="?",
                    const=True,
                    default=config["interactive_plot"],
                    help="Show instead of saving plots.")
    ap.add_argument("-S", "--separate_dir",
                    type=_str2bool,
                    nargs="?",
                    const=True,
                    default=config["separate_dir"],
                    help="Generate separate output directories per image.")
    ap.add_argument("--out_dir",
                    default=config["out_dir"],
                    help="Path to a custom output directory.")
    ap.add_argument("--debug_dir",
                    default=config["debug_dir"],
                    help="Path to a custom debug directory.")
    args = vars(ap.parse_args())
    # argparse cannot validate the path itself, so check it here.
    if not os.path.isfile(args['image']):
        raise argparse.ArgumentTypeError(
            "{} is not a path to a file.".format(args['image']))
    return args
def generate_output_paths(args):
    """
    Build the output (and, in debug mode, debug) file paths for one image.

    Creates the output/debug directories as a side effect.

    :param args: parsed CLI arguments (see parse_input)
    :return: dict mapping output kind to file path
    """
    root, ext = os.path.splitext(os.path.basename(args['image']))
    out_dir = args["out_dir"]
    if args["separate_dir"]:
        out_dir = os.path.join(out_dir, root)
    os.makedirs(out_dir, exist_ok=True)
    paths = {
        'intensity': f"{out_dir}/{root}_intensity{ext}",
        'corrected': f"{out_dir}/{root}_corrected_{args['bias']}{ext}",
        'plot': f"{out_dir}/{root}_plot.png",
    }
    if args['debug']:
        print("Debug mode active.")
        # Debug output falls back to the regular output directory.
        debug_dir = args["debug_dir"] or args["out_dir"]
        if args["separate_dir"]:
            debug_dir = os.path.join(debug_dir, root)
        os.makedirs(debug_dir, exist_ok=True)
        paths['stack'] = f"{debug_dir}/{root}_stack{ext}"
        paths['stack_clean'] = f"{debug_dir}/{root}_stack_clean{ext}"
        paths['mec'] = f"{debug_dir}/{root}_mec{ext}"
    return paths
import unittest
import numpy as np
from neet.boolean import ECA, RewiredECA
from neet.network import Network
from neet.boolean.network import BooleanNetwork
class TestRewiredECA(unittest.TestCase):
"""
Unit tests of the RewiredECA class
"""
    def test_is_network(self):
        """
        Ensure that RewiredECA meets the requirement of a network
        """
        # Both the size-based and the wiring-based constructors must yield
        # instances of the generic and boolean network interfaces.
        self.assertTrue(isinstance(RewiredECA(23, size=3), Network))
        self.assertTrue(isinstance(RewiredECA(23, size=3), BooleanNetwork))
        self.assertTrue(isinstance(RewiredECA(
            30, wiring=[[-1, 0, 1], [0, 1, 2], [1, 2, 3]]), Network))
        self.assertTrue(isinstance(RewiredECA(
            30, wiring=[[-1, 0, 1], [0, 1, 2], [1, 2, 3]]), BooleanNetwork))
    def test_invalid_code(self):
        """
        Ensure that init fails when an invalid Wolfram code is provided
        """
        # Valid Wolfram codes are integers in [0, 255].
        with self.assertRaises(ValueError):
            RewiredECA(-1, size=3)
        with self.assertRaises(ValueError):
            RewiredECA(256, size=3)
        # Non-integer codes are a type error, not a value error.
        with self.assertRaises(TypeError):
            RewiredECA("30", size=3)
    def test_invalid_boundary(self):
        """
        Ensure that init fails when an invalid boundary condition is provided
        """
        # Boundary must be a tuple (not a list)...
        with self.assertRaises(TypeError):
            RewiredECA(30, boundary=[1, 2], size=3)
        # ...of exactly two elements...
        with self.assertRaises(ValueError):
            RewiredECA(30, boundary=(1, 0, 1), size=3)
        # ...each of which is a binary state.
        with self.assertRaises(ValueError):
            RewiredECA(30, boundary=(1, 2), size=3)
    def test_invalid_size(self):
        """
        Ensure that init fails when an invalid size is provided
        """
        # Size must be an int...
        with self.assertRaises(TypeError):
            RewiredECA(30, size="3")
        # ...and strictly positive.
        with self.assertRaises(ValueError):
            RewiredECA(30, size=-1)
        with self.assertRaises(ValueError):
            RewiredECA(30, size=0)
    def test_invalid_wiring(self):
        """
        Ensure that init fails when an invalid wiring is provided
        """
        # Wiring must be a sequence, not a scalar or string.
        with self.assertRaises(TypeError):
            RewiredECA(30, wiring=5)
        with self.assertRaises(TypeError):
            RewiredECA(30, wiring="apples")
        # It must be a non-empty, two-dimensional, 3-row matrix...
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[])
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[-1, 0, 1])
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=np.asarray([-1, 0, 1]))
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[[0], [0]])
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[[0, 0], [0], [0]])
        # ...with every entry a valid (possibly boundary) cell index.
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[[-2], [0], [0]])
        with self.assertRaises(ValueError):
            RewiredECA(30, wiring=[[2], [0], [0]])
    def test_invalid_size_wiring(self):
        """
        Ensure that size and wiring are not both provided, but at least one is
        """
        # Both provided -> error.
        with self.assertRaises(ValueError):
            RewiredECA(30, size=3, wiring=[])
        # Neither provided -> error (with or without a boundary).
        with self.assertRaises(ValueError):
            RewiredECA(30)
        with self.assertRaises(ValueError):
            RewiredECA(30, boundary=(0, 0))
    def test_size_init(self):
        """
        Ensure that size initialization is working correctly
        """
        # Size-only construction builds the standard nearest-neighbour wiring
        # (rows: left neighbour, self, right neighbour; -1/size are boundaries).
        eca = RewiredECA(30, size=2)
        self.assertEqual(30, eca.code)
        self.assertEqual(2, eca.size)
        self.assertTrue(np.array_equal([[-1, 0], [0, 1], [1, 2]], eca.wiring))
        eca = RewiredECA(23, boundary=(1, 0), size=5)
        self.assertEqual(23, eca.code)
        self.assertEqual(5, eca.size)
        self.assertTrue(
            np.array_equal([[-1, 0, 1, 2, 3], [0, 1, 2, 3, 4],
                            [1, 2, 3, 4, 5]],
                           eca.wiring))
    def test_wiring_init(self):
        """
        Ensure that wiring initialization is working correctly
        """
        # The network size is inferred from the number of wiring columns.
        eca = RewiredECA(30, wiring=[[0], [0], [0]])
        self.assertEqual(30, eca.code)
        self.assertEqual(1, eca.size)
        self.assertTrue(np.array_equal([[0], [0], [0]], eca.wiring))
        eca = RewiredECA(23, boundary=(1, 0), wiring=[
            [-1, -1, 0], [0, 1, 1], [2, 0, -1]])
        self.assertEqual(23, eca.code)
        self.assertEqual(3, eca.size)
        self.assertTrue(
            np.array_equal([[-1, -1, 0], [0, 1, 1], [2, 0, -1]],
                           eca.wiring))
    def test_setting_wiring(self):
        """
        Ensure that we cannot reshape the wiring
        """
        eca = RewiredECA(30, size=2)
        # Rebinding the wiring attribute as a whole is forbidden...
        with self.assertRaises(AttributeError):
            eca.wiring = np.asarray([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
        # ...but individual entries of the existing array remain mutable.
        self.assertEqual(-1, eca.wiring[0, 0])
        eca.wiring[0, 0] = 0
        self.assertEqual(0, eca.wiring[0, 0])
    def test_invalid_lattice_size(self):
        """
        Ensure that update fails when the lattice is the wrong size
        """
        # A size-3 network must reject lattices of any other length.
        eca = RewiredECA(30, size=3)
        with self.assertRaises(ValueError):
            eca.update([])
        with self.assertRaises(ValueError):
            eca.update([0])
        with self.assertRaises(ValueError):
            eca.update([0, 0])
    def test_invalid_lattice_state(self):
        """
        Ensure that the states of the lattice are binary states
        """
        eca = RewiredECA(30, size=3)
        # Any entry outside {0, 1} is rejected, wherever it occurs.
        with self.assertRaises(ValueError):
            eca.update([-1, 0, 1])
        with self.assertRaises(ValueError):
            eca.update([1, 0, -1])
        with self.assertRaises(ValueError):
            eca.update([2, 0, 0])
        with self.assertRaises(ValueError):
            eca.update([1, 0, 2])
        # Nested lists and strings are likewise invalid lattices.
        with self.assertRaises(ValueError):
            eca.update([[1], [0], [2]])
        with self.assertRaises(ValueError):
            eca.update("101")
    def test_reproduce_closed_ecas(self):
        """
        Ensure that RewiredECAs can reproduce closed ECAs
        """
        # With the default wiring, a RewiredECA must evolve identically to a
        # plain (periodic-boundary) ECA with the same rule over several steps.
        reca = RewiredECA(30, size=7)
        eca = ECA(30, size=7)
        state = [0, 0, 0, 1, 0, 0, 0]
        for _ in range(10):
            expect = eca.update(np.copy(state))
            got = reca.update(state)
            self.assertTrue(np.array_equal(expect, got))
    def test_reproduce_open_ecas(self):
        """
        Ensure that RewiredECAs can reproduce open ECAs
        """
        # Same comparison as the closed case, but with fixed boundary
        # conditions (1 on the left, 0 on the right).
        reca = RewiredECA(30, boundary=(1, 0), size=7)
        eca = ECA(30, size=7, boundary=(1, 0))
        state = [0, 0, 0, 1, 0, 0, 0]
        for _ in range(10):
            expect = eca.update(np.copy(state))
            got = reca.update(state)
            self.assertTrue(np.array_equal(expect, got))
    def test_rewired_network(self):
        """
        Test a non-trivially rewired network
        """
        # Start with the standard nearest-neighbour wiring...
        reca = RewiredECA(30, wiring=[
            [-1, 0, 1, 2, 3], [0, 1, 2, 3, 4], [1, 2, 3, 4, 5]
        ])
        state = [0, 0, 0, 0, 1]
        self.assertEqual([1, 0, 0, 1, 1], reca.update(state))
        # ...then mutate the wiring in place to a non-local topology and
        # follow the trajectory step by step (update works in place).
        reca.wiring[:, :] = [
            [0, 4, 1, 2, 3], [0, 1, 2, 3, 4], [0, 2, 3, 4, 5]
        ]
        state = [0, 0, 0, 0, 1]
        self.assertEqual([0, 1, 0, 1, 1], reca.update(state))
        self.assertEqual([0, 0, 0, 1, 0], reca.update(state))
        self.assertEqual([0, 0, 1, 1, 1], reca.update(state))
        self.assertEqual([0, 0, 1, 0, 0], reca.update(state))
        self.assertEqual([0, 1, 1, 1, 0], reca.update(state))
        self.assertEqual([0, 1, 0, 0, 1], reca.update(state))
        self.assertEqual([0, 0, 1, 1, 1], reca.update(state))
def test_reca_invalid_index(self):
    """
    Test for invalid index arguments
    """
    reca = RewiredECA(30, wiring=[
        [0, 4, 1, 2, 3], [0, 1, 2, 3, 4], [0, 2, 3, 4, 5]
    ])
    # Indices outside [0, size) must raise.
    for bad_index in (6, -1):
        with self.assertRaises(IndexError):
            reca.update([0, 0, 0, 0, 1], index=bad_index)
def test_reca_index(self):
    """
    Test the index argument
    """
    reca = RewiredECA(30, wiring=[
        [0, 4, 1, 2, 3], [0, 1, 2, 3, 4], [0, 2, 3, 4, 5]
    ])
    # (expected, input state, index to update) — only the indexed cell
    # may change.
    cases = [
        ([0, 0, 0, 1, 1], [0, 0, 0, 0, 1], 3),
        ([0, 0, 1, 1, 1], [0, 0, 0, 1, 1], 2),
        ([0, 1, 1, 1, 0], [0, 0, 1, 1, 0], 1),
        ([0, 1, 0, 1, 0], [0, 1, 0, 1, 0], 0),
    ]
    for expected, state, idx in cases:
        self.assertEqual(expected, reca.update(state, index=idx))
def test_reca_pin_none(self):
    """
    Ensure that pin behaves correctly for nil arguments
    """
    reca = RewiredECA(30, size=5)
    state = [0, 0, 1, 0, 0]
    # Both None and an empty list mean "pin nothing"; `state` is
    # updated in place between the two calls.
    self.assertEqual([0, 1, 1, 1, 0], reca.update(state, pin=None))
    self.assertEqual([1, 1, 0, 0, 1], reca.update(state, pin=[]))
def test_reca_pin_index_clash(self):
    """
    Ensure ValueError is raised when index and pin are provided
    """
    reca = RewiredECA(30, size=5)
    # The two arguments are mutually exclusive in every combination.
    for idx, pin in ((0, [1]), (1, [1]), (1, [0, 1])):
        with self.assertRaises(ValueError):
            reca.update([0, 0, 0, 0, 0], index=idx, pin=pin)
def test_reca_pin(self):
    """
    Test the pin argument
    """
    reca = RewiredECA(30, wiring=[
        [-1, 4, 1, 2, -1], [0, 1, 2, 3, 4], [0, 2, 3, 4, 5]
    ])
    state = [0, 0, 1, 0, 0]
    # Sequential updates of the same (mutated) state; pinned cells
    # keep their previous value.
    for expected, pinned in [
        ([0, 0, 1, 1, 0], [1]),
        ([0, 1, 1, 1, 0], [3]),
        ([0, 1, 1, 1, 0], [3, 2]),
        ([0, 1, 0, 1, 0], [-2]),
    ]:
        self.assertEqual(expected, reca.update(state, pin=pinned))
    # Repeat with fixed boundary conditions.
    reca.boundary = (1, 1)
    state = [0, 0, 1, 0, 0]
    for expected, pinned in [
        ([1, 0, 1, 0, 0], [1, 3]),
        ([1, 1, 1, 0, 0], [-2, -5]),
        ([1, 1, 1, 1, 0], [0, 2]),
    ]:
        self.assertEqual(expected, reca.update(state, pin=pinned))
def test_reca_values_none(self):
    """
    Ensure that values behaves correctly for nil arguments
    """
    reca = RewiredECA(30, size=5)
    state = [0, 0, 1, 0, 0]
    # Both None and an empty dict mean "clamp nothing"; `state` is
    # updated in place between the two calls.
    self.assertEqual([0, 1, 1, 1, 0], reca.update(state, values=None))
    self.assertEqual([1, 1, 0, 0, 1], reca.update(state, values={}))
def test_reca_invalid_values(self):
    """
    Ensure ValueError is raised for invalid values
    """
    reca = RewiredECA(30, size=5)
    # Clamped values must themselves be binary.
    for bad_values in ({0: 2}, {0: -1}):
        with self.assertRaises(ValueError):
            reca.update([0, 0, 0, 0, 0], values=bad_values)
def test_reca_values_index_clash(self):
    """
    Ensure ValueError is raised when index and values are both provided
    """
    reca = RewiredECA(30, size=5)
    # `index` and `values` are mutually exclusive in every combination.
    for idx, fixed in ((0, {0: 1}), (1, {1: 0}), (1, {0: 0, 1: 0})):
        with self.assertRaises(ValueError):
            reca.update([0, 0, 0, 0, 0], index=idx, values=fixed)
def test_reca_values_pin_clash(self):
    """
    Ensure ValueError is raised when pin and values are both provided
    """
    reca = RewiredECA(30, size=5)
    # `pin` and `values` are mutually exclusive in every combination.
    clashes = [
        ([0], {0: 1}),
        ([1], {1: 0}),
        ([1], {0: 0, 1: 0}),
        ([1, 0], {0: 0, 1: 0}),
    ]
    for pinned, fixed in clashes:
        with self.assertRaises(ValueError):
            reca.update([0, 0, 0, 0, 0], pin=pinned, values=fixed)
def test_reca_values(self):
    """
    Test the values argument
    """
    reca = RewiredECA(30, wiring=[
        [-1, 4, 1, 2, -1], [0, 1, 2, 3, 4], [0, 2, 3, 4, 5]
    ])
    state = [0, 0, 1, 0, 0]
    # Sequential updates of the same (mutated) state; cells named in
    # `values` are clamped to the given binary value.
    for expected, fixed in [
        ([1, 1, 1, 1, 0], {0: 1}),
        ([1, 1, 0, 0, 0], {-1: 0}),
        ([1, 1, 1, 1, 1], {-2: 1}),
        ([1, 0, 1, 0, 0], {2: 1, -5: 1}),
    ]:
        self.assertEqual(expected, reca.update(state, values=fixed))
    # Repeat with fixed boundary conditions.
    reca.boundary = (1, 1)
    state = [0, 0, 1, 0, 0]
    for expected, fixed in [
        ([0, 1, 1, 1, 0], {0: 0}),
        ([1, 1, 0, 0, 1], {-1: 1}),
        ([0, 0, 1, 0, 0], {-2: 0}),
        ([0, 1, 0, 1, 0], {2: 0, -5: 0}),
    ]:
        self.assertEqual(expected, reca.update(state, values=fixed))
|
'''Preprocessing data.'''
import os
import numpy as np
import cv2
from tensorflow.python.keras.preprocessing.image import DirectoryIterator as Keras_DirectoryIterator
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator as Keras_ImageDataGenerator
from tensorflow.python.keras.preprocessing.image import load_img, img_to_array, array_to_img
from tensorflow.python.keras.backend import floatx
np.random.seed(3)
def resize_image(
        x,
        size_target=(448, 448),
        rate_scale=1.0,
        flg_keep_aspect=False,
        flg_random_scale=False):
    '''Resizing image.
    Args:
        x: input image (H, W, C) or a batch (N, H, W, C); only the first
            image of a batch is used.
        size_target: a tuple (height, width) of the target size, a
            1-tuple applied to both dimensions, or None to scale the
            input size by ``rate_scale``.
        rate_scale: scale rate.
        flg_keep_aspect: a bool of keeping image aspect or not.
        flg_random_scale: a bool of scaling image randomly.
    Returns:
        Resized image.
    Raises:
        ValueError: if the input is not a 3-D or 4-D array.
    '''
    # Convert to numpy array
    if not isinstance(x, np.ndarray):
        img = np.asarray(x)
    else:
        img = x
    # Determine the input size; a batch collapses to its first image.
    if len(img.shape) == 4:
        _o, size_height_img, size_width_img, _c = img.shape
        img = img[0]
    elif len(img.shape) == 3:
        size_height_img, size_width_img, _c = img.shape
    else:
        # Previously a 2-D input fell through and crashed later with a
        # NameError; fail fast with a clear message instead.
        raise ValueError('expected a 3-D or 4-D image array, got shape %r' % (img.shape,))
    # Resolve the target size. The None case must be checked FIRST:
    # the original code called len(size_target) before the None check,
    # raising TypeError whenever size_target was None.
    if size_target is None:
        size_height_target = size_height_img * rate_scale
        size_width_target = size_width_img * rate_scale
    elif len(size_target) == 1:
        # A 1-tuple applies the same extent to both dimensions.
        # (The original assigned the tuple itself, breaking arithmetic.)
        size_height_target = size_target[0]
        size_width_target = size_target[0]
    else:
        size_height_target = size_target[0]
        size_width_target = size_target[1]
    # Calculate coefficient to scale the smaller side up to the target size
    coef_height, coef_width = 1, 1
    if size_height_img < size_height_target:
        coef_height = size_height_target / size_height_img
    if size_width_img < size_width_target:
        coef_width = size_width_target / size_width_img
    ## scale coefficient if specified
    low_scale = rate_scale
    if flg_random_scale:
        low_scale = 1.0
    coef_max = max(coef_height, coef_width) * np.random.uniform(low=low_scale, high=rate_scale)
    # Resize image
    size_height_resize = np.ceil(size_height_img * coef_max)
    size_width_resize = np.ceil(size_width_img * coef_max)
    method_interpolation = cv2.INTER_CUBIC
    if flg_keep_aspect:
        # Scale both sides by the same factor so the aspect ratio is kept.
        img_resized = cv2.resize(
            img,
            dsize=(int(size_width_resize), int(size_height_resize)),
            interpolation=method_interpolation)
    else:
        # Stretch each side independently toward the (randomly scaled) target.
        img_resized = cv2.resize(
            img,
            dsize=(
                int(size_width_target * np.random.uniform(low=low_scale, high=rate_scale)),
                int(size_height_target * np.random.uniform(low=low_scale, high=rate_scale))),
            interpolation=method_interpolation)
    return img_resized
def center_crop_image(x, size_target=(448, 448)):
    '''Crop image from center point.
    Args:
        x: input image (H, W, C) or a batch (N, H, W, C); only the first
            image of a batch is cropped.
        size_target: a tuple (height, width) of the target size, or a
            1-tuple applied to both dimensions.
    Returns:
        Center cropped image.
    '''
    # Convert to numpy array
    if not isinstance(x, np.ndarray):
        img = np.asarray(x)
    else:
        img = x
    # Set size. A 1-tuple applies the same extent to both dimensions;
    # the original assigned the tuple itself, breaking the arithmetic
    # below.
    if len(size_target) == 1:
        size_heigth_target = size_target[0]
        size_width_target = size_target[0]
    if len(size_target) == 2:
        size_heigth_target = size_target[0]
        size_width_target = size_target[1]
    if len(img.shape) == 4:
        _o, size_height_img, size_width_img, _c = img.shape
        img = img[0]
    elif len(img.shape) == 3:
        size_height_img, size_width_img, _c = img.shape
    # Crop a (target_h, target_w) window centred in the image.
    h_start = int((size_height_img - size_heigth_target) / 2)
    w_start = int((size_width_img - size_width_target) / 2)
    img_cropped = img[h_start:h_start + size_heigth_target, w_start:w_start + size_width_target, :]
    return img_cropped
def random_crop_image(x, size_target=(448, 448)):
    '''Crop image from random point.
    Args:
        x: input image (H, W, C) or a batch (N, H, W, C); only the first
            image of a batch is cropped.
        size_target: a tuple (height, width) of the target size, or a
            1-tuple applied to both dimensions.
    Returns:
        Random cropped image.
    '''
    # Convert to numpy array
    if not isinstance(x, np.ndarray):
        img = np.asarray(x)
    else:
        img = x
    # Set size. A 1-tuple applies the same extent to both dimensions;
    # the original assigned the tuple itself, breaking the arithmetic
    # below.
    if len(size_target) == 1:
        size_heigth_target = size_target[0]
        size_width_target = size_target[0]
    if len(size_target) == 2:
        size_heigth_target = size_target[0]
        size_width_target = size_target[1]
    if len(img.shape) == 4:
        _o, size_height_img, size_width_img, _c = img.shape
        img = img[0]
    elif len(img.shape) == 3:
        size_height_img, size_width_img, _c = img.shape
    # Pick a random top-left corner within the slack. Guard with `> 0`
    # (not `!= 0`): a negative margin (target larger than image) made
    # np.random.randint raise ValueError in the original.
    margin_h = size_height_img - size_heigth_target
    margin_w = size_width_img - size_width_target
    h_start = 0
    w_start = 0
    if margin_h > 0:
        h_start = np.random.randint(low=0, high=margin_h)
    if margin_w > 0:
        w_start = np.random.randint(low=0, high=margin_w)
    img_cropped = img[h_start:h_start + size_heigth_target, w_start:w_start + size_width_target, :]
    return img_cropped
def horizontal_flip_image(x):
    '''Flip image horizontally.
    Args:
        x: input image (..., H, W, C); the width axis is axis 1.
    Returns:
        The image flipped along its width axis with probability 0.5,
        otherwise the input unchanged.
    '''
    flip = np.random.random() >= 0.5
    return x[:, ::-1, :] if flip else x
def normalize_image(x, mean=(0., 0., 0.), std=(1.0, 1.0, 1.0)):
    '''Normalization.
    Args:
        x: input image, (H, W, C) or a batch (N, H, W, C).
        mean: per-channel mean to subtract.
        std: per-channel standard deviation to divide by.
    Returns:
        Normalized float32 image.
    '''
    x = np.asarray(x, dtype=np.float32)
    rank = len(x.shape)
    # Normalize each of the three channels in place; channels are
    # always the last axis.
    for channel in range(3):
        if rank == 4:
            x[:, :, :, channel] = (x[:, :, :, channel] - mean[channel]) / std[channel]
        elif rank == 3:
            x[:, :, channel] = (x[:, :, channel] - mean[channel]) / std[channel]
    return x
def preprocess_input(x):
    '''Preprocesses a tensor or Numpy array encoding a batch of images.'''
    # Subtract the per-channel dataset means (no std scaling).
    mean_rgb = [123.82988033, 127.3509729, 110.25606303]
    return normalize_image(x, mean=mean_rgb)
class DirectoryIterator(Keras_DirectoryIterator):
    '''Inherit from keras' DirectoryIterator.

    Overrides batch construction so images are loaded at their native
    size (target_size=None) instead of being resized by Keras.
    '''

    def _get_batches_of_transformed_samples(self, index_array):
        # Pre-allocate the batch: (batch, *image_shape) in Keras' floatx dtype.
        batch_x = np.zeros(
            (len(index_array),) + self.image_shape,
            dtype=floatx())
        grayscale = self.color_mode == 'grayscale'
        # Build batch of image data
        for i, j in enumerate(index_array):
            fname = self.filenames[j]
            img = load_img(
                os.path.join(self.directory, fname),
                grayscale=grayscale,
                target_size=None,
                interpolation=self.interpolation)
            x = img_to_array(img, data_format=self.data_format)
            # Pillow images should be closed after `load_img`, but not PIL images.
            if hasattr(img, 'close'):
                img.close()
            # Apply the generator's configured normalization/whitening.
            x = self.image_data_generator.standardize(x)
            batch_x[i] = x
        # Optionally save augmented images to disk for debugging purposes
        if self.save_to_dir:
            for i, j in enumerate(index_array):
                img = array_to_img(batch_x[i], self.data_format, scale=True)
                fname = '{prefix}_{index}_{hash}.{format}'.format(
                    prefix=self.save_prefix,
                    index=j,
                    hash=np.random.randint(1e7),
                    format=self.save_format)
                img.save(os.path.join(self.save_to_dir, fname))
        # Build batch of labels
        if self.class_mode == 'input':
            batch_y = batch_x.copy()
        elif self.class_mode == 'sparse':
            batch_y = self.classes[index_array]
        elif self.class_mode == 'binary':
            batch_y = self.classes[index_array].astype(floatx())
        elif self.class_mode == 'categorical':
            # One-hot encode the class labels.
            batch_y = np.zeros(
                (len(batch_x), self.num_classes),
                dtype=floatx())
            for i, label in enumerate(self.classes[index_array]):
                batch_y[i, label] = 1.
        else:
            # class_mode is None (or unrecognized): yield images only.
            return batch_x
        return batch_x, batch_y
class ImageDataGenerator(Keras_ImageDataGenerator):
    '''Inherit from keras' ImageDataGenerator.

    Overrides flow_from_directory so it yields this module's
    DirectoryIterator (native-size loading) instead of Keras' own.
    '''

    def flow_from_directory(
            self, directory,
            target_size=(256, 256), color_mode='rgb',
            classes=None, class_mode='categorical',
            batch_size=16, shuffle=True, seed=None,
            save_to_dir=None,
            save_prefix='',
            save_format='png',
            follow_links=False,
            subset=None,
            interpolation='nearest'
    ):
        # Same signature as the Keras method; only the iterator class differs.
        return DirectoryIterator(
            directory, self,
            target_size=target_size, color_mode=color_mode,
            classes=classes, class_mode=class_mode,
            data_format=self.data_format,
            batch_size=batch_size, shuffle=shuffle, seed=seed,
            save_to_dir=save_to_dir,
            save_prefix=save_prefix,
            save_format=save_format,
            follow_links=follow_links,
            subset=subset,
            interpolation=interpolation)
if __name__ == "__main__":
    # This module is import-only; there is no command-line behavior.
    pass
|
'''
Author: Karan Sharma
Problem:
Page 28 at [link]
'''
import hashlib as hasher
from core import create_genesis_block, Block
import datetime as date
class BlockChain:
    """A minimal linked-list blockchain tracking only the head block and hash."""

    def __init__(self):
        # Every chain starts from a genesis block.
        self.headBlock = Block("Genesis Block")
        self.headHash = self.headBlock.hashMe()

    def add_block(self, data):
        """Append a new block carrying *data* and print its details."""
        block = Block(data, self.headHash, self.headBlock)
        self.headBlock = block
        self.headHash = block.hashMe()
        print("Block added!")
        print("Index: " + str(self.headBlock.index))
        print("Timestamp: " + str(self.headBlock.timestamp))
        print("Hash: " + str(self.headHash))

    def demonstrate(self):
        """Grow the chain by five sample blocks to show it working."""
        for _ in range(5):
            self.add_block("Sample data")
# Demo: build a chain and append five sample blocks, printing each one.
newChain = BlockChain()
newChain.demonstrate()
|
import matplotlib.pyplot as plt
import pandas as pd
import plotly.graph_objects as go
import numpy as np
import plotly.io as pio
# Render plotly figures in the default web browser.
pio.renderers.default = "browser"
# Load the area/price dataset from the parent directory.
df = pd.read_csv("../Dataset.csv")
x = df["Area"]
y = df["Price"]
# Interactive scatter widget whose marker styles can be changed on click.
f = go.FigureWidget([go.Scatter(x=x, y=y, mode='markers')])
scatter = f.data[0]
# NOTE(review): the style arrays are hard-coded to length 100 — this
# assumes Dataset.csv has at most 100 rows; confirm against the data.
colors = ['#a3a7e4'] * 100
scatter.marker.color = colors
scatter.marker.size = [10] * 100
f.layout.hovermode = 'closest'
# create our callback function
def update_point(trace, points, selector):
    """Click callback: highlight the clicked markers (green, enlarged)."""
    new_colors = list(scatter.marker.color)
    new_sizes = list(scatter.marker.size)
    for idx in points.point_inds:
        new_colors[idx] = '#bae2be'
        new_sizes[idx] = 20
    # Apply both style changes in a single redraw.
    with f.batch_update():
        scatter.marker.color = new_colors
        scatter.marker.size = new_sizes
# Register the click handler and open the interactive figure.
scatter.on_click(update_point)
f.show()
|
# -*- coding: utf-8 -*-
import tensorflow as tf
import sys
def negative_l1_distance(x1, x2, axis=1):
    """
    Negative L1 Distance.
    .. math:: L = - \\sum_i \\abs(x1_i - x2_i)
    :param x1: First term.
    :param x2: Second term.
    :param axis: Reduction Indices.
    :return: Similarity Value.
    """
    return -tf.reduce_sum(tf.abs(x1 - x2), axis=axis)
def negative_l2_distance(x1, x2, axis=1):
    """
    Negative L2 Distance.
    .. math:: L = - \\sqrt{\\sum_i (x1_i - x2_i)^2}
    :param x1: First term.
    :param x2: Second term.
    :param axis: Reduction Indices.
    :return: Similarity Value.
    """
    squared_sum = tf.reduce_sum(tf.square(x1 - x2), axis=axis)
    return -tf.sqrt(squared_sum)
def negative_square_l2_distance(x1, x2, axis=1):
    """
    Negative Square L2 Distance.
    .. math:: L = - \\sum_i (x1_i - x2_i)^2
    :param x1: First term.
    :param x2: Second term.
    :param axis: Reduction Indices.
    :return: Similarity Value.
    """
    return -tf.reduce_sum(tf.square(x1 - x2), axis=axis)
def dot_product(x1, x2, axis=1):
    """
    Dot Product.
    .. math:: L = \\sum_i x1_i x2_i
    :param x1: First term.
    :param x2: Second term.
    :param axis: Reduction Indices.
    :return: Similarity Value.
    """
    elementwise = x1 * x2
    return tf.reduce_sum(elementwise, axis=axis)
# Aliases
# Short upper/lower-case names so a similarity function can be selected
# by string via get_function (e.g. get_function('l2')).
l1 = L1 = negative_l1_distance
l2 = L2 = negative_l2_distance
l2_sqr = L2_SQR = negative_square_l2_distance
dot = DOT = dot_product
def get_function(function_name):
    """Look up a similarity function defined in this module by name.

    :param function_name: Name (or alias) of the similarity function.
    :return: The corresponding callable.
    :raises ValueError: If no attribute with that name exists.
    """
    this_module = sys.modules[__name__]
    try:
        return getattr(this_module, function_name)
    except AttributeError:
        raise ValueError('Unknown similarity function: {}'.format(function_name))
|
import ipaddress
import itertools
import logging
from collections import deque
from ipaddress import IPv4Address, IPv6Address
from typing import Dict, List, Optional, Union
from h2.config import H2Configuration
from h2.connection import H2Connection
from h2.errors import ErrorCodes
from h2.events import (
Event, ConnectionTerminated, DataReceived, ResponseReceived,
SettingsAcknowledged, StreamEnded, StreamReset, UnknownFrameReceived,
WindowUpdated
)
from h2.exceptions import FrameTooLargeError, H2Error
from twisted.internet.defer import Deferred
from twisted.internet.error import TimeoutError
from twisted.internet.interfaces import IHandshakeListener, IProtocolNegotiationFactory
from twisted.internet.protocol import connectionDone, Factory, Protocol
from twisted.internet.ssl import Certificate
from twisted.protocols.policies import TimeoutMixin
from twisted.python.failure import Failure
from twisted.web.client import URI
from zope.interface import implementer
from scrapy.core.http2.stream import Stream, StreamCloseReason
from scrapy.http import Request
from scrapy.settings import Settings
from scrapy.spiders import Spider
logger = logging.getLogger(__name__)
PROTOCOL_NAME = b"h2"
class InvalidNegotiatedProtocol(H2Error):
    """Raised when TLS negotiation yields a protocol other than HTTP/2."""

    def __init__(self, negotiated_protocol: bytes) -> None:
        self.negotiated_protocol = negotiated_protocol

    def __str__(self) -> str:
        return "Expected {!r}, received {!r}".format(PROTOCOL_NAME, self.negotiated_protocol)
class RemoteTerminatedConnection(H2Error):
    """Raised when the remote peer ends the connection with a GOAWAY frame."""

    def __init__(
        self,
        remote_ip_address: Optional[Union[IPv4Address, IPv6Address]],
        event: ConnectionTerminated,
    ) -> None:
        self.remote_ip_address = remote_ip_address
        self.terminate_event = event

    def __str__(self) -> str:
        return 'Received GOAWAY frame from {!r}'.format(self.remote_ip_address)
class MethodNotAllowed405(H2Error):
    """Raised when the server replies with a plaintext HTTP/2 405 response."""

    def __init__(self, remote_ip_address: Optional[Union[IPv4Address, IPv6Address]]) -> None:
        self.remote_ip_address = remote_ip_address

    def __str__(self) -> str:
        return "Received 'HTTP/2.0 405 Method Not Allowed' from {!r}".format(self.remote_ip_address)
@implementer(IHandshakeListener)
class H2ClientProtocol(Protocol, TimeoutMixin):
    """Twisted protocol implementing a single HTTP/2 client connection.

    Requests are queued in a FIFO pool and turned into h2 streams once
    the connection is established and the remote has acknowledged our
    settings; at most ``allowed_max_concurrent_streams`` streams are
    active at any time.
    """

    # Seconds without sending or receiving data before the connection
    # is dropped via timeoutConnection().
    IDLE_TIMEOUT = 240

    def __init__(self, uri: URI, settings: Settings, conn_lost_deferred: Deferred) -> None:
        """
        Arguments:
            uri -- URI of the base url to which HTTP/2 Connection will be made.
                uri is used to verify that incoming client requests have correct
                base URL.
            settings -- Scrapy project settings
            conn_lost_deferred -- Deferred fires with the reason: Failure to notify
                that connection was lost
        """
        self._conn_lost_deferred = conn_lost_deferred
        config = H2Configuration(client_side=True, header_encoding='utf-8')
        self.conn = H2Connection(config=config)
        # ID of the next request stream
        # Following the convention - 'Streams initiated by a client MUST
        # use odd-numbered stream identifiers' (RFC 7540 - Section 5.1.1)
        self._stream_id_generator = itertools.count(start=1, step=2)
        # Streams are stored in a dictionary keyed off their stream IDs
        self.streams: Dict[int, Stream] = {}
        # If requests are received before connection is made we keep
        # all requests in a pool and send them as the connection is made
        self._pending_request_stream_pool: deque = deque()
        # Save an instance of errors raised which lead to losing the connection
        # We pass these instances to the streams ResponseFailed() failure
        self._conn_lost_errors: List[BaseException] = []
        # Some meta data of this connection
        # initialized when connection is successfully made
        self.metadata: Dict = {
            # Peer certificate instance
            'certificate': None,
            # Address of the server we are connected to which
            # is updated when HTTP/2 connection is made successfully
            'ip_address': None,
            # URI of the peer HTTP/2 connection is made
            'uri': uri,
            # Both ip_address and uri are used by the Stream before
            # initiating the request to verify that the base address
            # is correct (comment appears truncated in the original)
            # Variables taken from Project Settings
            'default_download_maxsize': settings.getint('DOWNLOAD_MAXSIZE'),
            'default_download_warnsize': settings.getint('DOWNLOAD_WARNSIZE'),
            # Counter to keep track of opened streams. This counter
            # is used to make sure that not more than MAX_CONCURRENT_STREAMS
            # streams are opened which leads to ProtocolError
            # We use simple FIFO policy to handle pending requests
            'active_streams': 0,
            # Flag to keep track if settings were acknowledged by the remote
            # This ensures that we have established a HTTP/2 connection
            'settings_acknowledged': False,
        }

    @property
    def h2_connected(self) -> bool:
        """Boolean to keep track of the connection status.
        This is used while initiating pending streams to make sure
        that we initiate stream only during active HTTP/2 Connection
        """
        return bool(self.transport.connected) and self.metadata['settings_acknowledged']

    @property
    def allowed_max_concurrent_streams(self) -> int:
        """We keep total two streams for client (sending data) and
        server side (receiving data) for a single request. To be safe
        we choose the minimum. Since this value can change in event
        RemoteSettingsChanged we make variable a property.
        """
        return min(
            self.conn.local_settings.max_concurrent_streams,
            self.conn.remote_settings.max_concurrent_streams
        )

    def _send_pending_requests(self) -> None:
        """Initiate all pending requests from the deque following FIFO
        We make sure that at any time {allowed_max_concurrent_streams}
        streams are active.
        """
        while (
            self._pending_request_stream_pool
            and self.metadata['active_streams'] < self.allowed_max_concurrent_streams
            and self.h2_connected
        ):
            self.metadata['active_streams'] += 1
            stream = self._pending_request_stream_pool.popleft()
            stream.initiate_request()
            self._write_to_transport()

    def pop_stream(self, stream_id: int) -> Stream:
        """Perform cleanup when a stream is closed

        Removes the stream from the registry, frees its concurrency
        slot and tries to start queued requests.
        """
        stream = self.streams.pop(stream_id)
        self.metadata['active_streams'] -= 1
        self._send_pending_requests()
        return stream

    def _new_stream(self, request: Request, spider: Spider) -> Stream:
        """Instantiates a new Stream object

        Download limits fall back to the project defaults when the
        spider does not override them.
        """
        stream = Stream(
            stream_id=next(self._stream_id_generator),
            request=request,
            protocol=self,
            download_maxsize=getattr(spider, 'download_maxsize', self.metadata['default_download_maxsize']),
            download_warnsize=getattr(spider, 'download_warnsize', self.metadata['default_download_warnsize']),
        )
        self.streams[stream.stream_id] = stream
        return stream

    def _write_to_transport(self) -> None:
        """ Write data to the underlying transport connection
        from the HTTP2 connection instance if any
        """
        # Reset the idle timeout as connection is still actively sending data
        self.resetTimeout()
        data = self.conn.data_to_send()
        self.transport.write(data)

    def request(self, request: Request, spider: Spider) -> Deferred:
        """Queue *request* on this connection.

        Returns a Deferred that fires with the stream's response (or
        failure). Raises TypeError for non-Request arguments.
        """
        if not isinstance(request, Request):
            raise TypeError(f'Expected scrapy.http.Request, received {request.__class__.__qualname__}')
        stream = self._new_stream(request, spider)
        d = stream.get_response()
        # Add the stream to the request pool
        self._pending_request_stream_pool.append(stream)
        # If we receive a request when connection is idle
        # We need to initiate pending requests
        self._send_pending_requests()
        return d

    def connectionMade(self) -> None:
        """Called by Twisted when the connection is established. We can start
        sending some data now: we should open with the connection preamble.
        """
        # Initialize the timeout
        self.setTimeout(self.IDLE_TIMEOUT)
        destination = self.transport.getPeer()
        self.metadata['ip_address'] = ipaddress.ip_address(destination.host)
        # Initiate H2 Connection
        self.conn.initiate_connection()
        self._write_to_transport()

    def _lose_connection_with_error(self, errors: List[BaseException]) -> None:
        """Helper function to lose the connection with the error sent as a
        reason"""
        self._conn_lost_errors += errors
        self.transport.loseConnection()

    def handshakeCompleted(self) -> None:
        """
        Close the connection if it's not made via the expected protocol
        """
        if self.transport.negotiatedProtocol is not None and self.transport.negotiatedProtocol != PROTOCOL_NAME:
            # we have not initiated the connection yet, no need to send a GOAWAY frame to the remote peer
            self._lose_connection_with_error([InvalidNegotiatedProtocol(self.transport.negotiatedProtocol)])

    def _check_received_data(self, data: bytes) -> None:
        """Checks for edge cases where the connection to remote fails
        without raising an appropriate H2Error
        Arguments:
            data -- Data received from the remote
        """
        if data.startswith(b'HTTP/2.0 405 Method Not Allowed'):
            raise MethodNotAllowed405(self.metadata['ip_address'])

    def dataReceived(self, data: bytes) -> None:
        """Feed received bytes to the h2 state machine and dispatch events."""
        # Reset the idle timeout as connection is still actively receiving data
        self.resetTimeout()
        try:
            self._check_received_data(data)
            events = self.conn.receive_data(data)
            self._handle_events(events)
        except H2Error as e:
            if isinstance(e, FrameTooLargeError):
                # hyper-h2 does not drop the connection in this scenario, we
                # need to abort the connection manually.
                self._conn_lost_errors += [e]
                self.transport.abortConnection()
                return
            # Save this error as ultimately the connection will be dropped
            # internally by hyper-h2. Saved error will be passed to all the streams
            # closed with the connection.
            self._lose_connection_with_error([e])
        finally:
            self._write_to_transport()

    def timeoutConnection(self) -> None:
        """Called when the connection times out.
        We lose the connection with TimeoutError"""
        # Check whether there are open streams. If there are, we're going to
        # want to use the error code PROTOCOL_ERROR. If there aren't, use
        # NO_ERROR.
        if (
            self.conn.open_outbound_streams > 0
            or self.conn.open_inbound_streams > 0
            or self.metadata['active_streams'] > 0
        ):
            error_code = ErrorCodes.PROTOCOL_ERROR
        else:
            error_code = ErrorCodes.NO_ERROR
        self.conn.close_connection(error_code=error_code)
        self._write_to_transport()
        self._lose_connection_with_error([
            TimeoutError(f"Connection was IDLE for more than {self.IDLE_TIMEOUT}s")
        ])

    def connectionLost(self, reason: Failure = connectionDone) -> None:
        """Called by Twisted when the transport connection is lost.
        No need to write anything to transport here.
        """
        # Cancel the timeout if not done yet
        self.setTimeout(None)
        # Notify the connection pool instance such that no new requests are
        # sent over current connection
        if not reason.check(connectionDone):
            self._conn_lost_errors.append(reason)
        self._conn_lost_deferred.callback(self._conn_lost_errors)
        # Close every remaining stream with a reason reflecting whether
        # its request had already been sent.
        for stream in self.streams.values():
            if stream.metadata['request_sent']:
                close_reason = StreamCloseReason.CONNECTION_LOST
            else:
                close_reason = StreamCloseReason.INACTIVE
            stream.close(close_reason, self._conn_lost_errors, from_protocol=True)
        self.metadata['active_streams'] -= len(self.streams)
        self.streams.clear()
        self._pending_request_stream_pool.clear()
        self.conn.close_connection()

    def _handle_events(self, events: List[Event]) -> None:
        """Private method which acts as a bridge between the events
        received from the HTTP/2 data and IH2EventsHandler
        Arguments:
            events -- A list of events that the remote peer triggered by sending data
        """
        for event in events:
            if isinstance(event, ConnectionTerminated):
                self.connection_terminated(event)
            elif isinstance(event, DataReceived):
                self.data_received(event)
            elif isinstance(event, ResponseReceived):
                self.response_received(event)
            elif isinstance(event, StreamEnded):
                self.stream_ended(event)
            elif isinstance(event, StreamReset):
                self.stream_reset(event)
            elif isinstance(event, WindowUpdated):
                self.window_updated(event)
            elif isinstance(event, SettingsAcknowledged):
                self.settings_acknowledged(event)
            elif isinstance(event, UnknownFrameReceived):
                logger.warning('Unknown frame received: %s', event.frame)

    # Event handler functions starts here
    def connection_terminated(self, event: ConnectionTerminated) -> None:
        """Remote sent GOAWAY: drop the connection with that reason."""
        self._lose_connection_with_error([
            RemoteTerminatedConnection(self.metadata['ip_address'], event)
        ])

    def data_received(self, event: DataReceived) -> None:
        """Forward a DATA frame's payload to its stream, if we own it."""
        try:
            stream = self.streams[event.stream_id]
        except KeyError:
            pass  # We ignore server-initiated events
        else:
            stream.receive_data(event.data, event.flow_controlled_length)

    def response_received(self, event: ResponseReceived) -> None:
        """Forward response headers to the owning stream, if any."""
        try:
            stream = self.streams[event.stream_id]
        except KeyError:
            pass  # We ignore server-initiated events
        else:
            stream.receive_headers(event.headers)

    def settings_acknowledged(self, event: SettingsAcknowledged) -> None:
        """Remote ACKed our settings: the HTTP/2 connection is now usable."""
        self.metadata['settings_acknowledged'] = True
        # Send off all the pending requests as now we have
        # established a proper HTTP/2 connection
        self._send_pending_requests()
        # Update certificate when our HTTP/2 connection is established
        self.metadata['certificate'] = Certificate(self.transport.getPeerCertificate())

    def stream_ended(self, event: StreamEnded) -> None:
        """Remote finished a stream: clean it up and close it as ENDED."""
        try:
            stream = self.pop_stream(event.stream_id)
        except KeyError:
            pass  # We ignore server-initiated events
        else:
            stream.close(StreamCloseReason.ENDED, from_protocol=True)

    def stream_reset(self, event: StreamReset) -> None:
        """Remote reset a stream: clean it up and close it as RESET."""
        try:
            stream = self.pop_stream(event.stream_id)
        except KeyError:
            pass  # We ignore server-initiated events
        else:
            stream.close(StreamCloseReason.RESET, from_protocol=True)

    def window_updated(self, event: WindowUpdated) -> None:
        """Flow-control window grew: let affected stream(s) resume sending."""
        if event.stream_id != 0:
            self.streams[event.stream_id].receive_window_update()
        else:
            # Send leftover data for all the streams
            for stream in self.streams.values():
                stream.receive_window_update()
@implementer(IProtocolNegotiationFactory)
class H2ClientFactory(Factory):
    """Twisted factory producing one H2ClientProtocol per connection."""

    def __init__(self, uri: URI, settings: Settings, conn_lost_deferred: Deferred) -> None:
        # Stored as-is and passed through to each protocol instance.
        self.uri = uri
        self.settings = settings
        self.conn_lost_deferred = conn_lost_deferred

    def buildProtocol(self, addr) -> H2ClientProtocol:
        return H2ClientProtocol(self.uri, self.settings, self.conn_lost_deferred)

    def acceptableProtocols(self) -> List[bytes]:
        # Advertise only HTTP/2 ("h2") during TLS protocol negotiation.
        return [PROTOCOL_NAME]
|
from django.db import models
# Create your models here.
class Ganador(models.Model):
    """Contest winner: name, mobile number and Honduran department."""

    # (code, display name) choices for the department field; 0 = none.
    DEPARTAMENTOS = (
        (0, 'Ninguno'),
        (1, 'Atlántida'),
        (2, 'Choluteca'),
        (3, 'Colón'),
        (4, 'Comayagua'),
        (5, 'Copán'),
        (6, 'Cortes'),
        (7, 'El Paraíso'),
        (8, 'Francisco Morazán'),
        (9, 'Gracias a Dios'),
        (10, 'Intibucá'),
        (11, 'Islas de la Bahía'),
        (12, 'La Paz'),
        (13, 'Lempira'),
        (14, 'Ocotepeque'),
        (15, 'Olancho'),
        (16, 'Santa Bárbara'),
        (17, 'Valle'),
        (18, 'Yoro'),
    )
    # First name
    nombre = models.CharField(max_length=200)
    # Last name
    apellido = models.CharField(max_length=200)
    # Mobile phone number (stored as free text)
    celular = models.CharField(max_length=200)
    # Department code; defaults to 0 ('Ninguno')
    departamento_id = models.IntegerField(default=0, choices=DEPARTAMENTOS)
    # Creation timestamp, set automatically on insert
    fecha_creado = models.DateTimeField('Fecha Creado', auto_now_add=True)

    def __str__(self):  # __unicode__ on Python 2
        # Full name, shown e.g. in the Django admin.
        return self.nombre + " " + self.apellido
class Restaurant:
    """A restaurant described by its name and the cuisine it serves."""

    def __init__(self, name, cuisine_type):
        self.name = name
        self.type = cuisine_type

    def describe_restaurant(self):
        """Print a one-line description of this restaurant."""
        print("{} is a restaurant that sells {} food".format(self.name, self.type))

    def open_restaurant(self):
        """Announce that the restaurant is open."""
        print("The restaurant is open")
# Demo: create three restaurants and print a description of each.
bob = Restaurant("Bob's Burgers", "American")
joe = Restaurant("Joe's Salami", "Croatian")
steve = Restaurant("Steve's Calamari", "Greek")
joe.describe_restaurant()
steve.describe_restaurant()
bob.describe_restaurant()
|
# Math Module Part 2
import math
# Factorial & Square Root
print(math.factorial(3))  # 3! = 6
print(math.sqrt(64))  # 8.0
# Greatest Common Denominator GCD
print(math.gcd(52, 8))  # 4; gcd is symmetric in its arguments:
print(math.gcd(8, 52))  # also 4
print(8/52)  # dividing by the gcd reduces 8/52 to 2/13, so these match:
print(2/13)
# Degrees and Radians
print(math.radians(360))  # 2*pi radians
print(math.degrees(math.pi * 2))  # 360.0 degrees
|
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class UserServiceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the API client, creating a default ApiClient when none is given."""
    self.api_client = ApiClient() if api_client is None else api_client
def delete_users_by_id(self, id, **kwargs):  # noqa: E501
    """Deletes a user  # noqa: E501
    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_users_by_id(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.delete_users_by_id_with_http_info(id, **kwargs)  # noqa: E501
    # Synchronous call: unwrap and return the response data.
    (data) = self.delete_users_by_id_with_http_info(id, **kwargs)  # noqa: E501
    return data
    def delete_users_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Deletes a user  # noqa: E501

        Requires authentication as administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_users_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals (self, id, all_params, kwargs);
        # recognised kwargs are folded in so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_users_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_users_by_id`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_users(self, **kwargs): # noqa: E501
"""Gets a list of users # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool is_hidden: Optional filter by IsHidden=true or false
:param bool is_disabled: Optional filter by IsDisabled=true or false
:return: list[UserDto]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_users_with_http_info(**kwargs) # noqa: E501
return data
    def get_users_with_http_info(self, **kwargs):  # noqa: E501
        """Gets a list of users  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_users_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param bool is_hidden: Optional filter by IsHidden=true or false
        :param bool is_disabled: Optional filter by IsDisabled=true or false
        :return: list[UserDto]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['is_hidden', 'is_disabled']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals (self, all_params, kwargs);
        # recognised kwargs are folded in so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_users" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Optional filters are only sent when the caller supplied them.
        query_params = []
        if 'is_hidden' in params:
            query_params.append(('IsHidden', params['is_hidden']))  # noqa: E501
        if 'is_disabled' in params:
            query_params.append(('IsDisabled', params['is_disabled']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[UserDto]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_users_by_id(self, id, **kwargs): # noqa: E501
"""Gets a user by Id # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_users_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_users_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_users_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_users_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Gets a user by Id  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_users_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: (required)
        :return: UserDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals (self, id, all_params, kwargs);
        # recognised kwargs are folded in so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_users_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_users_by_id`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_users_public(self, **kwargs): # noqa: E501
"""Gets a list of publicly visible users for display on a login screen. # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_users_public(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[UserDto]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_users_public_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_users_public_with_http_info(**kwargs) # noqa: E501
return data
    def get_users_public_with_http_info(self, **kwargs):  # noqa: E501
        """Gets a list of publicly visible users for display on a login screen.  # noqa: E501

        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_users_public_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[UserDto]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # No declared API params for this endpoint; only transport controls.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals (self, all_params, kwargs);
        # recognised kwargs are folded in so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_users_public" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting -- public endpoint, no auth schemes.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/Users/Public', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[UserDto]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_authenticatebyname(self, body, x_emby_authorization, **kwargs): # noqa: E501
"""Authenticates a user # noqa: E501
Authenticate a user by nane and password. A 200 status code indicates success, while anything in the 400 or 500 range indicates failure --- No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_authenticatebyname(body, x_emby_authorization, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AuthenticateUserByName body: AuthenticateUserByName (required)
:param str x_emby_authorization: The authorization header can be either named 'Authorization' or 'X-Emby-Authorization'. It must be of the following schema: Emby UserId=\"(guid)\", Client=\"(string)\", Device=\"(string)\", DeviceId=\"(string)\", Version=\"string\", Token=\"(string)\" Please consult the documentation for further details. (required)
:return: AuthenticationAuthenticationResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_authenticatebyname_with_http_info(body, x_emby_authorization, **kwargs) # noqa: E501
else:
(data) = self.post_users_authenticatebyname_with_http_info(body, x_emby_authorization, **kwargs) # noqa: E501
return data
    def post_users_authenticatebyname_with_http_info(self, body, x_emby_authorization, **kwargs):  # noqa: E501
        """Authenticates a user  # noqa: E501

        Authenticate a user by nane and password. A 200 status code indicates success, while anything in the 400 or 500 range indicates failure --- No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_authenticatebyname_with_http_info(body, x_emby_authorization, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param AuthenticateUserByName body: AuthenticateUserByName (required)
        :param str x_emby_authorization: The authorization header can be either named 'Authorization' or 'X-Emby-Authorization'. It must be of the following schema: Emby UserId=\"(guid)\", Client=\"(string)\", Device=\"(string)\", DeviceId=\"(string)\", Version=\"string\", Token=\"(string)\" Please consult the documentation for further details. (required)
        :return: AuthenticationAuthenticationResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'x_emby_authorization']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_authenticatebyname" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_authenticatebyname`")  # noqa: E501
        # verify the required parameter 'x_emby_authorization' is set
        if ('x_emby_authorization' not in params or
                params['x_emby_authorization'] is None):
            raise ValueError("Missing the required parameter `x_emby_authorization` when calling `post_users_authenticatebyname`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        # The auth scheme goes out as a request header, not in the body.
        header_params = {}
        if 'x_emby_authorization' in params:
            header_params['X-Emby-Authorization'] = params['x_emby_authorization']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting -- login endpoint, no auth schemes.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/Users/AuthenticateByName', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AuthenticationAuthenticationResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id(self, body, id, **kwargs): # noqa: E501
"""Updates a user # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param UserDto body: UserDto: (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_with_http_info(body, id, **kwargs) # noqa: E501
return data
    def post_users_by_id_with_http_info(self, body, id, **kwargs):  # noqa: E501
        """Updates a user  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_with_http_info(body, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param UserDto body: UserDto: (required)
        :param str id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_by_id`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id_authenticate(self, body, id, **kwargs): # noqa: E501
"""Authenticates a user # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id_authenticate(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AuthenticateUser body: AuthenticateUser (required)
:param str id: (required)
:return: AuthenticationAuthenticationResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_authenticate_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_authenticate_with_http_info(body, id, **kwargs) # noqa: E501
return data
    def post_users_by_id_authenticate_with_http_info(self, body, id, **kwargs):  # noqa: E501
        """Authenticates a user  # noqa: E501

        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_authenticate_with_http_info(body, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param AuthenticateUser body: AuthenticateUser (required)
        :param str id: (required)
        :return: AuthenticationAuthenticationResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id_authenticate" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_authenticate`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_authenticate`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting -- login endpoint, no auth schemes.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/Authenticate', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AuthenticationAuthenticationResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id_configuration(self, body, id, **kwargs): # noqa: E501
"""Updates a user configuration # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id_configuration(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ConfigurationUserConfiguration body: UserConfiguration: (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_configuration_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_configuration_with_http_info(body, id, **kwargs) # noqa: E501
return data
    def post_users_by_id_configuration_with_http_info(self, body, id, **kwargs):  # noqa: E501
        """Updates a user configuration  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_configuration_with_http_info(body, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ConfigurationUserConfiguration body: UserConfiguration: (required)
        :param str id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id_configuration" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_configuration`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_configuration`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/Configuration', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id_easypassword(self, body, id, **kwargs): # noqa: E501
"""Updates a user's easy password # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id_easypassword(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param UpdateUserEasyPassword body: UpdateUserEasyPassword (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_easypassword_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_easypassword_with_http_info(body, id, **kwargs) # noqa: E501
return data
    def post_users_by_id_easypassword_with_http_info(self, body, id, **kwargs):  # noqa: E501
        """Updates a user's easy password  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_easypassword_with_http_info(body, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param UpdateUserEasyPassword body: UpdateUserEasyPassword (required)
        :param str id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id_easypassword" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_easypassword`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_easypassword`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/EasyPassword', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id_password(self, body, id, **kwargs): # noqa: E501
"""Updates a user's password # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id_password(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param UpdateUserPassword body: UpdateUserPassword (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_password_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_password_with_http_info(body, id, **kwargs) # noqa: E501
return data
    def post_users_by_id_password_with_http_info(self, body, id, **kwargs):  # noqa: E501
        """Updates a user's password  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_password_with_http_info(body, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param UpdateUserPassword body: UpdateUserPassword (required)
        :param str id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keywords: the declared API params plus transport controls.
        all_params = ['body', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the current locals; recognised kwargs are folded in
        # so lookups below are uniform.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id_password" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_password`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_password`")  # noqa: E501

        collection_formats = {}

        # Substituted into the '{Id}' placeholder of the URL template.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/Password', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_users_by_id_policy(self, body, id, **kwargs): # noqa: E501
"""Updates a user policy # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_users_by_id_policy(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param UsersUserPolicy body: UserPolicy: (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_users_by_id_policy_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_users_by_id_policy_with_http_info(body, id, **kwargs) # noqa: E501
return data
def post_users_by_id_policy_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user policy  # noqa: E501
    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_by_id_policy_with_http_info(body, id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param UsersUserPolicy body: UserPolicy: (required)
    :param str id: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures `body` and `id` by *name*; renaming the
    # positional parameters would silently break the params dict below.
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_policy" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_policy`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_policy`")  # noqa: E501
    collection_formats = {}
    # The {Id} placeholder in the path template is filled from `id`.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
    return self.api_client.call_api(
        '/Users/{Id}/Policy', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_forgotpassword(self, body, **kwargs):  # noqa: E501
    """Initiates the forgot password process for a local user.

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param ForgotPassword body: ForgotPassword (required)
    :return: UsersForgotPasswordResult
    """
    # This convenience wrapper always unwraps the HTTP response payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand the caller the request thread directly.
        return self.post_users_forgotpassword_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous mode: the helper already returns only the deserialized data.
    return self.post_users_forgotpassword_with_http_info(body, **kwargs)  # noqa: E501
def post_users_forgotpassword_with_http_info(self, body, **kwargs):  # noqa: E501
    """Initiates the forgot password process for a local user  # noqa: E501
    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_forgotpassword_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param ForgotPassword body: ForgotPassword (required)
    :return: UsersForgotPasswordResult
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures `body` by *name*; renaming the positional
    # parameter would silently break the params dict below.
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_forgotpassword" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_forgotpassword`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/Users/ForgotPassword', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UsersForgotPasswordResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_forgotpassword_pin(self, body, **kwargs):  # noqa: E501
    """Redeems a forgot password pin.

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param ForgotPasswordPin body: ForgotPasswordPin (required)
    :return: UsersPinRedeemResult
    """
    # This convenience wrapper always unwraps the HTTP response payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand the caller the request thread directly.
        return self.post_users_forgotpassword_pin_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous mode: the helper already returns only the deserialized data.
    return self.post_users_forgotpassword_pin_with_http_info(body, **kwargs)  # noqa: E501
def post_users_forgotpassword_pin_with_http_info(self, body, **kwargs):  # noqa: E501
    """Redeems a forgot password pin  # noqa: E501
    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_forgotpassword_pin_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param ForgotPasswordPin body: ForgotPasswordPin (required)
    :return: UsersPinRedeemResult
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures `body` by *name*; renaming the positional
    # parameter would silently break the params dict below.
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_forgotpassword_pin" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_forgotpassword_pin`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/Users/ForgotPassword/Pin', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UsersPinRedeemResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_new(self, body, **kwargs):  # noqa: E501
    """Creates a user.

    Requires authentication as administrator. Synchronous by default;
    pass ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param CreateUserByName body: CreateUserByName (required)
    :return: UserDto
    """
    # This convenience wrapper always unwraps the HTTP response payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand the caller the request thread directly.
        return self.post_users_new_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous mode: the helper already returns only the deserialized data.
    return self.post_users_new_with_http_info(body, **kwargs)  # noqa: E501
def post_users_new_with_http_info(self, body, **kwargs):  # noqa: E501
    """Creates a user  # noqa: E501
    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_new_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param CreateUserByName body: CreateUserByName (required)
    :return: UserDto
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures `body` by *name*; renaming the positional
    # parameter would silently break the params dict below.
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_new" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_new`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
    return self.api_client.call_api(
        '/Users/New', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
#!/usr/bin/env python
"""MPUSensorTest.py: Receives raw data from MPU 9DOF click IMU+Magnetometer and displays it."""
import smbus
import math
# Power management registers
# (presumably the MPU-60x0/9150 register map: PWR_MGMT_1 = 0x6b,
# PWR_MGMT_2 = 0x6c -- TODO confirm against the datasheet of the exact part)
power_mgmt_1 = 0x6b
power_mgmt_2 = 0x6c
def read_byte(adr):
    """Read one byte from I2C register *adr* of the device at the
    module-level `address` on the module-level `bus`."""
    return bus.read_byte_data(address, adr)
def read_word(adr):
    """Read a big-endian 16-bit word from registers adr (high byte) and
    adr+1 (low byte)."""
    # Read order matters on real hardware: high byte first, then low byte.
    high_byte = bus.read_byte_data(address, adr)
    low_byte = bus.read_byte_data(address, adr + 1)
    return (high_byte << 8) | low_byte
def read_word_2c(adr):
    """Read a 16-bit word at *adr* and interpret it as a signed
    two's-complement value."""
    raw = read_word(adr)
    # -((65535 - raw) + 1) is the same as raw - 0x10000.
    return raw - 0x10000 if raw >= 0x8000 else raw
def dist(a, b):
    """Euclidean length of the 2-D vector (a, b)."""
    return math.hypot(a, b)
def get_y_rotation(x, y, z):
    """Tilt about the y axis, in degrees, from accelerometer components
    (sign flipped relative to atan2)."""
    return -math.degrees(math.atan2(x, dist(y, z)))
def get_x_rotation(x, y, z):
    """Tilt about the x axis, in degrees, from accelerometer components."""
    return math.degrees(math.atan2(y, dist(x, z)))
# --- Hardware setup (Python 2 script body) --------------------------------
bus = smbus.SMBus(1)  # or bus = smbus.SMBus(0) for Revision 1 boards
address = 0x69  # Address via i2cdetect
# Now wake up the IMU as it starts in sleep mode
bus.write_byte_data(address, power_mgmt_1, 0)
# --- Gyroscope: raw words from registers 0x43/0x45/0x47 ------------------
# The /131.0 scaling presumably matches the +/-250 deg/s full-scale
# sensitivity (131 LSB per deg/s) -- TODO confirm the configured range.
print "Gyro data"
print "---------"
gyro_xout = read_word_2c(0x43)
gyro_yout = read_word_2c(0x45)
gyro_zout = read_word_2c(0x47)
print "gyro_xout: {}, scaled: {}".format(gyro_xout, gyro_xout/131.0)
print "gyro_yout: {}, scaled: {}".format(gyro_yout, gyro_yout/131.0)
print "gyro_zout: {}, scaled: {}".format(gyro_zout, gyro_zout/131.0)
print
# --- Accelerometer: raw words from registers 0x3b/0x3d/0x3f --------------
# The /16384.0 scaling presumably matches the +/-2 g full-scale
# sensitivity (16384 LSB per g) -- TODO confirm the configured range.
print "Accelerometer data"
print "------------------"
accel_xout = read_word_2c(0x3b)
accel_yout = read_word_2c(0x3d)
accel_zout = read_word_2c(0x3f)
accel_xout_scaled = accel_xout/16384.0
accel_yout_scaled = accel_yout/16384.0
accel_zout_scaled = accel_zout/16384.0
print "accel_xout: {}, scaled: {}".format(accel_xout, accel_xout_scaled)
print "accel_yout: {}, scaled: {}".format(accel_yout, accel_yout_scaled)
print "accel_zout: {}, scaled: {}".format(accel_zout, accel_zout_scaled)
# Derived tilt angles from the scaled accelerometer vector.
print "x rotation: {}".format(get_x_rotation(accel_xout_scaled, accel_yout_scaled, accel_zout_scaled))
print "y rotation: {}".format(get_y_rotation(accel_xout_scaled, accel_yout_scaled, accel_zout_scaled))
|
import re
import discord
from src.utils import *
class Pokeutilities:
    """Helpers for detecting, logging and catching bot Pokemon spawns."""

    def __init__(self, user_id: int = 0, preferred_catch_type: str = "base"):
        # Account id of the spawning bot ("deriver") whose messages we watch.
        self.deriver_id = 704130818339242094
        self.user_id = user_id
        # Only 'base' and 'cjk' are supported; anything else falls back to 'base'.
        self.preferred_catch_type = preferred_catch_type
        if self.preferred_catch_type not in ('base', 'cjk'):
            self.preferred_catch_type = "base"

    def is_valid_spawn(self, message: discord.Message) -> bool:
        """Return True when *message* is a wild-spawn embed from the bot."""
        if message.author.id == self.deriver_id and message.embeds:
            title = str(message.embeds[0].title)
            if title.startswith("A wild pok") and title.endswith('appeared!'):
                return True
        return False

    async def process_caught(self, message: discord.Message) -> None:
        """Log catch confirmations addressed to this user."""
        caught_regex = re.compile(rf'Congratulations <@{self.user_id}>! You caught a level (\d+) (\w+)! \((\d+\.\d+)\%\)')
        caught_shiny_regex = re.compile(rf'Congratulations <@{self.user_id}>! You caught a level (\d+) Shiny (\w+)! \((\d+\.\d+)\%\)')
        if message.author.id != self.deriver_id:
            return
        # Search each pattern once instead of three times per branch.
        match = caught_regex.search(message.content)
        if match:
            pokemon_name = match.group(2)
            pokemon_level = match.group(1)
            print(f'caught! name: {pokemon_name} level: {pokemon_level} in {message.channel.name} | {message.guild.name}')
        shiny_match = caught_shiny_regex.search(message.content)
        if shiny_match:
            # BUG FIX: this branch previously read groups from caught_regex,
            # which never matches shiny messages ("Shiny <name>" breaks its
            # single-\w+ group), so .group() raised AttributeError on None.
            pokemon_name = shiny_match.group(2)
            pokemon_level = shiny_match.group(1)
            print(f'shiny caught! name: {pokemon_name} level: {pokemon_level} in {message.channel.name} | {message.guild.name}')

    async def catch_pokemon(self, message: discord.Message):
        """Identify the spawn image and reply with a catch command."""
        image = message.embeds[0].image.url
        pokemon = recognize_pokemon(image, self.preferred_catch_type)
        await message.channel.send(f"<@{self.deriver_id}> catch {pokemon.lower()}")
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package pulls images from a Docker Registry."""
import argparse
import logging
import sys
import tarfile
from containerregistry.client import docker_creds
from containerregistry.client import docker_name
from containerregistry.client.v2 import docker_image as v2_image
from containerregistry.client.v2_2 import docker_http
from containerregistry.client.v2_2 import docker_image as v2_2_image
from containerregistry.client.v2_2 import docker_image_list as image_list
from containerregistry.client.v2_2 import save
from containerregistry.client.v2_2 import v2_compat
from containerregistry.tools import logging_setup
from containerregistry.tools import patched
from containerregistry.transport import retry
from containerregistry.transport import transport_pool
import httplib2
# Command-line interface: which image to pull and where to write the tarball.
parser = argparse.ArgumentParser(
    description='Pull images from a Docker Registry.')
parser.add_argument(
    '--name',
    action='store',
    help=('The name of the docker image to pull and save. '
          'Supports fully-qualified tag or digest references.'))
parser.add_argument(
    '--tarball', action='store', help='Where to save the image tarball.')
# Placeholder tag substituted when the user pulls by digest (see below).
_DEFAULT_TAG = 'i-was-a-digest'
# Platform used to resolve a multi-platform manifest list to one image.
_PROCESSOR_ARCHITECTURE = 'amd64'
_OPERATING_SYSTEM = 'linux'
# Today save.tarball expects a tag, which is emitted into one or more files
# in the resulting tarball. If we don't translate the digest into a tag then
# the tarball format leaves us no good way to represent this information and
# folks are left having to tag the resulting image ID (yuck). As a datapoint
# `docker save -o /tmp/foo.tar bar@sha256:deadbeef` omits the v1 "repositories"
# file and emits `null` for the `RepoTags` key in "manifest.json". By doing
# this we leave a trivial breadcrumb of what the image was named (and the digest
# is recoverable once the image is loaded), which is a strictly better UX IMO.
# We do not need to worry about collisions by doing this here because this tool
# only packages a single image, so this is preferable to doing something similar
# in save.py itself.
def _make_tag_if_digest(name):
    """Return *name* unchanged if it is already a Tag; otherwise rewrite
    the digest reference as `<repository>:<_DEFAULT_TAG>`."""
    if isinstance(name, docker_name.Tag):
        return name
    repository = str(name.as_repository())
    return docker_name.Tag('{repo}:{tag}'.format(repo=repository, tag=_DEFAULT_TAG))
def main():
    """Resolve credentials, pull the requested image, and save a tarball.

    Resolution order: manifest list (picking the linux/amd64 child),
    then a v2.2 manifest, then a v2 manifest upgraded to v2.2.
    """
    logging_setup.DefineCommandLineArgs(parser)
    args = parser.parse_args()
    logging_setup.Init(args=args)
    if not args.name or not args.tarball:
        logging.fatal('--name and --tarball are required arguments.')
        sys.exit(1)
    # Shared HTTP transport pool with retry support.
    retry_factory = retry.Factory()
    retry_factory = retry_factory.WithSourceTransportCallable(httplib2.Http)
    transport = transport_pool.Http(retry_factory.Build, size=8)
    # Digest references contain '@'; everything else is treated as a tag.
    if '@' in args.name:
        name = docker_name.Digest(args.name)
    else:
        name = docker_name.Tag(args.name)
    # OCI Image Manifest is compatible with Docker Image Manifest Version 2,
    # Schema 2. We indicate support for both formats by passing both media types
    # as 'Accept' headers.
    #
    # For reference:
    #   OCI: https://github.com/opencontainers/image-spec
    #   Docker: https://docs.docker.com/registry/spec/manifest-v2-2/
    accept = docker_http.SUPPORTED_MANIFEST_MIMES
    # Resolve the appropriate credential to use based on the standard Docker
    # client logic.
    try:
        creds = docker_creds.DefaultKeychain.Resolve(name)
    # pylint: disable=broad-except
    except Exception as e:
        logging.fatal('Error resolving credentials for %s: %s', name, e)
        sys.exit(1)
    try:
        with tarfile.open(name=args.tarball, mode='w') as tar:
            logging.info('Pulling manifest list from %r ...', name)
            with image_list.FromRegistry(name, creds, transport) as img_list:
                if img_list.exists():
                    platform = image_list.Platform({
                        'architecture': _PROCESSOR_ARCHITECTURE,
                        'os': _OPERATING_SYSTEM,
                    })
                    # pytype: disable=wrong-arg-types
                    with img_list.resolve(platform) as default_child:
                        save.tarball(_make_tag_if_digest(name), default_child, tar)
                        return
                    # pytype: enable=wrong-arg-types
            logging.info('Pulling v2.2 image from %r ...', name)
            with v2_2_image.FromRegistry(name, creds, transport, accept) as v2_2_img:
                if v2_2_img.exists():
                    save.tarball(_make_tag_if_digest(name), v2_2_img, tar)
                    return
            # Fall back to a v2 manifest, converted in-memory to v2.2.
            logging.info('Pulling v2 image from %r ...', name)
            with v2_image.FromRegistry(name, creds, transport) as v2_img:
                with v2_compat.V22FromV2(v2_img) as v2_2_img:
                    save.tarball(_make_tag_if_digest(name), v2_2_img, tar)
                    return
    # pylint: disable=broad-except
    except Exception as e:
        logging.fatal('Error pulling and saving image %s: %s', name, e)
        sys.exit(1)
# Script entry point: run main() with httplib2 patched for the duration.
if __name__ == '__main__':
    with patched.Httplib2():
        main()
|
import torch
from torch.nn import functional as F
def swish(input, inplace=False):
    """SiLU/Swish activation: x * sigmoid(x).

    ``inplace`` is accepted for API compatibility but ignored -- the
    computation is always out-of-place.
    """
    return input.mul(input.sigmoid())
def mish(input, inplace=False):
    """Mish activation: x * tanh(softplus(x)).

    ``inplace`` is accepted for API compatibility but ignored.
    """
    return input * F.softplus(input).tanh()
def hard_swish(input, inplace=False):
    """Hard-Swish activation: x * hard_sigmoid(x).

    ``inplace`` is accepted for API compatibility but ignored.
    """
    return hard_sigmoid(input).mul(input)
def hard_sigmoid(input, inplace=False):
    """Hard sigmoid: relu6(x + 3) / 6, a piecewise-linear sigmoid proxy.

    ``inplace`` is accepted for API compatibility but ignored.
    """
    return F.relu6(input + 3) / 6.0
|
from dataclasses import dataclass
from typing import List
import numpy as np
import tensorflow as tf
from active_learning_ts.experiments.blueprint_element import BlueprintElement
from active_learning_ts.knowledge_discovery.discover_tasks.prim.prim import PRIM
from active_learning_ts.knowledge_discovery.knowledge_discovery_task import KnowledgeDiscoveryTask
from active_learning_ts.query_selection.query_sampler import QuerySampler
from active_learning_ts.queryable import Queryable
class PrimScenarioDiscoveryKnowledgeDiscoveryTask(KnowledgeDiscoveryTask):
    """Knowledge-discovery task that runs PRIM scenario discovery on
    samples drawn from the surrogate model.

    Query responses are normalised into [0, 1] using the configured
    [y_min, y_max] interval before being handed to PRIM; each learn()
    call records the box PRIM found.
    """

    def __init__(self, y_max: float = 1.0, y_min: float = 0.):
        # Reject degenerate or inverted normalisation intervals early.
        if y_max <= y_min:
            raise ValueError('The minimum value cannot be greater or equal to the maximum value.')
        y_max = float(y_max)
        y_min = float(y_min)
        self.y_max: tf.Tensor = tf.convert_to_tensor(y_max, dtype=tf.dtypes.float32)
        self.y_min: tf.Tensor = tf.convert_to_tensor(y_min, dtype=tf.dtypes.float32)
        self.range = self.y_max - self.y_min
        self.prim = PRIM()
        # One (lower_corner, upper_corner) tensor pair per learn() call.
        self.boxes: List = []
        self.num_boxes = 0.

    def post_init(self, surrogate_model: Queryable, sampler: QuerySampler):
        super(PrimScenarioDiscoveryKnowledgeDiscoveryTask, self).post_init(surrogate_model, sampler)
        # This task is wired for 2-D inputs with scalar outputs only.
        if not (surrogate_model.point_shape == (2,) and surrogate_model.value_shape == (1,)):
            raise ValueError('PrimScenarioDiscoveryKnowledgeDiscoveryTask requires a vector Surrogate input dimension '
                             '2 and output dimension 1')

    def learn(self, num_queries):
        """Sample the surrogate, fit PRIM, and record the resulting box."""
        if num_queries == 0:
            return
        x = self.sampler.sample(num_queries=num_queries)
        data_set, data_values = self.surrogate_model.query(x)
        x = data_set
        # Normalise responses to [0, 1] for PRIM.
        y = (data_values - self.y_min) / self.range
        self.prim.fit(x, y)
        # box_[0]/box_[1] are assumed to be the lower/upper corners of the
        # discovered box -- TODO confirm against the PRIM implementation.
        self.boxes.append((tf.convert_to_tensor(self.prim.box_[0], dtype=np.float32),
                           tf.convert_to_tensor(self.prim.box_[1], dtype=np.float32)))
        self.num_boxes += 1.

    def _uncertainty(self, point: tf.Tensor) -> tf.Tensor:
        """Uncertainty of a single point: 0 when the recorded boxes are
        unanimous about the point (all contain it or none do), rising to
        1 when they split evenly."""
        if len(self.boxes) == 0:
            return tf.convert_to_tensor(.0, dtype=tf.dtypes.float32)
        in_boxes = 0.
        for a, b in self.boxes:
            # Contributes 1.0 when `point` lies inside [a, b] in every dimension.
            in_boxes += tf.case([(tf.reduce_any(tf.math.less(point, a)), lambda: 0.0),
                                 (tf.reduce_any(tf.math.greater(point, b)), lambda: 0.0)], default=lambda: 1.0)
        not_in_boxes = self.num_boxes - in_boxes
        return tf.cast(tf.math.abs(tf.math.abs(in_boxes - not_in_boxes) - self.num_boxes) / self.num_boxes,
                       dtype=tf.dtypes.float32)

    def uncertainty(self, points: tf.Tensor) -> tf.Tensor:
        """Per-point uncertainty for a batch of points."""
        return tf.map_fn(lambda t: self._uncertainty(t), points, parallel_iterations=10)
@dataclass
class PrimScenarioDiscoveryKnowledgeDiscoveryTaskConfig(BlueprintElement[PrimScenarioDiscoveryKnowledgeDiscoveryTask]):
    """Blueprint configuration: y_max/y_min are forwarded to the task's
    constructor by the BlueprintElement machinery."""
    y_max: float = 1.0
    y_min: float = 0.
    klass = PrimScenarioDiscoveryKnowledgeDiscoveryTask
|
from filters import returnPercentIndex
import numpy as np
from scipy.signal import butter,filtfilt
import matplotlib.pyplot as plt
def baseline(x, y, fs, order=2, cutoff=1, addPlot=False):
    """Low-pass filter *y*, then subtract a straight-line baseline fitted
    between a right anchor and a left tangent point of the smoothed trace."""
    smoothed = butter_lowpass_filter(y, cutoff, fs, order)
    return touchBaselineRightToLeft(x, y, smoothed, addPlot=addPlot)
def getRightStartingPosition(x, y, my, addPlot=False):
    """Pick the index used as the right-hand baseline anchor.

    Tries, in order: a first-derivative turning point, a percent-change
    heuristic on the raw data, the raw maximum of the leading region,
    and finally a second-derivative (saddle) turning point.
    """
    SelectedTPPos = RightTP(x, y, my, addPlot=addPlot)
    if SelectedTPPos == 0:
        # check that startup current is not present
        # NOTE(review): this resolves to the module-local returnPercentIndex
        # defined below, shadowing the one imported from `filters` at the
        # top of the file -- confirm that is intended.
        SelectedTPPos = returnPercentIndex(y)
    if SelectedTPPos < 2 or SelectedTPPos > 20:
        # if we selected a wacky position, make sure we select the maximum in this range
        SelectedTPPos = RightMaximum(x, y, SelectedTPPos)
    if SelectedTPPos == 0:
        # look for saddle
        SelectedTPPos = RightTP(x, y, my, 2, addPlot=addPlot)
    return SelectedTPPos
def RightMaximum(x, y, SelectedTPPos):
    """Return the index of the largest y value in the leading region
    (before the voltage drops below 0.3), if it beats SelectedTPPos.

    A SelectedTPPos above 20 is treated as invalid and reset to 0 first.
    """
    # First index >= 1 where the voltage drops below 0.3 V; 0 if none does
    # (index 0 is deliberately never selected, matching the original scan).
    limit = next((i for i, volt in enumerate(x) if i and volt < 0.3), 0)
    peak = y.index(max(y[1:limit]))
    if SelectedTPPos > 20:
        SelectedTPPos = 0
    return peak if peak > SelectedTPPos else SelectedTPPos
def touchBaselineRightToLeft(x, y, my, softFilter = 1, addPlot=False):
    """Fit a straight baseline between a right-hand anchor and a left-hand
    tangent point of the smoothed trace `my`, then subtract it from `y`.

    :param x: x values (voltages)
    :param y: raw y values
    :param my: low-pass filtered copy of `y`
    :param softFilter: index offset used when estimating the local slope
    :param addPlot: when True, also plot the fit and return the line params
    :return: [x, yMod] with the baseline removed, or
        [x, yMod, mLine, cLine] when addPlot is True
    """
    yMod = []
    rightIndex = getRightStartingPosition(x, y, my, addPlot=addPlot)
    # strong assumption that we have a minimum
    middleIndex = my.index(min(my))
    leftArray = my[middleIndex:]
    leftIndex = my.index(max(leftArray))
    pos = 0
    mx = len(my) - 1
    maxIndex = leftIndex
    # Walk from the minimum towards the end, looking for the point where the
    # local slope of `my` becomes shallower than the chord to the right anchor
    # (a tangent condition).
    # NOTE(review): my.index(yval) returns the *first* occurrence of a value,
    # so duplicate y values in `my` would map to the wrong index -- verify.
    for yval in my[middleIndex:mx]:
        realIndex = my.index(yval)
        mReal = getGradient([x[realIndex], x[realIndex - softFilter]], [my[realIndex], my[realIndex - softFilter]])
        if pos < mx and mReal < 0:
            mLine = getGradient([x[realIndex], x[rightIndex]], [my[realIndex], my[rightIndex]])
            # Only compare slopes that point the same way.
            if mReal * mLine > 0:
                if abs(mReal) < abs(mLine) or realIndex == maxIndex:
                    leftIndex = realIndex
                    break
            leftIndex = realIndex
        pos += 1
    # check for a max between 0 and -0.2 V for leftside
    mxYindex = y.index(max(y[middleIndex:mx]))
    if x[mxYindex] < 0 and x[mxYindex] > -0.2:
        leftIndex = mxYindex
    # Baseline through (x[leftIndex], y[leftIndex]) and (x[rightIndex], y[rightIndex]).
    mLine = getGradient([x[leftIndex], x[rightIndex]], [y[leftIndex], y[rightIndex]])
    cLine = y[leftIndex] - mLine * x[leftIndex]
    yMod = []
    pos = 0
    # Subtract the fitted line from every raw sample.
    for v in x:
        yMod.append(y[pos] - mLine * v - cLine)
        pos += 1
    if addPlot:
        aa = []
        for val in x:
            aa.append(mLine * val + cLine)
        plt.figure(figsize=(12, 6), dpi=120)
        plt.plot(x, aa)
        plt.plot(x, y)
        plt.plot(x[leftIndex], y[leftIndex], '*')
        plt.plot(x[maxIndex], y[maxIndex], '.')
        plt.grid()
        return [x, yMod, mLine, cLine]
    return [x, yMod]
def getGradient(x, y):
    """Slope (dy/dx) of the chord from the first to the last point of x/y.

    Returns 0 when the slope is undefined: identical x endpoints,
    empty input, or non-numeric values.
    """
    try:
        return (y[-1] - y[0]) / (x[-1] - x[0])
    except (ZeroDivisionError, IndexError, TypeError):
        # The original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; catch only the failure modes this expression can hit.
        return 0
def returnPercentIndex(y, p=20, dir='right'):
    """Index of the first sample whose successor differs by less than
    *p* percent.

    With dir='left' the series is scanned in reverse order. If no such
    sample exists before the end (or before index 50), a warning is
    printed and 0 is returned.
    """
    ordered = list(y) if dir == 'right' else list(reversed(y))
    last = len(ordered) - 1
    index = 0
    for current in ordered:
        if index < last:
            change = abs(100 - (ordered[index + 1] / current) * 100)
            if change < p:
                break
        index += 1
    if index >= last or index > 50:
        print('Gradient too large for dir: ' + dir + ' at a ' + str(p) + ' slope')
        index = 0
    return index
def RightTP(dx, dy, my, df = 1, addPlot=False):
    """Find the most promising turning point of the smoothed trace in the
    region above CUTOFF_VOLTAGE.

    :param dx: x values (voltages)
    :param dy: raw y values
    :param my: low-pass filtered y values
    :param df: differentiation order (1 = slope sign change, 2 = saddle)
    :param addPlot: plot the differential and the selected point
    :return: index of the selected turning point, or 0 if none was chosen
    """
    CUTOFF_VOLTAGE = 0.35
    pos = 0
    # Differentiate the smoothed trace df times.
    dmy = my
    while pos < df:
        dmy = np.diff(dmy)
        pos += 1
    pos = 1
    mx = len(dmy) - 1
    # Collect indices where consecutive differential values change sign.
    turningPoints = []
    for val in dmy:
        if pos < mx:
            nxtVal = dmy[pos]
            if nxtVal * val < 0:
                turningPoints.append(pos)
        pos += 1
    # we might have more than one turning point
    # look for the one that looks the most promising between 0.3 and 0.4 V
    SelectedTPPos = 0
    # we also know the SelectedTPPos can in no way stay 0
    # NOTE(review): a single turning point is ignored (the selection loop
    # requires len(turningPoints) > 1) -- confirm that is intended.
    if len(turningPoints) > 1:
        prevTP = 0
        firstRun = True
        for val in turningPoints:
            if dx[val] >= CUTOFF_VOLTAGE:
                # we have a TP that we can select
                curTP = dy[val]
                if firstRun:
                    firstRun = False
                    prevTP = curTP
                    SelectedTPPos = val
                else:
                    # Prefer the turning point with the largest raw y value.
                    if curTP > prevTP:
                        prevTP = curTP
                        SelectedTPPos = val
    # get all the turning points
    if addPlot:
        plt.plot(dx[0:len(dmy)], dmy)
        plt.plot(dx[SelectedTPPos], dmy[SelectedTPPos], '*')
        if df == 1:
            plt.title(r'First differential')
        else:
            plt.title(r'Second differential')
        plt.grid()
        plt.show()
    return SelectedTPPos
def butter_lowpass_filter(data, cutoff, fs, order, addPlot=False):
    """Zero-phase low-pass filter *data* with a digital Butterworth filter.

    :param data: sequence of samples to filter
    :param cutoff: cutoff frequency in the same units as fs
    :param fs: sampling frequency
    :param order: filter order
    :param addPlot: when True, also plot the filter's frequency response
    :return: filtered samples as a plain list
    """
    from scipy import signal
    nyq = 0.5 * fs  # Nyquist frequency
    normal_cutoff = cutoff / nyq  # normalised cutoff in (0, 1)
    # BUG FIX: the filter was previously designed with analog=True, which
    # yields analog (s-domain) coefficients; feeding those to filtfilt -- a
    # digital filter -- gives a grossly wrong response (wrong gain even at
    # DC). Design a digital filter instead.
    b, a = butter(order, normal_cutoff, btype='low', analog=False)
    y = filtfilt(b, a, data)
    if addPlot:
        # freqz (digital response) replaces freqs, matching the fix above.
        w, h = signal.freqz(b, a)
        plt.semilogx(w, 20 * np.log10(abs(h)))
        plt.title('Butterworth filter frequency response')
        plt.xlabel('Frequency [radians / second]')
        plt.ylabel('Amplitude [dB]')
        plt.grid(which='both', axis='both')
        plt.axvline(normal_cutoff, color='green')  # cutoff frequency
        plt.xlim(normal_cutoff / 10, normal_cutoff * 10)
        plt.show()
    # Preserve the original contract of returning a plain list.
    return list(y)
# Bot configuration constants. The zero/empty defaults are placeholders --
# fill them in before running.
DB_PATH = 'db.sqlite'  # SQLite database file used for persistence
ADMIN_ID = 0  # numeric user id of the bot administrator
BOT_USERNAME = ''  # bot account username
# API_ID/API_HASH/BOT_TOKEN follow the Telegram client-API credential
# pattern -- presumably a Telegram bot; verify against the consuming code.
API_ID = 0
API_HASH = ''
BOT_TOKEN = ''
PROXY = None  # optional proxy configuration; None disables proxying
|
# Import module
import tensorflow as tf
import pickle
import re
class Model:
    """Sentiment-analysis wrapper: text normalisation, vocabulary lookup,
    and prediction with a saved Keras model."""

    def __init__(self):
        self.model = None  # Keras model, populated by load_the_model()
        self.vocab = None  # token lookup table, populated by load_vocab()
        # Contraction/slang expansion table used by preprocess_text().
        # The original literal contained duplicate keys ("i'd", "didn't");
        # the duplicates were removed keeping the values that took effect.
        self.appos = {
            "aren't": "are not",
            "can't": "cannot",
            "cant": "cannot",
            "couldn't": "could not",
            "didn't": "did not",
            "doesn't": "does not",
            "don't": "do not",
            "hadn't": "had not",
            "hasn't": "has not",
            "haven't": "have not",
            "he'd": "he would",
            "he'll": "he will",
            "he's": "he is",
            "i'd": "I had",
            "i'll": "I will",
            "i'm": "I am",
            "im": "I am",
            "isn't": "is not",
            "it's": "it is",
            "it'll": "it will",
            "i've": "I have",
            "let's": "let us",
            "mightn't": "might not",
            "mustn't": "must not",
            "shan't": "shall not",
            "she'd": "she would",
            "she'll": "she will",
            "she's": "she is",
            "shouldn't": "should not",
            "that's": "that is",
            "there's": "there is",
            "they'd": "they would",
            "they'll": "they will",
            "they're": "they are",
            "they've": "they have",
            "we'd": "we would",
            "we're": "we are",
            "weren't": "were not",
            "we've": "we have",
            "what'll": "what will",
            "what're": "what are",
            "what's": "what is",
            "what've": "what have",
            "where's": "where is",
            "who'd": "who would",
            "who'll": "who will",
            "who're": "who are",
            "who's": "who is",
            "who've": "who have",
            "won't": "will not",
            "wouldn't": "would not",
            "you'd": "you would",
            "you'll": "you will",
            "you're": "you are",
            "you've": "you have",
            "'re": " are",
            "wasn't": "was not",
            # NOTE(review): maps to " will" rather than "we will" -- kept
            # as-is to preserve behaviour, but looks like a typo.
            "we'll": " will",
            "gg": "going"
        }

    def preprocess_text(self, sentence):
        """Normalise *sentence*: strip URLs, @-mentions and HTML entities,
        lowercase, expand contractions, and keep only latin/cyrillic
        letters and digits."""
        # Drop URLs and @-mentions (raw strings avoid invalid-escape warnings).
        text = re.sub(r'((www\.[^\s]+)|(https?://[^\s]+))', '', sentence)
        text = re.sub(r'@[^\s]+', '', text)
        text = text.lower().split()
        # Expand contractions via the appos table.
        reformed = [self.appos[word]
                    if word in self.appos else word for word in text]
        reformed = " ".join(reformed)
        # Drop HTML entities such as &amp;.
        text = re.sub(r'&[^\s]+;', '', reformed)
        # NOTE(review): the digit range skips 0 ("1-9"); possibly a typo for
        # "0-9" -- kept as-is to preserve behaviour.
        text = re.sub(r'[^a-zA-Zа-яА-Я1-9]+', ' ', text)
        text = re.sub(r' +', ' ', text)
        return text.strip()

    def load_vocab(self, filename="vocab.pickle"):
        """Load the vocabulary lookup table from a pickle file."""
        print("Loading vocabulary files.....")
        # Context manager guarantees the handle is closed even on errors
        # (the original left the file open if pickle.load raised).
        with open(filename, "rb") as file_to_read:
            self.vocab = pickle.load(file_to_read)

    def load_the_model(self, modelname='my_sa_model'):
        """Load the saved Keras model from *modelname*."""
        print("Loading model weights.....")
        self.model = tf.keras.models.load_model(modelname)

    def predict(self, sentence):
        """Return [(label, score), ...] for *sentence*, sorted by
        descending score over (Neutral, Positive, Negative)."""
        sentence = self.preprocess_text(sentence)
        sentence = tf.constant(self.vocab[sentence.split()])
        result = tf.squeeze(self.model(tf.reshape(sentence, (1, -1))))
        return_dict = [('Neutral', result[0].numpy()), ('Positive',
                       result[1].numpy()), ('Negative', result[2].numpy())]
        return_dict.sort(key=lambda x: x[1], reverse=True)
        return return_dict

    def stupid_infer(self, sentence):
        """Trivial fallback classifier: multi-word input -> 'positive',
        single word -> 'negative'."""
        if len(sentence.split()) > 1:
            return 'positive'
        else:
            return 'negative'
|
# Setup paths for module imports
from _unittest.conftest import local_path, scratch_path
# Import required modules
from pyaedt.generic.filesystem import Scratch
from pyaedt.generic.LoadAEDTFile import load_entire_aedt_file
import base64
import filecmp
import os
import sys
def _write_jpg(design_info, scratch):
"""writes the jpg Image64 property of the design info
to a temporary file and returns the filename"""
filename = os.path.join(scratch, design_info["DesignName"] + ".jpg")
image_data_str = design_info["Image64"]
with open(filename, "wb") as f:
if sys.version_info.major == 2:
bytes = bytes(image_data_str).decode("base64")
else:
bytes = base64.decodebytes(image_data_str.encode("ascii"))
f.write(bytes)
return filename
class TestHFSSProjectFile:
    """Checks that a single-design HFSS .aedt project file loads correctly."""

    def setup_class(self):
        # NOTE(review): the Scratch context manager exits before the tests
        # run; this relies on Scratch only cleaning up on exceptions (cleanup
        # is done explicitly in teardown_class) -- confirm.
        with Scratch(scratch_path) as self.local_scratch:
            hfss_file = os.path.join(local_path, "example_models", "Coax_HFSS.aedt")
            self.project_dict = load_entire_aedt_file(hfss_file)

    def teardown_class(self):
        self.local_scratch.remove()

    def test_01_check_top_level_keys(self):
        assert list(self.project_dict.keys()) == ["AnsoftProject", "AllReferencedFilesForProject", "ProjectPreview"]

    def test_02_check_design_info(self):
        design_info = self.project_dict["ProjectPreview"]["DesignInfo"]
        # there is one design in this aedt file, so DesignInfo will be a dict
        assert isinstance(design_info, dict)
        assert design_info["Factory"] == "HFSS"
        assert design_info["DesignName"] == "HFSSDesign"
        assert design_info["IsSolved"] == False
        # Round-trip the embedded preview image and compare with the reference.
        jpg_file = _write_jpg(design_info, self.local_scratch.path)
        assert filecmp.cmp(jpg_file, os.path.join(local_path, "example_models", "Coax_HFSS.jpg"))
class TestProjectFileWithBinaryContent:
    """Checks that .aedt files containing binary sections still parse."""

    def test_01_check_can_load_aedt_file_with_binary_content(self):
        aedt_file = os.path.join(local_path, "example_models", "assembly.aedt")
        # implicitly this will test to make sure no exception is thrown by load_entire_aedt_file
        self.project_dict = load_entire_aedt_file(aedt_file)
class TestProjectFileWithMultipleDesigns:
    """Checks parsing of an .aedt project containing several designs."""

    def setup_class(self):
        # NOTE(review): Scratch is used as a context manager, so it has already
        # been "exited" by the time tests run -- confirm teardown's remove()
        # is still the intended cleanup.
        with Scratch(scratch_path) as self.local_scratch:
            aedt_file = os.path.join(local_path, "example_models", "Cassegrain.aedt")
            self.project_dict = load_entire_aedt_file(aedt_file)
            self.design_info = self.project_dict["ProjectPreview"]["DesignInfo"]

    def teardown_class(self):
        self.local_scratch.remove()

    def test_01_check_design_type(self):
        # there are multiple designs in this aedt file, so DesignInfo will be a list
        assert isinstance(self.design_info, list)

    def test_02_check_design_names(self):
        design_names = [design["DesignName"] for design in self.design_info]
        assert ["Cassegrain_Hybrid", "feeder", "Cassegrain_"] == design_names

    def test_03_check_first_design_jpg(self):
        # Decode the first design's preview and compare with the reference jpg.
        jpg_file = _write_jpg(self.design_info[0], self.local_scratch.path)
        assert filecmp.cmp(jpg_file, os.path.join(local_path, "example_models", "Cassegrain_Hybrid.jpg"))
|
import pytest
@pytest.fixture
def variable_factory(db):
    """
    Returns a factory creating persisted Variable instances.

    Depends on the pytest-django ``db`` fixture for database access.
    """
    from persistent_settings.models import Variable

    def factory(value, name="FOO"):
        # name defaults to "FOO" so most tests only need to supply a value.
        return Variable.objects.create(name=name, value=value)

    return factory
@pytest.fixture
def request_obj(client):
    """Return a real WSGIRequest obtained by GETting the "test" URL."""
    from django.urls import reverse

    return client.get(reverse("test")).wsgi_request
@pytest.fixture
def template_factory():
    """
    Returns a Template factory.

    The factory builds a minimal template that loads a tag library and
    invokes one tag, wrapped in a configurable HTML element.
    """
    from django import template

    def _render_tag_args(*args, **kwargs):
        # Positional args are double-quoted; keyword args become name="value".
        quoted = " ".join('"{}"'.format(value) for value in args)
        named = " ".join('{}="{}"'.format(name, kwargs[name]) for name in kwargs)
        return " ".join((quoted, named))

    def factory(*args, **kwargs):
        load = kwargs.pop("load", "persistent_settings")
        tag_name = kwargs.pop("tag_name")
        wrapper = kwargs.pop("wrapper", "p")
        source_lines = [
            "{{% load {} %}}".format(load),
            "<{wrapper}>{{% {tag_name} {allargs} %}}</{wrapper}>".format(
                tag_name=tag_name,
                allargs=_render_tag_args(*args, **kwargs),
                wrapper=wrapper,
            ),
        ]
        return template.Template("\n".join(source_lines))

    return factory
@pytest.fixture
def context_factory():
    """
    Returns a Context factory to be used while rendering a Template instance.
    """
    from django import template

    def factory(**kwargs):
        # Keyword arguments become the template context's variables.
        return template.Context(kwargs)

    return factory
@pytest.fixture
def command_factory():
    """
    Calls management commands and returns their captured stdout as a string.
    """
    from io import StringIO
    from django.core.management import call_command

    # Return annotation corrected: getvalue() yields str, not StringIO.
    def factory(command: str, *args, **kwargs) -> str:
        out = StringIO()
        call_command(command, stdout=out, *args, **kwargs)
        return out.getvalue()

    return factory
|
import logging
import os
from pywps import FORMATS, ComplexInput, ComplexOutput, Format, LiteralInput, LiteralOutput, Process
from pywps.app.Common import Metadata
from pywps.response.status import WPS_STATUS
from pywps.inout.literaltypes import AllowedValue
from pywps.validator.allowed_value import ALLOWEDVALUETYPE
from .. import runner, util
from .utils import default_outputs, model_experiment_ensemble, year_ranges, check_constraints
LOGGER = logging.getLogger("PYWPS")
class EnsClus(Process):
    """WPS process exposing the ESMValTool 'ensclus' recipe: k-means
    clustering of an ensemble of climate simulations, selecting the most
    representative member for each cluster."""

    def __init__(self):
        # Variables / frequency the ESGF search constraints may use.
        self.variables = ['pr', 'tas']
        self.frequency = 'mon'
        inputs = [
            *model_experiment_ensemble(model='ACCESS1-0',
                                       experiment='historical',
                                       ensemble='r1i1p1',
                                       min_occurs=3,
                                       required_variables=self.variables,
                                       required_frequency=self.frequency),
            *year_ranges((1900, 2005)),
            LiteralInput('variable',
                         'Variable',
                         abstract='Select the variable to simulate.',
                         data_type='string',
                         default='pr',
                         allowed_values=['pr', 'tas']),
            LiteralInput(
                'season',
                'Season',
                abstract='Choose a season like DJF.',
                data_type='string',
                allowed_values=['DJF', 'DJFM', 'NDJFM', 'JJA'],
                default='JJA',
            ),
            LiteralInput(
                'area',
                'Area',
                abstract='Area over which to calculate.',
                data_type='string',
                allowed_values=['EU', 'EAT', 'PNA', 'NH'],
                default='EU',
            ),
            LiteralInput(
                'extreme',
                'Extreme',
                abstract='Extreme metric.',
                data_type='string',
                allowed_values=[
                    '60th_percentile', '75th_percentile', '90th_percentile', 'mean', 'maximum', 'std', 'trend'
                ],
                default='75th_percentile',
            ),
            LiteralInput(
                'numclus',
                'Number of Clusters',
                abstract='Number of clusters.',
                data_type='integer',
                default=2,
                allowed_values=AllowedValue(allowed_type=ALLOWEDVALUETYPE.RANGE, minval=1, maxval=1000),
            ),
            LiteralInput(
                'perc',
                'Percentage',
                abstract='Percentage of total Variance',
                data_type='integer',
                default='80',
                allowed_values=AllowedValue(allowed_type=ALLOWEDVALUETYPE.RANGE, minval=0, maxval=100),
            ),
            LiteralInput(
                'numpcs',
                'Number of PCs',
                abstract='Number of PCs to retain. Has priority over Percentage unless set to 0',
                data_type='integer',
                default='0',
                allowed_values=AllowedValue(allowed_type=ALLOWEDVALUETYPE.RANGE, minval=0, maxval=1000),
            ),
        ]
        outputs = [
            # NOTE(review): declared as image/eps, while _handler requests
            # output_format='png' and get_outputs sets application/eps; the
            # three should probably agree -- left unchanged here.
            ComplexOutput('plot',
                          'Output plot',
                          abstract='Generated output plot of ESMValTool processing.',
                          as_reference=True,
                          supported_formats=[Format('image/eps')]),
            ComplexOutput('ens_extreme',
                          'ens_extreme',
                          abstract='Generated output data of ESMValTool processing.',
                          as_reference=True,
                          supported_formats=[FORMATS.NETCDF]),
            ComplexOutput('ens_climatologies',
                          'ens_climatologies',
                          abstract='Generated output data of ESMValTool processing.',
                          as_reference=True,
                          supported_formats=[FORMATS.NETCDF]),
            ComplexOutput('ens_anomalies',
                          'ens_anomalies',
                          abstract='Generated output data of ESMValTool processing.',
                          as_reference=True,
                          supported_formats=[FORMATS.NETCDF]),
            ComplexOutput('statistics',
                          'Statistics',
                          abstract='Clustering Statistics',
                          as_reference=True,
                          supported_formats=[Format('text/plain')]),
            ComplexOutput('archive',
                          'Archive',
                          abstract='The complete output of the ESMValTool processing as an zip archive.',
                          as_reference=True,
                          supported_formats=[Format('application/zip')]),
            *default_outputs(),
        ]

        super(EnsClus, self).__init__(
            self._handler,
            identifier="ensclus",
            title="EnsClus - Ensemble Clustering",
            version=runner.VERSION,
            abstract="""Cluster analysis tool based on the k-means algorithm
                for ensembles of climate model simulations. EnsClus group
                ensemble members according to similar characteristics and
                select the most representative member for each cluster. The estimated calculation
                time of this process is 4 minutes for the default values supplied.
                The Ensemble Clustering metric requires at least two models to be chosen,
                choosing more models is supported.
                """,
            metadata=[
                Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
                Metadata('Documentation',
                         'https://esmvaltool.readthedocs.io/en/v2.0a2/recipes/recipe_ensclus.html',
                         role=util.WPS_ROLE_DOC),
            ],
            inputs=inputs,
            outputs=outputs,
            status_supported=True,
            store_supported=True)

    def _handler(self, request, response):
        """Generate the ensclus recipe from the request, run ESMValTool and
        attach the outputs (or the failure log) plus a zip archive to *response*."""
        response.update_status("starting ...", 0)

        # build esgf search constraints
        constraints = dict(
            models=request.inputs['model'],
            ensembles=request.inputs['ensemble'],
            experiments=request.inputs['experiment'],
        )
        check_constraints(constraints)

        options = dict(
            season=request.inputs['season'][0].data,
            area=request.inputs['area'][0].data,
            extreme=request.inputs['extreme'][0].data,
            numclus=request.inputs['numclus'][0].data,
            perc=request.inputs['perc'][0].data,
            numpcs=request.inputs['numpcs'][0].data,
            variable=request.inputs['variable'][0].data,
        )

        # generate recipe
        response.update_status("generate recipe ...", 10)
        recipe_file, config_file = runner.generate_recipe(
            workdir=self.workdir,
            diag='ensclus',
            constraints=constraints,
            start_year=request.inputs['start_year'][0].data,
            end_year=request.inputs['end_year'][0].data,
            output_format='png',
            options=options,
        )

        # recipe output
        response.outputs['recipe'].output_format = FORMATS.TEXT
        response.outputs['recipe'].file = recipe_file

        # run diag
        response.update_status("running diagnostic (this could take a while)...", 20)
        result = runner.run(recipe_file, config_file)

        # log output
        response.outputs['log'].output_format = FORMATS.TEXT
        response.outputs['log'].file = result['logfile']

        # debug log output
        response.outputs['debug_log'].output_format = FORMATS.TEXT
        response.outputs['debug_log'].file = result['debug_logfile']

        response.outputs['success'].data = result['success']
        if result['success']:
            try:
                self.get_outputs(result, response)
            except Exception as e:
                response.update_status("exception occured: " + str(e), 85)
        else:
            LOGGER.exception('esmvaltool failed!')
            response.update_status("exception occured: " + result['exception'], 85)

        # The archive is produced even on failure so users can inspect output.
        response.update_status("creating archive of diagnostic result ...", 90)
        response.outputs['archive'].output_format = Format('application/zip')
        response.outputs['archive'].file = runner.compress_output(
            os.path.join(self.workdir, 'output'),
            os.path.join(self.workdir, 'ensemble_clustering_result.zip'))
        response.update_status("done.", 100)
        return response

    def get_outputs(self, result, response):
        """Locate the diagnostic's plot/netCDF/statistics files in the run
        directories and attach them to the matching response outputs."""
        # result plot
        response.update_status("collecting output ...", 80)
        response.outputs['plot'].output_format = Format('application/eps')
        response.outputs['plot'].file = runner.get_output(result['plot_dir'],
                                                          path_filter=os.path.join('EnsClus', 'main'),
                                                          name_filter="anomalies*",
                                                          output_format="png")

        response.outputs['ens_extreme'].output_format = FORMATS.NETCDF
        response.outputs['ens_extreme'].file = runner.get_output(result['work_dir'],
                                                                 path_filter=os.path.join('EnsClus', 'main'),
                                                                 name_filter="ens_extreme*",
                                                                 output_format="nc")

        response.outputs['ens_climatologies'].output_format = FORMATS.NETCDF
        # Fix: previously filtered on "ens_anomalies*" (copy/paste from the
        # block below), so this output returned the anomalies file instead of
        # the climatologies file.
        response.outputs['ens_climatologies'].file = runner.get_output(result['work_dir'],
                                                                       path_filter=os.path.join('EnsClus', 'main'),
                                                                       name_filter="ens_climatologies*",
                                                                       output_format="nc")

        response.outputs['ens_anomalies'].output_format = FORMATS.NETCDF
        response.outputs['ens_anomalies'].file = runner.get_output(result['work_dir'],
                                                                   path_filter=os.path.join('EnsClus', 'main'),
                                                                   name_filter="ens_anomalies*",
                                                                   output_format="nc")

        response.outputs['statistics'].output_format = FORMATS.TEXT
        response.outputs['statistics'].file = runner.get_output(result['work_dir'],
                                                                path_filter=os.path.join('EnsClus', 'main'),
                                                                name_filter="statistics*",
                                                                output_format="txt")
|
from ..app import App
from ..model import Collection
from .base import AggregateProvider
class StorageAggregateProvider(AggregateProvider):
    """Aggregate provider that delegates directly to the context's storage."""

    def aggregate(self, query=None, group=None, order_by=None, limit=None):
        # Pure pass-through: the storage backend implements the aggregation.
        # NOTE(review): self.storage is presumably set by AggregateProvider --
        # confirm in the base class.
        return self.storage.aggregate(
            query, group=group, order_by=order_by, limit=limit
        )
@App.aggregateprovider(model=Collection)
def get_aggregateprovider(context):
    """Register StorageAggregateProvider as the aggregate provider for Collection."""
    return StorageAggregateProvider(context)
|
import pygame as pg

# Shorthand for pygame's 2D vector type.
vec = pg.math.Vector2

# Colors (RGB tuples)
RED = (255, 0, 0)
GREEN = (0, 255, 0)

# Game Settings
WIDTH = 1200  # window width in pixels
HEIGHT = 900  # window height in pixels
FPS = 120  # target frames per second

# Arrow Settings
# NOTE(review): presumably [minimum, maximum] arrow speed -- confirm at use site.
SPEED = [600, 1000]
import requests

# Link to request data about the characters
url = 'http://anapioficeandfire.com/api/characters?name='
# Get the character name to search for
name = input('Enter a name to search for: ')
url += name
# Make an HTTP request and the API will respond with a JSON output
response = requests.get(url)
# The API returns a JSON *list* of characters matching the search query.
data = response.json()  # returns a list of characters from search query
if (len(data) == 0):
    print('No character found by that name')
elif (len(data) == 1):
    # Exactly one match: print actors, seasons, aliases and the API URL.
    character = data[0]
    print(name + ' is played by ' + ', '.join(character['playedBy']) + ' and is found in seasons ' + ', '.join(character['tvSeries']))
    print(name + ' has the following aliases:')
    for alias in character['aliases']:
        print('\t- ' + alias)
    print('\nMore information on ' + name + ' can be found here: ' + character['url'])
else:
    # Ambiguous search: list each match with its canonical API URL.
    print('Multiple characters were found by that name:')
    for character in data:
        print('\t- ' + character['name'] + ': ' + character['url'])
# Copyright (c) 2013, FinByz Tech Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import msgprint, _
def execute(filters=None):
    """Report entry point: the standard sales register extended with GST columns.

    Adds the GSTIN / place-of-supply / invoice-type display columns (and the
    matching query columns) before delegating to _execute.
    """
    return _execute(filters, additional_table_columns=[
        dict(fieldtype='Data', label='Customer GSTIN', fieldname="customer_gstin", width=120),
        dict(fieldtype='Data', label='Billing Address GSTIN', fieldname="billing_address_gstin", width=140),
        dict(fieldtype='Data', label='Company GSTIN', fieldname="company_gstin", width=120),
        dict(fieldtype='Data', label='Place of Supply', fieldname="place_of_supply", width=120),
        dict(fieldtype='Data', label='Reverse Charge', fieldname="reverse_charge", width=120),
        dict(fieldtype='Data', label='Invoice Type', fieldname="invoice_type", width=120),
        dict(fieldtype='Data', label='Export Type', fieldname="export_type", width=120),
        dict(fieldtype='Data', label='E-Commerce GSTIN', fieldname="ecommerce_gstin", width=130)
    ], additional_query_columns=[
        'customer_gstin',
        'billing_address_gstin',
        'company_gstin',
        'place_of_supply',
        'reverse_charge',
        'invoice_type',
        'export_type',
        'ecommerce_gstin'
    ])
def _execute(filters, additional_table_columns=None, additional_query_columns=None):
    """Build the report: one row per submitted Sales Invoice.

    Returns (columns, data). Row layout must stay in sync with get_columns():
    invoice details, optional extra columns, per-income-account amounts,
    net total, per-tax-account amounts, then the totals block.
    """
    if not filters: filters = frappe._dict({})

    invoice_list = get_invoices(filters, additional_query_columns)
    columns, income_accounts, tax_accounts = get_columns(invoice_list, additional_table_columns)

    if not invoice_list:
        msgprint(_("No record found"))
        return columns, invoice_list

    invoice_income_map = get_invoice_income_map(invoice_list)
    # Tax rows booked against income accounts are folded into the income map.
    invoice_income_map, invoice_tax_map = get_invoice_tax_map(invoice_list,
        invoice_income_map, income_accounts)
    # Cost Center & Warehouse map
    invoice_cc_wh_map = get_invoice_cc_wh_map(invoice_list)
    invoice_so_dn_map = get_invoice_so_dn_map(invoice_list)
    company_currency = frappe.get_cached_value('Company', filters.get("company"), "default_currency")
    mode_of_payments = get_mode_of_payments([inv.name for inv in invoice_list])

    data = []
    for inv in invoice_list:
        # invoice details: de-duplicate linked documents per invoice
        sales_order = list(set(invoice_so_dn_map.get(inv.name, {}).get("sales_order", [])))
        delivery_note = list(set(invoice_so_dn_map.get(inv.name, {}).get("delivery_note", [])))
        cost_center = list(set(invoice_cc_wh_map.get(inv.name, {}).get("cost_center", [])))
        warehouse = list(set(invoice_cc_wh_map.get(inv.name, {}).get("warehouse", [])))

        row = [
            inv.name, inv.posting_date, inv.customer, inv.customer_name
        ]

        if additional_query_columns:
            for col in additional_query_columns:
                row.append(inv.get(col))

        row +=[
            inv.get("customer_type"),
            inv.get("territory"),
            inv.get("tax_id"),
            inv.debit_to, ", ".join(mode_of_payments.get(inv.name, [])),
            inv.project, inv.owner, inv.remarks,
            ", ".join(sales_order), ", ".join(delivery_note),", ".join(cost_center),
            ", ".join(warehouse), company_currency
        ]

        # map income values
        base_net_total = 0
        for income_acc in income_accounts:
            income_amount = flt(invoice_income_map.get(inv.name, {}).get(income_acc))
            base_net_total += income_amount
            row.append(income_amount)

        # net total (fall back to the stored total when no item rows matched)
        row.append(base_net_total or inv.base_net_total)

        # tax account (skip accounts already shown as income columns)
        total_tax = 0
        for tax_acc in tax_accounts:
            if tax_acc not in income_accounts:
                tax_amount = flt(invoice_tax_map.get(inv.name, {}).get(tax_acc))
                total_tax += tax_amount
                row.append(tax_amount)

        # total tax, grand total, outstanding amount & rounded total
        row += [total_tax, inv.base_grand_total, inv.base_rounded_total, inv.outstanding_amount]

        data.append(row)

    return columns, data
def get_columns(invoice_list, additional_table_columns):
    """return columns based on filters

    Returns (columns, income_accounts, tax_accounts): the report column
    definitions plus the distinct income / tax account names found on the
    given invoices (used by _execute to fill the per-account amount columns).
    """
    columns = [
        _("Invoice") + ":Link/Sales Invoice:120", _("Posting Date") + ":Date:80",
        _("Customer") + ":Link/Customer:120", _("Customer Name") + "::120"
    ]
    if additional_table_columns:
        columns += additional_table_columns

    columns +=[
        _("Customer Type") + ":Data:120", _("Territory") + ":Link/Territory:80",
        _("Tax Id") + "::80", _("Receivable Account") + ":Link/Account:120", _("Mode of Payment") + "::120",
        _("Project") +":Link/Project:80", _("Owner") + "::150", _("Remarks") + "::150",
        _("Sales Order") + ":Link/Sales Order:100", _("Delivery Note") + ":Link/Delivery Note:100",
        _("Cost Center") + ":Link/Cost Center:100", _("Warehouse") + ":Link/Warehouse:100",
        {
            "fieldname": "currency",
            "label": _("Currency"),
            "fieldtype": "Data",
            "width": 80
        }
    ]

    # Fix: initialise each list independently. The original chained assignment
    # (a = b = c = d = []) aliased all four names to ONE shared list object,
    # so an append through any name not later rebound would leak into the rest.
    income_accounts = []
    tax_accounts = []
    income_columns = []
    tax_columns = []

    if invoice_list:
        income_accounts = frappe.db.sql_list("""select distinct income_account
            from `tabSales Invoice Item` where docstatus = 1 and parent in (%s)
            order by income_account""" %
            ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))

        tax_accounts = frappe.db.sql_list("""select distinct account_head
            from `tabSales Taxes and Charges` where parenttype = 'Sales Invoice'
            and docstatus = 1 and base_tax_amount_after_discount_amount != 0
            and parent in (%s) order by account_head""" %
            ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))

        income_columns = [(account + ":Currency/currency:120") for account in income_accounts]
        # Tax accounts that double as income accounts already have a column.
        for account in tax_accounts:
            if account not in income_accounts:
                tax_columns.append(account + ":Currency/currency:120")

    columns = columns + income_columns + [_("Net Total") + ":Currency/currency:120"] + tax_columns + \
        [_("Total Tax") + ":Currency/currency:120", _("Grand Total") + ":Currency/currency:120",
        _("Rounded Total") + ":Currency/currency:120", _("Outstanding Amount") + ":Currency/currency:120"]

    return columns, income_accounts, tax_accounts
def get_conditions(filters):
    """Translate report *filters* into an SQL where-clause fragment.

    Every clause uses a pyformat placeholder (e.g. %(company)s), so the
    returned string is parameterised by the same *filters* dict when the
    caller executes the query -- no values are interpolated here.
    """
    conditions = ""

    # Simple equality / date-range filters on `tabSales Invoice` columns.
    if filters.get("company"): conditions += " and company=%(company)s"
    if filters.get("customer"): conditions += " and customer = %(customer)s"

    if filters.get("from_date"): conditions += " and posting_date >= %(from_date)s"
    if filters.get("to_date"): conditions += " and posting_date <= %(to_date)s"

    if filters.get("owner"): conditions += " and owner = %(owner)s"

    # Filters that live on child tables are expressed as EXISTS sub-queries.
    if filters.get("mode_of_payment"):
        conditions += """ and exists(select name from `tabSales Invoice Payment`
            where parent=`tabSales Invoice`.name
            and ifnull(`tabSales Invoice Payment`.mode_of_payment, '') = %(mode_of_payment)s)"""

    if filters.get("cost_center"):
        conditions +=  """ and exists(select name from `tabSales Invoice Item`
            where parent=`tabSales Invoice`.name
                and ifnull(`tabSales Invoice Item`.cost_center, '') = %(cost_center)s)"""

    if filters.get("warehouse"):
        conditions +=  """ and exists(select name from `tabSales Invoice Item`
            where parent=`tabSales Invoice`.name
                and ifnull(`tabSales Invoice Item`.warehouse, '') = %(warehouse)s)"""

    if filters.get("brand"):
        conditions +=  """ and exists(select name from `tabSales Invoice Item`
            where parent=`tabSales Invoice`.name
                and ifnull(`tabSales Invoice Item`.brand, '') = %(brand)s)"""

    if filters.get("item_group"):
        conditions +=  """ and exists(select name from `tabSales Invoice Item`
            where parent=`tabSales Invoice`.name
                and ifnull(`tabSales Invoice Item`.item_group, '') = %(item_group)s)"""

    return conditions
def get_invoices(filters, additional_query_columns):
    """Fetch submitted Sales Invoices matching *filters*, newest first.

    additional_query_columns: extra column names appended verbatim to the
    SELECT list (caller-controlled fixed strings, not user input).
    """
    if additional_query_columns:
        additional_query_columns = ', ' + ', '.join(additional_query_columns)

    conditions = get_conditions(filters)
    # Query is assembled in two passes: .format() injects the extra column
    # names, then %-interpolation injects the where-clause fragment; the
    # actual filter *values* are bound separately via the filters dict.
    return frappe.db.sql("""
        select name, posting_date, debit_to, project, customer,
        customer_name, owner, remarks, territory, tax_id, (select customer_type from `tabCustomer` where `tabCustomer`.name = `tabSales Invoice`.customer) as customer_type,
        base_net_total, base_grand_total, base_rounded_total, outstanding_amount {0}
        from `tabSales Invoice`
        where docstatus = 1 %s order by posting_date desc, name desc""".format(additional_query_columns or '') %
        conditions, filters, as_dict=1)
def get_invoice_income_map(invoice_list):
    """Map invoice name -> {income_account: total base net amount}.

    Aggregates `tabSales Invoice Item` rows of the given invoices grouped
    by income account.
    """
    income_details = frappe.db.sql("""select parent, income_account, sum(base_net_amount) as amount
        from `tabSales Invoice Item` where parent in (%s) group by parent, income_account""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)

    invoice_income_map = {}
    for d in income_details:
        # setdefault creates the per-invoice dict on first sight; the original
        # additionally seeded the account key with [] only to overwrite it on
        # the next line -- that dead step is removed.
        invoice_income_map.setdefault(d.parent, frappe._dict())[d.income_account] = flt(d.amount)

    return invoice_income_map
def get_invoice_tax_map(invoice_list, invoice_income_map, income_accounts):
    """Split invoice taxes into income-account amounts and pure tax amounts.

    Tax rows whose account head is also an income account are folded into
    *invoice_income_map*; everything else goes into a new tax map.
    Returns the (mutated) income map and the tax map.
    """
    tax_details = frappe.db.sql("""select parent, account_head,
        sum(base_tax_amount_after_discount_amount) as tax_amount
        from `tabSales Taxes and Charges` where parent in (%s) group by parent, account_head""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)

    invoice_tax_map = {}
    for d in tax_details:
        if d.account_head in income_accounts:
            # Add to the existing income figure if present, else start one.
            if d.account_head in invoice_income_map[d.parent]:
                invoice_income_map[d.parent][d.account_head] += flt(d.tax_amount)
            else:
                invoice_income_map[d.parent][d.account_head] = flt(d.tax_amount)
        else:
            # The original seeded the account key with [] via setdefault and
            # immediately overwrote it -- that dead step is removed.
            invoice_tax_map.setdefault(d.parent, frappe._dict())[d.account_head] = flt(d.tax_amount)

    return invoice_income_map, invoice_tax_map
def get_invoice_so_dn_map(invoice_list):
    """Map invoice name -> {"sales_order": [...], "delivery_note": [...]}.

    Delivery notes are either linked directly on the item row or, failing
    that, looked up through the item's sales-order detail.
    """
    si_items = frappe.db.sql("""select parent, sales_order, delivery_note, so_detail
        from `tabSales Invoice Item` where parent in (%s)
        and (ifnull(sales_order, '') != '' or ifnull(delivery_note, '') != '')""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)

    invoice_so_dn_map = {}
    for d in si_items:
        if d.sales_order:
            invoice_so_dn_map.setdefault(d.parent, frappe._dict()).setdefault(
                "sales_order", []).append(d.sales_order)

        delivery_note_list = None
        if d.delivery_note:
            delivery_note_list = [d.delivery_note]
        elif d.sales_order:
            # No direct link: find delivery notes created against the same
            # sales-order line.
            delivery_note_list = frappe.db.sql_list("""select distinct parent from `tabDelivery Note Item`
                where docstatus=1 and so_detail=%s""", d.so_detail)

        if delivery_note_list:
            # NOTE(review): setdefault keeps only the FIRST item row's list per
            # invoice; later rows' delivery notes are dropped -- confirm intended.
            invoice_so_dn_map.setdefault(d.parent, frappe._dict()).setdefault("delivery_note", delivery_note_list)

    return invoice_so_dn_map
def get_invoice_cc_wh_map(invoice_list):
    """Map invoice name -> {"cost_center": [...], "warehouse": [...]}.

    Collected from the invoices' item rows; duplicates are de-duplicated
    later by the caller.
    """
    si_items = frappe.db.sql("""select parent, cost_center, warehouse
        from `tabSales Invoice Item` where parent in (%s)
        and (ifnull(cost_center, '') != '' or ifnull(warehouse, '') != '')""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)

    invoice_cc_wh_map = {}
    for d in si_items:
        if d.cost_center:
            invoice_cc_wh_map.setdefault(d.parent, frappe._dict()).setdefault(
                "cost_center", []).append(d.cost_center)

        if d.warehouse:
            invoice_cc_wh_map.setdefault(d.parent, frappe._dict()).setdefault(
                "warehouse", []).append(d.warehouse)

    return invoice_cc_wh_map
def get_mode_of_payments(invoice_list):
    """Map invoice name -> list of distinct modes of payment.

    invoice_list: plain invoice *names* (not row objects), unlike the other
    helpers in this report.
    """
    mode_of_payments = {}
    if invoice_list:
        inv_mop = frappe.db.sql("""select parent, mode_of_payment
            from `tabSales Invoice Payment` where parent in (%s) group by parent, mode_of_payment""" %
            ', '.join(['%s']*len(invoice_list)), tuple(invoice_list), as_dict=1)
        for d in inv_mop:
            mode_of_payments.setdefault(d.parent, []).append(d.mode_of_payment)
    return mode_of_payments
|
"""Django REST Framework like model viewsets."""
from functools import update_wrapper
from inspect import getmembers
from django.core.exceptions import ImproperlyConfigured
from django.utils.decorators import classonlymethod
from django.views.generic.base import TemplateResponseMixin, View
from . import mixins
class GenericViewSet(TemplateResponseMixin, View):
    """Base class for all sqlalchemy model generic viewsets."""

    def get_template_names(self):
        # Template suffix follows the current action, e.g. "_list", "_detail".
        self.template_name_suffix = "_" + self.action
        names = []
        try:
            names.extend(super().get_template_names())
        except ImproperlyConfigured:
            # No template_name / model configured at the Django level; fall
            # through to the model-derived template name below (if any).
            pass
        if hasattr(self, "get_model_template_name"):
            names.append(self.get_model_template_name())
        return names

    @classonlymethod
    def as_view(cls, actions=None, **initkwargs):
        """Build a view callable dispatching HTTP methods to viewset actions.

        actions maps lower-case HTTP method names to method names on the
        viewset, e.g. ``{"get": "list", "post": "create"}``.
        """
        # The suffix initkwarg is reserved for displaying the viewset type.
        # eg. 'List' or 'Instance'.
        cls.suffix = None

        # The detail initkwarg is reserved for introspecting the viewset type.
        cls.detail = None

        # Setting a basename allows a view to reverse its action urls. This
        # value is provided by the router through the initkwargs.
        cls.basename = None

        # actions must not be empty
        if not actions:
            raise TypeError(
                "The `actions` argument must be provided when "
                "calling `.as_view()` on a ViewSet. For example "
                "`.as_view({'get': 'list'})`"
            )

        # sanitize keyword arguments
        for key in initkwargs:
            if key in cls.http_method_names:
                raise TypeError(
                    "You tried to pass in the %s method name as a "
                    "keyword argument to %s(). Don't do that." % (key, cls.__name__)
                )
            if not hasattr(cls, key):
                raise TypeError("{}() received an invalid keyword {!r}".format(cls.__name__, key))

        def view(request, *args, **kwargs):
            # A fresh viewset instance per request keeps state isolated.
            self = cls(**initkwargs)
            # We also store the mapping of request methods to actions,
            # so that we can later set the action attribute.
            # eg. `self.action = 'list'` on an incoming GET request.
            self.action_map = actions
            method = request.method.lower()
            # Unmapped HTTP methods fall back to the "metadata" action.
            self.action = self.action_map.get(method, "metadata")

            # Bind methods to actions
            # This is the bit that's different to a standard view
            for method, action in actions.items():
                handler = getattr(self, action)
                setattr(self, method, handler)

            if hasattr(self, "get") and not hasattr(self, "head"):
                self.head = self.get

            self.request = request
            self.args = args
            self.kwargs = kwargs

            # And continue as usual
            return self.dispatch(request, *args, **kwargs)

        # take name and docstring from class
        update_wrapper(view, cls, updated=())

        # and possible attributes set by decorators
        # like csrf_exempt from dispatch
        update_wrapper(view, cls.dispatch, assigned=())

        # We need to set these on the view function, so that breadcrumb
        # generation can pick out these bits of information from a
        # resolved URL.
        view.cls = cls
        view.initkwargs = initkwargs
        view.suffix = initkwargs.get("suffix", None)
        view.actions = actions
        return view

    @classmethod
    def get_extra_actions(cls):
        """Get the methods that are marked as an extra ViewSet `@action`."""
        return [method for _, method in getmembers(cls, lambda attr: hasattr(attr, "bind_to_methods"))]
class ReadOnlyModelViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, GenericViewSet):
    """A viewset that provides default `list()` and `retrieve()` actions.

    When used with router, it will map the following operations to actions on the viewset

    ====== ======================== =============== ======================
    Method Path                     Action          Route Name
    ====== ======================== =============== ======================
    GET    /                        list            <resource name>-list
    GET    /<pk>/                   retrieve        <resource name>-detail
    ====== ======================== =============== ======================
    """
class ModelViewSet(mixins.CreateModelMixin, mixins.UpdateModelMixin, mixins.DeleteModelMixin, ReadOnlyModelViewSet):
    """A viewset that provides default `new()`, `create()`, `retrieve()`,
    `edit()`, `update()`, `confirm_destroy()`, `destroy()` and `list()`
    actions.

    When used with router, it will map the following operations to actions on the viewset

    ====== ======================== =============== ======================
    Method Path                     Action          Route Name
    ====== ======================== =============== ======================
    GET    /                        list            <resource name>-list
    POST   /                        create          <resource name>-list
    GET    /new/                    new             <resource name>-new
    GET    /<pk>/                   retrieve        <resource name>-detail
    POST   /<pk>/                   update          <resource name>-detail
    PUT    /<pk>/                   update          <resource name>-detail
    PATCH  /<pk>/                   update          <resource name>-detail
    DELETE /<pk>/                   destroy         <resource name>-detail
    GET    /<pk>/edit/              edit            <resource name>-edit
    GET    /<pk>/delete/            confirm_destroy <resource name>-delete
    POST   /<pk>/delete/            destroy         <resource name>-delete
    ====== ======================== =============== ======================
    """
|
if __name__ == '__main__':
    import numpy as np
    import pandas as pd
    import os

    print('\n Memory Pressure Test Starts...\n')

    for i in os.listdir():
        if 'mprofile_' in i:
            # Each mprofile_* file is a whitespace-separated memory-profiler log.
            # NOTE(review): error_bad_lines was removed in pandas 2.0; switch to
            # on_bad_lines='skip' once pandas>=1.3 can be assumed.
            df = pd.read_csv(i, sep=' ', error_bad_lines=False)
            df.columns = ['null', 'memory', 'time']
            # Fix: pass axis as a keyword -- the positional form
            # df.drop('null', 1) is deprecated and removed in pandas 2.0.
            df.drop('null', axis=1, inplace=True)

            std_limit = 5        # allowed std-dev of memory use after warm-up
            highest_limit = 800  # allowed peak memory use

            # Skip the first 1500 samples so start-up allocations are ignored.
            std = np.std(np.array(df.memory.values[1500:]))
            highest = df.memory.max()

            if std > std_limit:
                raise Exception('MEMORY TEST FAILED: Standard deviation of memory pressure is %d which is above the %d limit' % (std, std_limit))
            if highest > highest_limit:
                raise Exception('MEMORY TEST FAILED: Max memory is %d which is above the %d limit' % (highest, highest_limit))

    print("\n Memory Pressure Test Passed \n")
|
## patch the interface of Neutron::Events boost python binding
from mcni.mcnibp import vector_Event, Position_double, Velocity_double, NeutronState, NeutronSpin
# Keep a reference to the binding-generated __str__ so the override below can
# fall back to it for long event vectors.
original__str__ = vector_Event.__str__

def __str__(self):
    """Short vectors (<=10 events) list their elements; longer ones keep the original repr."""
    if len(self)>10: return original__str__(self)
    return ', '.join( [ '%s' % n for n in self ] )
vector_Event.__str__ = __str__

def vector3_str(self):
    # Render Position/Velocity as a plain 3-element Python list.
    return str(list(self))
Position_double.__str__ = Velocity_double.__str__ = vector3_str
Position_double.__repr__ = Velocity_double.__repr__ = vector3_str

def spin_str(self):
    # Spin is shown as its two components "(s1, s2)".
    return '(%s, %s)' % (self.s1, self.s2)
NeutronSpin.__str__ = NeutronSpin.__repr__ = spin_str

def state_str(self):
    """Readable rendering of a neutron state (position, velocity, spin)."""
    return "State(position=%r, velocity=%r, spin=%r)" % (
        self.position, self.velocity, self.spin)
NeutronState.__str__ = state_str
from mcni.mcnibp import NeutronEventBuffer
def NEB_appendNeutrons(self, neutrons, startindex=None, endindex=None):
    """append neutrons to the end of this neutron buffer

    neutrons: the neutron buffer from which the new neutrons are to be obtained
      and appended to this buffer
    startindex, endindex: define the region from which neutrons are obtained
      (defaults: 0 and len(neutrons))
    """
    if startindex is None:
        startindex = 0
    if endindex is None:
        endindex = len(neutrons)
    if not isinstance(endindex, int):
        # Coerce e.g. numpy integers to a plain int for the C++ binding,
        # asserting the conversion is lossless.
        _ = endindex
        endindex = int(_)
        assert _==endindex
    self.append(neutrons, startindex, endindex)
    return
NeutronEventBuffer.appendNeutrons = NEB_appendNeutrons
# Keep the original bound method so the wrapper below can delegate to it.
NEB_snapshot_o = NeutronEventBuffer.snapshot
def NEB_snapshot(self, n=None):
    """take a snapshot of this neutron buffer, remove the invalid
    ones (prob<0), and return a new neutron buffer.

    n: the number of neutrons in this buffer from which
      the snapshot will be taken. None means all neutrons
    """
    if n is None:
        n = len(self)
    return NEB_snapshot_o(self, n)
NeutronEventBuffer.snapshot = NEB_snapshot
def NEB_tonpyarr(self):
    """Convert this neutron buffer to a numpy array of shape (N, ndblsperneutron)."""
    from mcni.neutron_storage import neutrons_as_npyarr, ndblsperneutron
    arr = neutrons_as_npyarr(self)
    arr.shape = -1, ndblsperneutron
    return arr
NeutronEventBuffer.to_npyarr = NEB_tonpyarr
def NEB_fromnpyarr(self, arr):
    """Replace this buffer's contents with neutrons from a numpy array.

    arr is reshaped IN PLACE to (N, ndblsperneutron).
    """
    from mcni.neutron_storage import ndblsperneutron
    arr.shape = -1, ndblsperneutron
    # number of events
    N = len(arr)
    # wrap the array data as a C-level event pointer
    cevents = cevents_from_npyarr(arr)
    # resize myself so we can accept events from array
    from mcni import neutron
    ev = neutron()
    self.resize(N, ev)
    # copy
    self.fromCevents(cevents, N)
    return
NeutronEventBuffer.from_npyarr = NEB_fromnpyarr
def cevents_from_npyarr(npyarr):
    '''convert a numpy array to a boost-python instance of Neutron::cEvent pointer'''
    # Prefer the danse.ins packaging; fall back to the legacy module with a warning.
    try:
        from danse.ins.numpyext import getdataptr
    except ImportError:
        from numpyext import getdataptr
        import warnings
        warnings.warn("Using old numpyext. Should use danse.ins.numpyext")
    ptr = getdataptr( npyarr )
    try:
        from danse.ins import bpext
    except ImportError:
        import bpext
        import warnings
        warnings.warn("Using old bpext. Should use danse.ins.bpext")
    # Ensure the mcni binding module (and its registered types) is loaded.
    import mcni._mcni
    cevents = bpext.wrap_ptr( ptr, 'cNeutronEvent' )
    # Keep the source array alive: the wrapped pointer does not own the data,
    # so dropping npyarr would leave cevents dangling.
    cevents.origin = npyarr
    return cevents
|
# In-memory room store: maps room_id -> room object.
rooms = {}


def save(room_id, room):
    """Insert or overwrite the room stored under *room_id*."""
    rooms.update({room_id: room})


def delete(room_id):
    """Remove and return the room under *room_id* (KeyError if absent)."""
    return rooms.pop(room_id)


def update(room_id, room):
    """Replace the room stored under *room_id* (alias of save)."""
    save(room_id, room)


def get(room_id):
    """Return the room stored under *room_id* (KeyError if absent)."""
    return rooms[room_id]
import itertools
import numpy as np
from sklearn.linear_model import SGDClassifier, SGDRanking
from sklearn import metrics
from minirank.compat import RankSVM as MinirankSVM
from scipy import stats
def transform_pairwise(X, y):
    """Transform ranking data into pairs with balanced binary labels.

    Converts an n-class ranking problem into a two-class classification
    problem: every pair of samples with different targets (and the same
    group, when groups are given) contributes one difference vector. The
    pair labels alternate in sign so the output classes are balanced.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        The data.
    y : array, shape (n_samples,) or (n_samples, 2)
        Target labels. If 2D, the second column is a group id; samples
        from different groups are never paired.

    Returns
    -------
    X_trans : array, shape (k, n_features)
        Difference vectors, one per retained pair.
    y_trans : array, shape (k,)
        Pair labels with values in {-1, +1}.
    """
    y = np.asarray(y)
    if y.ndim == 1:
        # no grouping column given: put every sample in one group
        y = np.c_[y, np.ones(y.shape[0])]
    diffs = []
    labels = []
    pairs = itertools.combinations(range(X.shape[0]), 2)
    for k, (i, j) in enumerate(pairs):
        # skip pairs with identical targets or from different groups
        if y[i, 0] == y[j, 0] or y[i, 1] != y[j, 1]:
            continue
        diff = X[i] - X[j]
        label = np.sign(y[i, 0] - y[j, 0])
        # alternate the sign with the pair index to balance the classes
        if label != (-1) ** k:
            label = -label
            diff = -diff
        diffs.append(diff)
        labels.append(label)
    return np.asarray(diffs), np.asarray(labels).ravel()
class RankSVM(SGDClassifier):
    """Pairwise ranking on top of an SGDClassifier.

    Takes an n-class ranking problem and converts it into a two-class
    classification problem on sample differences, a setting known as
    `pairwise ranking`.

    Authors: Fabian Pedregosa <fabian@fseoane.net>
             Alexandre Gramfort <alexandre.gramfort@inria.fr>

    https://gist.github.com/2071994
    """

    def fit(self, X, y):
        """Fit a pairwise ranking model.

        Parameters
        ----------
        X : array, shape (n_samples, n_features)
        y : array, shape (n_samples,) or (n_samples, 2)

        Returns
        -------
        self
        """
        pairs_X, pairs_y = transform_pairwise(X, y)
        super(RankSVM, self).fit(pairs_X, pairs_y)
        return self

    def predict(self, X):
        """Predict class labels, remapping -1 to 0.

        The underlying classifier emits labels in {-1, +1}.
        # FIXME only works in this example!!!
        """
        labels = super(RankSVM, self).predict(X)
        labels[labels == -1] = 0
        return labels

    def score(self, X, y):
        """Mean accuracy on the induced pairwise problem.

        Because we transformed into a pairwise problem, chance level is
        at 0.5.
        """
        pairs_X, pairs_y = transform_pairwise(X, y)
        return np.mean(super(RankSVM, self).predict(pairs_X) == pairs_y)
def rank(clf, X):
    """Return the rank position of each row of X under clf's linear scores.

    Scores are X . coef; positions are such that the lowest-scoring row
    gets rank 0 and the highest gets rank n-1.
    """
    coef = clf.coef_[0] if clf.coef_.shape[0] == 1 else clf.coef_
    scores = np.dot(X, coef)
    order = np.argsort(scores)
    # invert the permutation: position of each sample in sorted order
    positions = np.zeros_like(order)
    positions[order] = np.arange(len(order))
    return positions
def kendalltau(clf, X, y):
    """Absolute Kendall tau between clf's linear scores on X and targets y."""
    if clf.coef_.shape[0] == 1:
        weights = clf.coef_[0]
    else:
        weights = clf.coef_
    tau, _pvalue = stats.kendalltau(np.dot(X, weights), y)
    return np.abs(tau)
if __name__=="__main__":
    # Demo: compare plain SGD, class-weighted SGD, pairwise SGD ranking and
    # RankSVM on a heavily imbalanced two-class toy dataset.
    # NOTE(review): Python 2 print syntax; SGDRanking and the
    # metrics.zero_one_score / metrics.auc_score helpers appear to come from
    # an old scikit-learn fork -- confirm pinned versions before running.
    rs = np.random.RandomState(0)
    # imbalanced data: 10000 background samples vs 100 samples shifted to (2, 2)
    n_samples_1 = 10000
    n_samples_2 = 100
    X = np.r_[1.5 * rs.randn(n_samples_1, 2),
              0.5 * rs.randn(n_samples_2, 2) + [2, 2]]
    y = np.array([0] * (n_samples_1) + [1] * (n_samples_2))
    # shuffle so the two classes are interleaved
    idx = np.arange(y.shape[0])
    rs.shuffle(idx)
    X = X[idx]
    y = y[idx]
    # standardize features to zero mean / unit variance
    mean = X.mean(axis=0)
    std = X.std(axis=0)
    X = (X - mean) / std
    for clf, name in ((SGDClassifier(n_iter=100, alpha=0.01), "plain sgd"),
                      (SGDClassifier(n_iter=100, alpha=0.01,
                                     class_weight={1: 10}),"weighted sgd"),
                      (SGDRanking(n_iter=1000, alpha=0.01,
                                  loss='roc_pairwise_ranking'), "pairwise sgd"),
                      (RankSVM(n_iter=100, alpha=0.01, loss='hinge'), 'RankSVM'),
                      ):
        clf.fit(X, y)
        print clf
        pred = clf.predict(X)
        # accuracy, AUC, confusion matrix and Kendall tau on the training data
        print "ACC: %.4f" % metrics.zero_one_score(y, pred)
        print "AUC: %.4f" % metrics.auc_score(y, pred)
        print "CONFUSION MATRIX: "
        print metrics.confusion_matrix(y, pred)
        print "Kendall Tau: %.4f" % kendalltau(clf,X,y)
        print 80*'='
    # baseline: minirank's RankSVM on the same data
    clf = MinirankSVM(max_iter=100, alpha=0.01).fit(X,y)
    print clf
    # threshold the linear scores at 0 to obtain class predictions
    scores = np.dot(X,clf.coef_)
    pred = (scores > 0).astype(np.int)
    print "ACC: %.4f" % metrics.zero_one_score(y, pred)
    print "AUC: %.4f" % metrics.auc_score(y, pred)
    print "CONFUSION MATRIX: "
    print metrics.confusion_matrix(y, pred)
    print "Kendall Tau: %.4f" % kendalltau(clf,X,y)
    print 80*'='
from django import template
from django.db.models import Q
from notifications.models import Notification
register = template.Library()


@register.simple_tag
def notifications(user):
    """Return the 8 most recent notifications for *user*, newest first."""
    recent = Notification.objects.filter(user=user).order_by('-created_at')
    return recent[:8]


@register.simple_tag
def notifs_unread(user):
    """Return how many of *user*'s notifications have not been seen."""
    unseen = Notification.objects.filter(Q(user=user) & Q(is_seen=False))
    return unseen.count()
|
from google.appengine.ext import ndb
class User(ndb.Model):
    """Datastore entity for an application user account."""

    # unique login name; must be provided
    username = ndb.StringProperty(required=True)
    # stored password hash -- hashing scheme not visible here; see caller
    hash = ndb.StringProperty(required=True)
    # set automatically when the entity is first stored
    created = ndb.DateTimeProperty(auto_now_add=True)
|
"""
LTI consumer CMS plugin
"""
from django.utils.translation import gettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from richie.apps.core.defaults import PLUGINS_GROUP
from .forms import LTIConsumerForm
from .models import LTIConsumer
@plugin_pool.register_plugin
class LTIConsumerPlugin(CMSPluginBase):
    """
    A plugin to consume LTI content.
    """

    # LTI content is standalone: nested plugins are not allowed
    allow_children = False
    # plugin output can be cached by the CMS
    cache = True
    disable_child_plugins = True
    form = LTIConsumerForm
    model = LTIConsumer
    module = PLUGINS_GROUP
    name = _("LTI consumer")
    render_template = "richie/lti_consumer/lti_consumer.html"

    class Media:
        """
        Simple UX improvement that hides useless fields if a predefined LTI provider is used
        """

        js = ("lti_consumer/js/change_form.js",)
|
# Copyright (c) 2017, MD2K Center of Excellence
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class data_descriptor:
    @staticmethod
    def get_data_descriptor(dd_type: str, dd_unit: str, dd_labels: dict) -> dict:
        """
        Build a data-descriptor dictionary with a single entry.

        Please have a look at /kernel/schema/examples/ for schema and field details
        :param dd_type: measurement type; must be non-empty
        :param dd_unit: measurement unit; must be non-empty
        :param dd_labels: label mapping; must be non-empty
        :return: dict with one "data_descriptor" list containing the entry
        :raises ValueError: if any argument is empty
        """
        # guard clauses: reject any missing field up front
        if not dd_type:
            raise ValueError("Type cannot be empty.")
        if not dd_unit:
            raise ValueError("Unit cannot be empty.")
        if not dd_labels:
            raise ValueError("Labels name cannot be empty.")
        entry = {
            "type": dd_type,
            "unit": dd_unit,
            "labels": dd_labels
        }
        return {"data_descriptor": [entry]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.